SOLR-940 and SOLR-1079 -- Updating Lucene jars, updating Trie field types per Lucene's changes, adding ReverseStringFilterFactory, updates related to LUCENE-1500.

Committing all the above changes in one go to avoid compile errors due to Lucene API updates (except for ReverseStringFilterFactory).

git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@764291 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Shalin Shekhar Mangar 2009-04-12 11:03:09 +00:00
parent 918860396b
commit ad42cc8059
21 changed files with 164 additions and 225 deletions

View File

@ -374,6 +374,10 @@ Other Changes
23. SOLR-952: Cleanup duplicated code in deprecated HighlightingUtils (hossman) 23. SOLR-952: Cleanup duplicated code in deprecated HighlightingUtils (hossman)
24. Upgraded to Lucene 2.9-dev r764281 (shalin)
25. SOLR-1079: Rename omitTf to omitTermFreqAndPositions (shalin)
Build Build
---------------------- ----------------------
1. SOLR-776: Added in ability to sign artifacts via Ant for releases (gsingers) 1. SOLR-776: Added in ability to sign artifacts via Ant for releases (gsingers)

View File

@ -39,7 +39,7 @@
not normally be changed by applications. not normally be changed by applications.
1.0: multiValued attribute did not exist, all fields are multiValued by nature 1.0: multiValued attribute did not exist, all fields are multiValued by nature
1.1: multiValued attribute introduced, false by default 1.1: multiValued attribute introduced, false by default
1.2: omitTf attribute introduced, true by default --> 1.2: omitTermFreqAndPositions attribute introduced, true by default -->
<types> <types>
<!-- field type definitions. The "name" attribute is <!-- field type definitions. The "name" attribute is
@ -120,10 +120,7 @@
human readable in the internal form. Range searches on such fields use the fast Trie Range Queries human readable in the internal form. Range searches on such fields use the fast Trie Range Queries
which are much faster than range searches on the SortableNumberField types. which are much faster than range searches on the SortableNumberField types.
For the fast range search to work, trie fields must be indexed. Trie fields are <b>not</b> sortable For the fast range search to work, trie fields must be indexed.
in numerical order. Also, they cannot be used in function queries. If one needs sorting as well as
fast range search, one should create a copy field specifically for sorting. Same workaround is
suggested for using trie fields in function queries as well.
For each number being added to this field, multiple terms are generated as per the algorithm described in For each number being added to this field, multiple terms are generated as per the algorithm described in
org.apache.lucene.search.trie package description. The possible number of terms depend on the precisionStep org.apache.lucene.search.trie package description. The possible number of terms depend on the precisionStep
@ -131,8 +128,7 @@
value of precisionStep is 8. value of precisionStep is 8.
Note that if you use a precisionStep of 32 for int/float and 64 for long/double, then multiple terms Note that if you use a precisionStep of 32 for int/float and 64 for long/double, then multiple terms
will not be generated, range search will be no faster than any other number field, will not be generated, and range search will be no faster than any other number field.
but sorting will be possible.
--> -->
<fieldType name="tint" class="solr.TrieField" type="integer" omitNorms="true" positionIncrementGap="0" indexed="true" stored="false" /> <fieldType name="tint" class="solr.TrieField" type="integer" omitNorms="true" positionIncrementGap="0" indexed="true" stored="false" />
<fieldType name="tfloat" class="solr.TrieField" type="float" omitNorms="true" positionIncrementGap="0" indexed="true" stored="false" /> <fieldType name="tfloat" class="solr.TrieField" type="float" omitNorms="true" positionIncrementGap="0" indexed="true" stored="false" />
@ -143,8 +139,7 @@
<!-- <!--
This date field manipulates the value into a trie encoded strings for fast range searches. They follow the This date field manipulates the value into a trie encoded strings for fast range searches. They follow the
same format and semantics as the normal DateField and support the date math syntax except that they are same format and semantics as the normal DateField and support the date math syntax.
not sortable and cannot be used in function queries.
--> -->
<fieldType name="tdate" class="solr.TrieField" type="date" omitNorms="true" positionIncrementGap="0" indexed="true" stored="false" /> <fieldType name="tdate" class="solr.TrieField" type="date" omitNorms="true" positionIncrementGap="0" indexed="true" stored="false" />

View File

@ -1,2 +1,2 @@
AnyObjectId[19a9360312b381c572a18d651949632a29f855fc] was removed in git history. AnyObjectId[7d13c44721860aef7c2ea9b17b2b4cbac98cf6f7] was removed in git history.
Apache SVN contains full history. Apache SVN contains full history.

View File

@ -1,2 +1,2 @@
AnyObjectId[9a5a59a77ba824adf6352b10977e561b7569003e] was removed in git history. AnyObjectId[04bc7ae6e6cc343ade7e43bf42b3bce97d9e5954] was removed in git history.
Apache SVN contains full history. Apache SVN contains full history.

View File

@ -1,2 +1,2 @@
AnyObjectId[fe912bb07ac94c5fc6411fc687495b0c77d5128e] was removed in git history. AnyObjectId[6a686abf403738ec93b9e8b7de66878ed0aee14d] was removed in git history.
Apache SVN contains full history. Apache SVN contains full history.

View File

@ -1,2 +1,2 @@
AnyObjectId[87a1ba872e2530ee13180eeb16a7cbf3ecbacec9] was removed in git history. AnyObjectId[9664195864691ffb40bb729ea581f7ec8644497a] was removed in git history.
Apache SVN contains full history. Apache SVN contains full history.

View File

@ -1,2 +1,2 @@
AnyObjectId[52b619b39e18950c4dec057389ad555332b998b5] was removed in git history. AnyObjectId[26ab10a447cb308f1f139087d031c378a84ff635] was removed in git history.
Apache SVN contains full history. Apache SVN contains full history.

View File

@ -1,2 +1,2 @@
AnyObjectId[fd78299deb94ef675d487778a5007d647de629c8] was removed in git history. AnyObjectId[e3d36c294abda52c7fa7598c1f614ff0aa7594ad] was removed in git history.
Apache SVN contains full history. Apache SVN contains full history.

View File

@ -1,2 +1,2 @@
AnyObjectId[d29f22b80b94be6b586f66e7a2176e5f8d05fcbe] was removed in git history. AnyObjectId[2c189b0b4f45bddd0f307ce73bc670e22a7a3c26] was removed in git history.
Apache SVN contains full history. Apache SVN contains full history.

View File

@ -0,0 +1,34 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.analysis;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.reverse.ReverseStringFilter;
/**
* A FilterFactory which reverses the input.
*
* @version $Id$
* @since solr 1.4
*/
public class ReverseStringFilterFactory extends BaseTokenFilterFactory {
  /**
   * Wraps the incoming stream in a Lucene {@link ReverseStringFilter}, which
   * emits each token with its characters in reverse order (e.g. for
   * leading-wildcard search support).
   *
   * @param in the token stream to wrap; must not be null
   * @return a {@link ReverseStringFilter} reading from {@code in}
   */
public ReverseStringFilter create(TokenStream in) {
return new ReverseStringFilter(in);
}
}

View File

@ -20,6 +20,8 @@ import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.search.trie.TrieUtils; import org.apache.lucene.search.trie.TrieUtils;
import org.apache.lucene.search.trie.IntTrieTokenStream;
import org.apache.lucene.search.trie.LongTrieTokenStream;
import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException;
import org.apache.solr.schema.DateField; import org.apache.solr.schema.DateField;
import static org.apache.solr.schema.TrieField.TrieTypes; import static org.apache.solr.schema.TrieField.TrieTypes;
@ -39,8 +41,9 @@ import java.io.Reader;
* @since solr 1.4 * @since solr 1.4
*/ */
public class TrieIndexTokenizerFactory extends BaseTokenizerFactory { public class TrieIndexTokenizerFactory extends BaseTokenizerFactory {
private final int precisionStep; protected static final DateField dateField = new DateField();
private final TrieTypes type; protected final int precisionStep;
protected final TrieTypes type;
public TrieIndexTokenizerFactory(TrieTypes type, int precisionStep) { public TrieIndexTokenizerFactory(TrieTypes type, int precisionStep) {
this.type = type; this.type = type;
@ -49,55 +52,27 @@ public class TrieIndexTokenizerFactory extends BaseTokenizerFactory {
public TokenStream create(Reader input) { public TokenStream create(Reader input) {
try { try {
return new TrieIndexTokenizer(input, type, precisionStep); StringBuilder builder = new StringBuilder();
char[] buf = new char[8];
int len;
while ((len = input.read(buf)) != -1)
builder.append(buf, 0, len);
switch (type) {
case INTEGER:
return new IntTrieTokenStream(Integer.parseInt(builder.toString()), precisionStep);
case FLOAT:
return new IntTrieTokenStream(TrieUtils.floatToSortableInt(Float.parseFloat(builder.toString())), precisionStep);
case LONG:
return new LongTrieTokenStream(Long.parseLong(builder.toString()), precisionStep);
case DOUBLE:
return new LongTrieTokenStream(TrieUtils.doubleToSortableLong(Double.parseDouble(builder.toString())), precisionStep);
case DATE:
return new LongTrieTokenStream(dateField.parseMath(null, builder.toString()).getTime(), precisionStep);
default:
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field");
}
} catch (IOException e) { } catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to create TrieIndexTokenizer", e); throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to create TrieIndexTokenizer", e);
} }
} }
/**
* @version $Id$
* @since solr 1.4
*/
public static class TrieIndexTokenizer extends Tokenizer {
private final String[] trieVals;
private int pos = 0;
protected static final DateField dateField = new DateField();
public TrieIndexTokenizer(Reader reader, TrieTypes type, int precisionStep) throws IOException {
super(reader);
StringBuilder builder = new StringBuilder();
char[] buf = new char[8];
int len;
while ((len = reader.read(buf)) != -1)
builder.append(buf, 0, len);
switch (type) {
case INTEGER:
this.trieVals = TrieUtils.trieCodeInt(Integer.parseInt(builder.toString()), precisionStep);
break;
case FLOAT:
this.trieVals = TrieUtils.trieCodeInt(TrieUtils.floatToSortableInt(Float.parseFloat(builder.toString())), precisionStep);
break;
case LONG:
this.trieVals = TrieUtils.trieCodeLong(Long.parseLong(builder.toString()), precisionStep);
break;
case DOUBLE:
this.trieVals = TrieUtils.trieCodeLong(TrieUtils.doubleToSortableLong(Double.parseDouble(builder.toString())), precisionStep);
break;
case DATE:
this.trieVals = TrieUtils.trieCodeLong(dateField.parseMath(null, builder.toString()).getTime(), precisionStep);
break;
default:
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field");
}
}
public Token next(Token token) {
if (pos >= trieVals.length) return null;
token.reinit(trieVals[pos++], 0, 0);
token.setPositionIncrement(0);
return token;
}
}
} }

View File

@ -16,9 +16,8 @@
*/ */
package org.apache.solr.analysis; package org.apache.solr.analysis;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.KeywordTokenizer;
import org.apache.lucene.search.trie.TrieUtils; import org.apache.lucene.search.trie.TrieUtils;
import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException;
import org.apache.solr.schema.DateField; import org.apache.solr.schema.DateField;
@ -26,6 +25,7 @@ import org.apache.solr.schema.TrieField;
import java.io.IOException; import java.io.IOException;
import java.io.Reader; import java.io.Reader;
import java.io.StringReader;
/** /**
* Query time tokenizer for trie fields. It uses methods in TrieUtils to create a prefix coded representation of the * Query time tokenizer for trie fields. It uses methods in TrieUtils to create a prefix coded representation of the
@ -39,7 +39,8 @@ import java.io.Reader;
* @since solr 1.4 * @since solr 1.4
*/ */
public class TrieQueryTokenizerFactory extends BaseTokenizerFactory { public class TrieQueryTokenizerFactory extends BaseTokenizerFactory {
private final TrieField.TrieTypes type; protected static final DateField dateField = new DateField();
protected final TrieField.TrieTypes type;
public TrieQueryTokenizerFactory(TrieField.TrieTypes type) { public TrieQueryTokenizerFactory(TrieField.TrieTypes type) {
this.type = type; this.type = type;
@ -47,34 +48,12 @@ public class TrieQueryTokenizerFactory extends BaseTokenizerFactory {
public TokenStream create(Reader reader) { public TokenStream create(Reader reader) {
try { try {
return new TrieQueryTokenizer(reader, type);
} catch (IOException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to create TrieQueryTokenizer", e);
}
}
public static class TrieQueryTokenizer extends Tokenizer {
private static final DateField dateField = new DateField();
private final String number;
private boolean used = false;
private TrieField.TrieTypes type;
public TrieQueryTokenizer(Reader reader, TrieField.TrieTypes type) throws IOException {
super(reader);
this.type = type;
StringBuilder builder = new StringBuilder(); StringBuilder builder = new StringBuilder();
char[] buf = new char[8]; char[] buf = new char[8];
int len; int len;
while ((len = reader.read(buf)) != -1) while ((len = reader.read(buf)) != -1)
builder.append(buf, 0, len); builder.append(buf, 0, len);
number = builder.toString(); String value, number = builder.toString();
}
@Override
public Token next(Token token) throws IOException {
if (used) return null;
String value = number;
switch (type) { switch (type) {
case INTEGER: case INTEGER:
value = TrieUtils.intToPrefixCoded(Integer.parseInt(number)); value = TrieUtils.intToPrefixCoded(Integer.parseInt(number));
@ -94,10 +73,9 @@ public class TrieQueryTokenizerFactory extends BaseTokenizerFactory {
default: default:
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field"); throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field");
} }
token.reinit(value, 0, 0); return new KeywordTokenizer(new StringReader(value));
token.setPositionIncrement(0); } catch (IOException e) {
used = true; throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unable to create trie query tokenizer", e);
return token;
} }
} }
} }

View File

@ -17,7 +17,7 @@
package org.apache.solr.handler.component; package org.apache.solr.handler.component;
import org.apache.lucene.document.Fieldable; import org.apache.lucene.document.Field;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.*; import org.apache.lucene.search.*;
@ -180,7 +180,7 @@ public class QueryComponent extends SearchComponent
SortField[] sortFields = sort==null ? new SortField[]{SortField.FIELD_SCORE} : sort.getSort(); SortField[] sortFields = sort==null ? new SortField[]{SortField.FIELD_SCORE} : sort.getSort();
ScoreDoc sd = new ScoreDoc(0,1.0f); // won't work for comparators that look at the score ScoreDoc sd = new ScoreDoc(0,1.0f); // won't work for comparators that look at the score
NamedList sortVals = new NamedList(); // order is important for the sort fields NamedList sortVals = new NamedList(); // order is important for the sort fields
StringFieldable field = new StringFieldable(); Field field = new Field("dummy", "", Field.Store.YES, Field.Index.NO); // a dummy Field
for (SortField sortField: sortFields) { for (SortField sortField: sortFields) {
int type = sortField.getType(); int type = sortField.getType();
@ -229,7 +229,7 @@ public class QueryComponent extends SearchComponent
// indexedToReadable() should be a no-op and should // indexedToReadable() should be a no-op and should
// thus be harmless anyway (for all current ways anyway) // thus be harmless anyway (for all current ways anyway)
if (val instanceof String) { if (val instanceof String) {
field.val = (String)val; field.setValue((String)val);
val = ft.toObject(field); val = ft.toObject(field);
} }
vals.add(val); vals.add(val);
@ -728,104 +728,6 @@ public class QueryComponent extends SearchComponent
}; };
} }
static class StringFieldable implements Fieldable {
public String val;
public void setBoost(float boost) {
}
public float getBoost() {
return 0;
}
public String name() {
return null;
}
public String stringValue() {
return val;
}
public Reader readerValue() {
return null;
}
public byte[] binaryValue() {
return new byte[0];
}
public TokenStream tokenStreamValue() {
return null;
}
public boolean isStored() {
return true;
}
public boolean isIndexed() {
return true;
}
public boolean isTokenized() {
return true;
}
public boolean isCompressed() {
return false;
}
public boolean isTermVectorStored() {
return false;
}
public boolean isStoreOffsetWithTermVector() {
return false;
}
public boolean isStorePositionWithTermVector() {
return false;
}
public boolean isBinary() {
return false;
}
public boolean getOmitNorms() {
return false;
}
public void setOmitNorms(boolean omitNorms) {
}
public void setOmitTf(boolean omitTf) {
}
public boolean getOmitTf() {
return false;
}
public boolean isLazy() {
return false;
}
public int getBinaryOffset() {
return 0;
}
public int getBinaryLength() {
return 0;
}
public byte[] getBinaryValue() {
return new byte[0];
}
public byte[] getBinaryValue(byte[] result) {
return new byte[0];
}
}
///////////////////////////////////////////// /////////////////////////////////////////////
/// SolrInfoMBean /// SolrInfoMBean
//////////////////////////////////////////// ////////////////////////////////////////////

View File

@ -38,13 +38,7 @@ import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.highlight.Formatter; import org.apache.lucene.search.highlight.*;
import org.apache.lucene.search.highlight.Fragmenter;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SpanScorer;
import org.apache.lucene.search.highlight.TextFragment;
import org.apache.lucene.search.highlight.TokenSources;
import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.HighlightParams; import org.apache.solr.common.params.HighlightParams;
import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.params.SolrParams;
@ -309,12 +303,16 @@ public class DefaultSolrHighlighter extends SolrHighlighter
} else { } else {
highlighter.setMaxDocCharsToAnalyze(maxCharsToAnalyze); highlighter.setMaxDocCharsToAnalyze(maxCharsToAnalyze);
} }
TextFragment[] bestTextFragments = highlighter.getBestTextFragments(tstream, docTexts[j], mergeContiguousFragments, numFragments); try {
for (int k = 0; k < bestTextFragments.length; k++) { TextFragment[] bestTextFragments = highlighter.getBestTextFragments(tstream, docTexts[j], mergeContiguousFragments, numFragments);
if ((bestTextFragments[k] != null) && (bestTextFragments[k].getScore() > 0)) { for (int k = 0; k < bestTextFragments.length; k++) {
frags.add(bestTextFragments[k]); if ((bestTextFragments[k] != null) && (bestTextFragments[k].getScore() > 0)) {
frags.add(bestTextFragments[k]);
}
} }
} catch (InvalidTokenOffsetsException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
} }
} }
// sort such that the fragments with the highest score come first // sort such that the fragments with the highest score come first

View File

@ -35,7 +35,7 @@ abstract class FieldProperties {
final static int BINARY = 0x00000008; final static int BINARY = 0x00000008;
final static int COMPRESSED = 0x00000010; final static int COMPRESSED = 0x00000010;
final static int OMIT_NORMS = 0x00000020; final static int OMIT_NORMS = 0x00000020;
final static int OMIT_TF = 0x00000040; final static int OMIT_TF_POSITIONS = 0x00000040;
final static int STORE_TERMVECTORS = 0x00000080; final static int STORE_TERMVECTORS = 0x00000080;
final static int STORE_TERMPOSITIONS = 0x00000100; final static int STORE_TERMPOSITIONS = 0x00000100;
final static int STORE_TERMOFFSETS = 0x00000200; final static int STORE_TERMOFFSETS = 0x00000200;
@ -49,7 +49,7 @@ abstract class FieldProperties {
static final String[] propertyNames = { static final String[] propertyNames = {
"indexed", "tokenized", "stored", "indexed", "tokenized", "stored",
"binary", "compressed", "omitNorms", "omitTf", "binary", "compressed", "omitNorms", "omitTermFreqAndPositions",
"termVectors", "termPositions", "termOffsets", "termVectors", "termPositions", "termOffsets",
"multiValued", "multiValued",
"sortMissingFirst","sortMissingLast","required" "sortMissingFirst","sortMissingLast","required"

View File

@ -88,10 +88,10 @@ public abstract class FieldType extends FieldProperties {
// Handle additional arguments... // Handle additional arguments...
void setArgs(IndexSchema schema, Map<String,String> args) { void setArgs(IndexSchema schema, Map<String,String> args) {
// default to STORED, INDEXED, OMIT_TF and MULTIVALUED depending on schema version // default to STORED, INDEXED, OMIT_TF_POSITIONS and MULTIVALUED depending on schema version
properties = (STORED | INDEXED); properties = (STORED | INDEXED);
if (schema.getVersion()< 1.1f) properties |= MULTIVALUED; if (schema.getVersion()< 1.1f) properties |= MULTIVALUED;
if (schema.getVersion()> 1.1f) properties |= OMIT_TF; if (schema.getVersion()> 1.1f) properties |= OMIT_TF_POSITIONS;
this.args=args; this.args=args;
Map<String,String> initArgs = new HashMap<String,String>(args); Map<String,String> initArgs = new HashMap<String,String>(args);
@ -202,7 +202,7 @@ public abstract class FieldType extends FieldProperties {
getFieldIndex(field, val), getFieldIndex(field, val),
getFieldTermVec(field, val)); getFieldTermVec(field, val));
f.setOmitNorms(field.omitNorms()); f.setOmitNorms(field.omitNorms());
f.setOmitTf(field.omitTf()); f.setOmitTermFreqAndPositions(field.omitTf());
f.setBoost(boost); f.setBoost(boost);
return f; return f;
} }

View File

@ -79,7 +79,7 @@ public final class SchemaField extends FieldProperties {
public boolean storeTermPositions() { return (properties & STORE_TERMPOSITIONS)!=0; } public boolean storeTermPositions() { return (properties & STORE_TERMPOSITIONS)!=0; }
public boolean storeTermOffsets() { return (properties & STORE_TERMOFFSETS)!=0; } public boolean storeTermOffsets() { return (properties & STORE_TERMOFFSETS)!=0; }
public boolean omitNorms() { return (properties & OMIT_NORMS)!=0; } public boolean omitNorms() { return (properties & OMIT_NORMS)!=0; }
public boolean omitTf() { return (properties & OMIT_TF)!=0; } public boolean omitTf() { return (properties & OMIT_TF_POSITIONS)!=0; }
public boolean multiValued() { return (properties & MULTIVALUED)!=0; } public boolean multiValued() { return (properties & MULTIVALUED)!=0; }
public boolean sortMissingFirst() { return (properties & SORT_MISSING_FIRST)!=0; } public boolean sortMissingFirst() { return (properties & SORT_MISSING_FIRST)!=0; }
public boolean sortMissingLast() { return (properties & SORT_MISSING_LAST)!=0; } public boolean sortMissingLast() { return (properties & SORT_MISSING_LAST)!=0; }
@ -137,7 +137,7 @@ public final class SchemaField extends FieldProperties {
} }
if (on(falseProps,INDEXED)) { if (on(falseProps,INDEXED)) {
int pp = (INDEXED | OMIT_NORMS | OMIT_TF int pp = (INDEXED | OMIT_NORMS | OMIT_TF_POSITIONS
| STORE_TERMVECTORS | STORE_TERMPOSITIONS | STORE_TERMOFFSETS | STORE_TERMVECTORS | STORE_TERMPOSITIONS | STORE_TERMOFFSETS
| SORT_MISSING_FIRST | SORT_MISSING_LAST); | SORT_MISSING_FIRST | SORT_MISSING_LAST);
if (on(pp,trueProps)) { if (on(pp,trueProps)) {

View File

@ -32,7 +32,7 @@ import java.io.IOException;
public class TextField extends CompressableField { public class TextField extends CompressableField {
protected void init(IndexSchema schema, Map<String,String> args) { protected void init(IndexSchema schema, Map<String,String> args) {
properties |= TOKENIZED; properties |= TOKENIZED;
if (schema.getVersion()> 1.1f) properties &= ~OMIT_TF; if (schema.getVersion()> 1.1f) properties &= ~OMIT_TF_POSITIONS;
super.init(schema, args); super.init(schema, args);
} }

View File

@ -29,6 +29,7 @@ import org.apache.solr.common.SolrException;
import org.apache.solr.request.TextResponseWriter; import org.apache.solr.request.TextResponseWriter;
import org.apache.solr.request.XMLWriter; import org.apache.solr.request.XMLWriter;
import org.apache.solr.search.QParser; import org.apache.solr.search.QParser;
import org.apache.solr.search.function.*;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
@ -121,6 +122,23 @@ public class TrieField extends FieldType {
} }
} }
public ValueSource getValueSource(SchemaField field) {
switch (type) {
case INTEGER:
return new IntFieldSource(field.getName(), TrieUtils.FIELD_CACHE_INT_PARSER);
case FLOAT:
return new FloatFieldSource(field.getName(), TrieUtils.FIELD_CACHE_FLOAT_PARSER);
case LONG:
return new LongFieldSource(field.getName(), TrieUtils.FIELD_CACHE_LONG_PARSER);
case DOUBLE:
return new DoubleFieldSource(field.getName(), TrieUtils.FIELD_CACHE_DOUBLE_PARSER);
case DATE:
return new LongFieldSource(field.getName(), TrieUtils.FIELD_CACHE_LONG_PARSER);
default:
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown type for trie field: " + field.name);
}
}
public void write(XMLWriter xmlWriter, String name, Fieldable f) throws IOException { public void write(XMLWriter xmlWriter, String name, Fieldable f) throws IOException {
xmlWriter.writeVal(name, toObject(f)); xmlWriter.writeVal(name, toObject(f));
} }
@ -153,31 +171,31 @@ public class TrieField extends FieldType {
Query query = null; Query query = null;
switch (type) { switch (type) {
case INTEGER: case INTEGER:
query = new IntTrieRangeFilter(field, field, precisionStep, query = new IntTrieRangeFilter(field, precisionStep,
min == null ? null : Integer.parseInt(min), min == null ? null : Integer.parseInt(min),
max == null ? null : Integer.parseInt(max), max == null ? null : Integer.parseInt(max),
minInclusive, maxInclusive).asQuery(); minInclusive, maxInclusive).asQuery();
break; break;
case FLOAT: case FLOAT:
query = new IntTrieRangeFilter(field, field, precisionStep, query = new IntTrieRangeFilter(field, precisionStep,
min == null ? null : TrieUtils.floatToSortableInt(Float.parseFloat(min)), min == null ? null : TrieUtils.floatToSortableInt(Float.parseFloat(min)),
max == null ? null : TrieUtils.floatToSortableInt(Float.parseFloat(max)), max == null ? null : TrieUtils.floatToSortableInt(Float.parseFloat(max)),
minInclusive, maxInclusive).asQuery(); minInclusive, maxInclusive).asQuery();
break; break;
case LONG: case LONG:
query = new LongTrieRangeFilter(field, field, precisionStep, query = new LongTrieRangeFilter(field, precisionStep,
min == null ? null : Long.parseLong(min), min == null ? null : Long.parseLong(min),
max == null ? null : Long.parseLong(max), max == null ? null : Long.parseLong(max),
minInclusive, maxInclusive).asQuery(); minInclusive, maxInclusive).asQuery();
break; break;
case DOUBLE: case DOUBLE:
query = new LongTrieRangeFilter(field, field, precisionStep, query = new LongTrieRangeFilter(field, precisionStep,
min == null ? null : TrieUtils.doubleToSortableLong(Double.parseDouble(min)), min == null ? null : TrieUtils.doubleToSortableLong(Double.parseDouble(min)),
max == null ? null : TrieUtils.doubleToSortableLong(Double.parseDouble(max)), max == null ? null : TrieUtils.doubleToSortableLong(Double.parseDouble(max)),
minInclusive, maxInclusive).asQuery(); minInclusive, maxInclusive).asQuery();
break; break;
case DATE: case DATE:
query = new LongTrieRangeFilter(field, field, precisionStep, query = new LongTrieRangeFilter(field, precisionStep,
min == null ? null : dateField.parseMath(null, min).getTime(), min == null ? null : dateField.parseMath(null, min).getTime(),
max == null ? null : dateField.parseMath(null, max).getTime(), max == null ? null : dateField.parseMath(null, max).getTime(),
minInclusive, maxInclusive).asQuery(); minInclusive, maxInclusive).asQuery();

View File

@ -56,6 +56,13 @@ public class TestTrie extends AbstractSolrTestCase {
assertQ("Range filter tint:[-9 to *] must match 20 documents", req("q", "*:*", "fq", "tint:[-10 TO *]"), "//*[@numFound='20']"); assertQ("Range filter tint:[-9 to *] must match 20 documents", req("q", "*:*", "fq", "tint:[-10 TO *]"), "//*[@numFound='20']");
assertQ("Range filter tint:[* to 9] must match 20 documents", req("q", "*:*", "fq", "tint:[* TO 10]"), "//*[@numFound='20']"); assertQ("Range filter tint:[* to 9] must match 20 documents", req("q", "*:*", "fq", "tint:[* TO 10]"), "//*[@numFound='20']");
assertQ("Range filter tint:[* to *] must match 20 documents", req("q", "*:*", "fq", "tint:[* TO *]"), "//*[@numFound='20']"); assertQ("Range filter tint:[* to *] must match 20 documents", req("q", "*:*", "fq", "tint:[* TO *]"), "//*[@numFound='20']");
// Sorting
assertQ("Sort descending does not work correctly on tint fields", req("q", "*:*", "sort", "tint desc"), "//*[@numFound='20']", "//int[@name='tint'][.='9']");
assertQ("Sort ascending does not work correctly on tint fields", req("q", "*:*", "sort", "tint asc"), "//*[@numFound='20']", "//int[@name='tint'][.='-10']");
// Function queries
assertQ("Function queries does not work correctly on tint fields", req("q", "_val_:\"sum(tint,1)\""), "//*[@numFound='20']", "//int[@name='tint'][.='9']");
} }
public void testTrieTermQuery() throws Exception { public void testTrieTermQuery() throws Exception {
@ -90,6 +97,13 @@ public class TestTrie extends AbstractSolrTestCase {
assertQ("Range filter must match only 5 documents", req, "//*[@numFound='9']"); assertQ("Range filter must match only 5 documents", req, "//*[@numFound='9']");
req = req("q", "*:*", "fq", "tfloat:[0 TO *]"); req = req("q", "*:*", "fq", "tfloat:[0 TO *]");
assertQ("Range filter must match 10 documents", req, "//*[@numFound='10']"); assertQ("Range filter must match 10 documents", req, "//*[@numFound='10']");
// Sorting
assertQ("Sort descending does not work correctly on tfloat fields", req("q", "*:*", "sort", "tfloat desc"), "//*[@numFound='10']", "//float[@name='tfloat'][.='2519.9102']");
assertQ("Sort ascending does not work correctly on tfloat fields", req("q", "*:*", "sort", "tfloat asc"), "//*[@numFound='10']", "//float[@name='tfloat'][.='0.0']");
// Function queries
assertQ("Function queries does not work correctly on tfloat fields", req("q", "_val_:\"sum(tfloat,1.0)\""), "//*[@numFound='10']", "//float[@name='tfloat'][.='2519.9102']");
} }
public void testTrieLongRangeSearch() throws Exception { public void testTrieLongRangeSearch() throws Exception {
@ -101,6 +115,13 @@ public class TestTrie extends AbstractSolrTestCase {
SolrQueryRequest req = req("q", "*:*", "fq", fq); SolrQueryRequest req = req("q", "*:*", "fq", fq);
assertQ("Range filter must match only 5 documents", req, "//*[@numFound='6']"); assertQ("Range filter must match only 5 documents", req, "//*[@numFound='6']");
assertQ("Range filter tlong:[* to *] must match 10 documents", req("q", "*:*", "fq", "tlong:[* TO *]"), "//*[@numFound='10']"); assertQ("Range filter tlong:[* to *] must match 10 documents", req("q", "*:*", "fq", "tlong:[* TO *]"), "//*[@numFound='10']");
// Sorting
assertQ("Sort descending does not work correctly on tlong fields", req("q", "*:*", "sort", "tlong desc"), "//*[@numFound='10']", "//long[@name='tlong'][.='2147483656']");
assertQ("Sort ascending does not work correctly on tlong fields", req("q", "*:*", "sort", "tlong asc"), "//*[@numFound='10']", "//long[@name='tlong'][.='2147483647']");
// Function queries
assertQ("Function queries does not work correctly on tlong fields", req("q", "_val_:\"sum(tlong,1.0)\""), "//*[@numFound='10']", "//long[@name='tlong'][.='2147483656']");
} }
public void testTrieDoubleRangeSearch() throws Exception { public void testTrieDoubleRangeSearch() throws Exception {
@ -111,6 +132,13 @@ public class TestTrie extends AbstractSolrTestCase {
String fq = "tdouble:[" + Integer.MAX_VALUE * 2.33d + " TO " + (5l + Integer.MAX_VALUE) * 2.33d + "]"; String fq = "tdouble:[" + Integer.MAX_VALUE * 2.33d + " TO " + (5l + Integer.MAX_VALUE) * 2.33d + "]";
assertQ("Range filter must match only 5 documents", req("q", "*:*", "fq", fq), "//*[@numFound='6']"); assertQ("Range filter must match only 5 documents", req("q", "*:*", "fq", fq), "//*[@numFound='6']");
assertQ("Range filter tdouble:[* to *] must match 10 documents", req("q", "*:*", "fq", "tdouble:[* TO *]"), "//*[@numFound='10']"); assertQ("Range filter tdouble:[* to *] must match 10 documents", req("q", "*:*", "fq", "tdouble:[* TO *]"), "//*[@numFound='10']");
// Sorting
assertQ("Sort descending does not work correctly on tdouble fields", req("q", "*:*", "sort", "tdouble desc"), "//*[@numFound='10']", "//double[@name='tdouble'][.='5.0036369184800005E9']");
assertQ("Sort ascending does not work correctly on tdouble fields", req("q", "*:*", "sort", "tdouble asc"), "//*[@numFound='10']", "//double[@name='tdouble'][.='5.00363689751E9']");
// Function queries
assertQ("Function queries does not work correctly on tdouble fields", req("q", "_val_:\"sum(tdouble,1.0)\""), "//*[@numFound='10']", "//double[@name='tdouble'][.='5.0036369184800005E9']");
} }
public void testTrieDateRangeSearch() throws Exception { public void testTrieDateRangeSearch() throws Exception {
@ -147,6 +175,13 @@ public class TestTrie extends AbstractSolrTestCase {
assertU(commit()); assertU(commit());
assertQ("Term query must match only 1 document", req("q", "tdate:1995-12-31T23\\:59\\:59.999Z"), "//*[@numFound='1']"); assertQ("Term query must match only 1 document", req("q", "tdate:1995-12-31T23\\:59\\:59.999Z"), "//*[@numFound='1']");
assertQ("Term query must match only 1 document", req("q", "*:*", "fq", "tdate:1995-12-31T23\\:59\\:59.999Z"), "//*[@numFound='1']"); assertQ("Term query must match only 1 document", req("q", "*:*", "fq", "tdate:1995-12-31T23\\:59\\:59.999Z"), "//*[@numFound='1']");
// Sorting
assertQ("Sort descending does not work correctly on tdate fields", req("q", "*:*", "sort", "tdate desc"), "//*[@numFound='11']", "//date[@name='tdate'][.='2009-04-21T00:00:00Z']");
assertQ("Sort ascending does not work correctly on tdate fields", req("q", "*:*", "sort", "tdate asc"), "//*[@numFound='11']", "//date[@name='tdate'][.='2009-04-12T00:00:00Z']");
// Function queries
assertQ("Function queries does not work correctly on tdate fields", req("q", "_val_:\"sum(tdate,1.0)\""), "//*[@numFound='11']", "//date[@name='tdate'][.='2009-04-21T00:00:00Z']");
} }
public void testTrieDoubleRangeSearch_CustomPrecisionStep() throws Exception { public void testTrieDoubleRangeSearch_CustomPrecisionStep() throws Exception {

View File

@ -272,10 +272,10 @@
<field name="id" type="sfloat" indexed="true" stored="true" required="true" /> <field name="id" type="sfloat" indexed="true" stored="true" required="true" />
<field name="text" type="text" indexed="true" stored="false" /> <field name="text" type="text" indexed="true" stored="false" />
<field name="tint" type="tint" indexed="true" stored="false" /> <field name="tint" type="tint" indexed="true" stored="true" />
<field name="tfloat" type="tfloat" indexed="true" stored="false" /> <field name="tfloat" type="tfloat" indexed="true" stored="true" />
<field name="tlong" type="tlong" indexed="true" stored="false" /> <field name="tlong" type="tlong" indexed="true" stored="true" />
<field name="tdouble" type="tdouble" indexed="true" stored="false" /> <field name="tdouble" type="tdouble" indexed="true" stored="true" />
<field name="tdouble4" type="tdouble4" indexed="true" stored="false" /> <field name="tdouble4" type="tdouble4" indexed="true" stored="false" />