mirror of https://github.com/apache/lucene.git
LUCENE-1334: add Term(String fieldName) constructor that sets term text to empty string
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@687014 13f79535-47bb-0310-9956-ffa450edef68
parent bc47023b3a
commit e31a9da835
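The new single-argument constructor supports the common "seed a TermEnum at the first term of a field" idiom, which this commit rewrites throughout core and contrib. A minimal sketch of that idiom against the 2.x API follows; the Directory "dir" and the field name "contents" are placeholders for illustration, not part of the commit:

    // Assumes: import org.apache.lucene.index.*;  "dir" is an existing Directory.
    IndexReader reader = IndexReader.open(dir);
    try {
      TermEnum te = reader.terms(new Term("contents"));   // was: new Term("contents", "")
      try {
        Term t = te.term();
        while (t != null && t.field() == "contents") {     // field names are interned, so == is safe
          System.out.println(t.text() + " docFreq=" + te.docFreq());
          if (!te.next()) break;
          t = te.term();
        }
      } finally {
        te.close();
      }
    } finally {
      reader.close();
    }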
@@ -105,6 +105,9 @@ API Changes
     the provided byte[] result to getBinaryValue.  (Eks Dev via Mike
     McCandless)
 
+16. LUCENE-1334: Add new constructor for Term: Term(String fieldName)
+    which defaults term text to "".  (DM Smith via Mike McCandless)
+
 Bug fixes
 
  1. LUCENE-1134: Fixed BooleanQuery.rewrite to only optimize a single
@@ -138,7 +138,7 @@ public class QueryAutoStopWordAnalyzer extends Analyzer {
   public int addStopWords(IndexReader reader, String fieldName, int maxDocFreq) throws IOException {
     HashSet stopWords = new HashSet();
     String internedFieldName = fieldName.intern();
-    TermEnum te = reader.terms(new Term(fieldName, ""));
+    TermEnum te = reader.terms(new Term(fieldName));
     Term term = te.term();
     while (term != null) {
       if (term.field() != internedFieldName) {
@@ -704,7 +704,7 @@ public class MemoryIndex implements Serializable {
   ///////////////////////////////////////////////////////////////////////////////
   // Nested classes:
   ///////////////////////////////////////////////////////////////////////////////
-  private static final Term MATCH_ALL_TERM = new Term("", "");
+  private static final Term MATCH_ALL_TERM = new Term("");
 
   /**
    * Search support for Lucene framework integration; implements all methods
@@ -821,7 +821,7 @@ public class MemoryIndex implements Serializable {
       Term template = info.template;
       if (template == null) { // not yet cached?
         String fieldName = (String) sortedFields[pos].getKey();
-        template = new Term(fieldName, "");
+        template = new Term(fieldName);
         info.template = template;
       }
 
@@ -118,7 +118,7 @@ public class FieldNormModifier {
     if (sim == null)
       fakeNorms = SegmentReader.createFakeNorms(reader.maxDoc());
     try {
-      termEnum = reader.terms(new Term(field,""));
+      termEnum = reader.terms(new Term(field));
       try {
         termDocs = reader.termDocs();
         do {
@@ -114,7 +114,7 @@ public class LengthNormModifier {
       reader = IndexReader.open(dir);
       termCounts = new int[reader.maxDoc()];
       try {
-        termEnum = reader.terms(new Term(field,""));
+        termEnum = reader.terms(new Term(field));
        try {
          termDocs = reader.termDocs();
          do {
@@ -83,7 +83,7 @@ public class DuplicateFilter extends Filter
   {
 
     OpenBitSet bits=new OpenBitSet(reader.maxDoc()); //assume all are INvalid
-    Term startTerm=new Term(fieldName,"");
+    Term startTerm=new Term(fieldName);
     TermEnum te = reader.terms(startTerm);
     if(te!=null)
     {
@@ -123,7 +123,7 @@ public class DuplicateFilter extends Filter
 
     OpenBitSet bits=new OpenBitSet(reader.maxDoc());
     bits.set(0,reader.maxDoc()); //assume all are valid
-    Term startTerm=new Term(fieldName,"");
+    Term startTerm=new Term(fieldName);
     TermEnum te = reader.terms(startTerm);
     if(te!=null)
     {
@@ -106,7 +106,7 @@ public class FuzzyLikeThisQuery extends Query
       TokenStream ts=analyzer.tokenStream(f.fieldName,new StringReader(f.queryString));
       Token token=ts.next();
       int corpusNumDocs=reader.numDocs();
-      Term internSavingTemplateTerm =new Term(f.fieldName,""); //optimization to avoid constructing new Term() objects
+      Term internSavingTemplateTerm =new Term(f.fieldName); //optimization to avoid constructing new Term() objects
      HashSet processedTerms=new HashSet();
      while(token!=null)
      {
@@ -57,7 +57,7 @@ public class LuceneDictionary implements Dictionary {
 
     LuceneIterator() {
       try {
-        termEnum = reader.terms(new Term(field, ""));
+        termEnum = reader.terms(new Term(field));
       } catch (IOException e) {
         throw new RuntimeException(e);
       }
@@ -486,7 +486,7 @@ public class ParallelReader extends IndexReader {
       }
       while (fieldIterator.hasNext()) {
         field = (String) fieldIterator.next();
-        termEnum = ((IndexReader)fieldToReader.get(field)).terms(new Term(field, ""));
+        termEnum = ((IndexReader)fieldToReader.get(field)).terms(new Term(field));
         Term term = termEnum.term();
         if (term!=null && term.field()==field)
           return true;
@@ -35,6 +35,17 @@ public final class Term implements Comparable, java.io.Serializable {
   public Term(String fld, String txt) {
     this(fld, txt, true);
   }
 
+  /** Constructs a Term with the given field and empty text.
+   * This serves two purposes: 1) reuse of a Term with the same field.
+   * 2) pattern for a query.
+   *
+   * @param fld
+   */
+  public Term(String fld) {
+    this(fld, "", true);
+  }
+
   Term(String fld, String txt, boolean intern) {
     field = intern ? fld.intern() : fld;      // field names are interned
     text = txt;                               // unless already known to be
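The added constructor delegates to the existing package-private (field, text, intern) constructor with empty text, so the field name is still interned and the resulting Term is equal to what the replaced two-argument calls produced. A small sketch of that equivalence (the field name "title" is arbitrary, not from the patch):

    Term a = new Term("title");       // new one-argument form
    Term b = new Term("title", "");   // what call sites wrote before this commit
    // Same interned field and same (empty) text, so the two compare identically
    // and both sort before every real term in the "title" field.
    assert a.equals(b) && a.compareTo(b) == 0;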
@@ -45,7 +45,7 @@ class ExtendedFieldCacheImpl extends FieldCacheImpl implements ExtendedFieldCache
       LongParser parser = (LongParser) entry.custom;
       final long[] retArray = new long[reader.maxDoc()];
       TermDocs termDocs = reader.termDocs();
-      TermEnum termEnum = reader.terms (new Term(field, ""));
+      TermEnum termEnum = reader.terms (new Term(field));
       try {
         do {
           Term term = termEnum.term();
@@ -85,7 +85,7 @@ class ExtendedFieldCacheImpl extends FieldCacheImpl implements ExtendedFieldCache
       DoubleParser parser = (DoubleParser) entry.custom;
       final double[] retArray = new double[reader.maxDoc()];
       TermDocs termDocs = reader.termDocs();
-      TermEnum termEnum = reader.terms (new Term (field, ""));
+      TermEnum termEnum = reader.terms (new Term (field));
       try {
         do {
           Term term = termEnum.term();
@@ -115,7 +115,7 @@ class ExtendedFieldCacheImpl extends FieldCacheImpl implements ExtendedFieldCache
     protected Object createValue(IndexReader reader, Object fieldKey)
         throws IOException {
       String field = ((String)fieldKey).intern();
-      TermEnum enumerator = reader.terms (new Term (field, ""));
+      TermEnum enumerator = reader.terms (new Term (field));
       try {
         Term term = enumerator.term();
         if (term == null) {
@@ -176,7 +176,7 @@ implements FieldCache {
       ByteParser parser = (ByteParser) entry.custom;
       final byte[] retArray = new byte[reader.maxDoc()];
       TermDocs termDocs = reader.termDocs();
-      TermEnum termEnum = reader.terms (new Term (field, ""));
+      TermEnum termEnum = reader.terms (new Term (field));
       try {
         do {
           Term term = termEnum.term();
@@ -215,7 +215,7 @@ implements FieldCache {
       ShortParser parser = (ShortParser) entry.custom;
       final short[] retArray = new short[reader.maxDoc()];
       TermDocs termDocs = reader.termDocs();
-      TermEnum termEnum = reader.terms (new Term (field, ""));
+      TermEnum termEnum = reader.terms (new Term (field));
       try {
         do {
           Term term = termEnum.term();
@@ -254,7 +254,7 @@ implements FieldCache {
       IntParser parser = (IntParser) entry.custom;
      final int[] retArray = new int[reader.maxDoc()];
       TermDocs termDocs = reader.termDocs();
-      TermEnum termEnum = reader.terms (new Term (field, ""));
+      TermEnum termEnum = reader.terms (new Term (field));
       try {
         do {
           Term term = termEnum.term();
@@ -295,7 +295,7 @@ implements FieldCache {
       FloatParser parser = (FloatParser) entry.custom;
       final float[] retArray = new float[reader.maxDoc()];
       TermDocs termDocs = reader.termDocs();
-      TermEnum termEnum = reader.terms (new Term (field, ""));
+      TermEnum termEnum = reader.terms (new Term (field));
       try {
         do {
           Term term = termEnum.term();
@@ -327,7 +327,7 @@ implements FieldCache {
       String field = ((String) fieldKey).intern();
       final String[] retArray = new String[reader.maxDoc()];
       TermDocs termDocs = reader.termDocs();
-      TermEnum termEnum = reader.terms (new Term (field, ""));
+      TermEnum termEnum = reader.terms (new Term (field));
       try {
         do {
           Term term = termEnum.term();
@@ -360,7 +360,7 @@ implements FieldCache {
       final int[] retArray = new int[reader.maxDoc()];
       String[] mterms = new String[reader.maxDoc()+1];
       TermDocs termDocs = reader.termDocs();
-      TermEnum termEnum = reader.terms (new Term (field, ""));
+      TermEnum termEnum = reader.terms (new Term (field));
       int t = 0;  // current term number
 
       // an entry for documents that have no terms in this field
@@ -431,7 +431,7 @@ implements FieldCache {
     protected Object createValue(IndexReader reader, Object fieldKey)
         throws IOException {
       String field = ((String)fieldKey).intern();
-      TermEnum enumerator = reader.terms (new Term (field, ""));
+      TermEnum enumerator = reader.terms (new Term (field));
       try {
         Term term = enumerator.term();
         if (term == null) {
@@ -488,7 +488,7 @@ implements FieldCache {
       SortComparator comparator = (SortComparator) entry.custom;
       final Comparable[] retArray = new Comparable[reader.maxDoc()];
       TermDocs termDocs = reader.termDocs();
-      TermEnum termEnum = reader.terms (new Term (field, ""));
+      TermEnum termEnum = reader.terms (new Term (field));
       try {
         do {
           Term term = termEnum.term();
@@ -102,7 +102,7 @@ public class RangeFilter extends Filter {
         TermEnum enumerator =
             (null != lowerTerm
              ? reader.terms(new Term(fieldName, lowerTerm))
-             : reader.terms(new Term(fieldName,"")));
+             : reader.terms(new Term(fieldName)));
 
         try {
 
@@ -164,7 +164,7 @@ public class RangeFilter extends Filter {
         TermEnum enumerator =
             (null != lowerTerm
              ? reader.terms(new Term(fieldName, lowerTerm))
-             : reader.terms(new Term(fieldName,"")));
+             : reader.terms(new Term(fieldName)));
 
         try {
 
@@ -69,7 +69,7 @@ public class RangeQuery extends Query
       this.lowerTerm = lowerTerm;
     }
     else {
-      this.lowerTerm = new Term(upperTerm.field(), "");
+      this.lowerTerm = new Term(upperTerm.field());
     }
 
     this.upperTerm = upperTerm;
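In RangeQuery the empty-text Term acts as the open lower bound: when no lowerTerm is supplied, new Term(upperTerm.field()) sorts before every real term in the field, so the range effectively starts at the beginning of that field. A hedged usage sketch (field and bound values are illustrative only, not from the patch):

    // Matches every "year" term up to and including "2008"; no lower bound supplied.
    Query q = new RangeQuery(null, new Term("year", "2008"), true);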