LUCENE-3470: Changed Field constructor signature order to (value, fieldtype)

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1176773 13f79535-47bb-0310-9956-ffa450edef68
Christopher John Male 2011-09-28 08:07:16 +00:00
parent 1f731984ba
commit 8d28270460
70 changed files with 295 additions and 300 deletions

View File

@@ -437,7 +437,7 @@ If your usage fits one of those common cases you can simply
 instantiate the above class. To use the TYPE_STORED variant, do this
 instead:
 
-  Field f = new Field("field", StringField.TYPE_STORED, "value");
+  Field f = new Field("field", "value", StringField.TYPE_STORED);
 
 Alternatively, if an existing type is close to what you want but you
 need to make a few changes, you can copy that type and make changes:
@@ -472,7 +472,7 @@ If instead the value was stored:
 
 you can now do this:
 
-  new Field("field", StringField.TYPE_STORED, "value")
+  new Field("field", "value", StringField.TYPE_STORED)
 
 If you didn't omit norms:
@@ -482,7 +482,7 @@ you can now do this:
 
   FieldType ft = new FieldType(StringField.TYPE_STORED);
   ft.setOmitNorms(false);
-  new Field("field", ft, "value")
+  new Field("field", "value", ft)
 
 If you did this before (value can be String or Reader):
@@ -498,7 +498,7 @@ If instead the value was stored:
 
 you can now do this:
 
-  new Field("field", TextField.TYPE_STORED, value)
+  new Field("field", value, TextField.TYPE_STORED)
 
 If in addition you omit norms:
@@ -508,7 +508,7 @@ you can now do this:
 
   FieldType ft = new FieldType(TextField.TYPE_STORED);
   ft.setOmitNorms(true);
-  new Field("field", ft, value)
+  new Field("field", value, ft)
 
 If you did this before (bytes is a byte[]):
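Taken together, the migrated calls all read name, value, type. A minimal end-to-end sketch of the new order (assuming the org.apache.lucene.document package from this trunk revision; the class and field names here are illustrative, not part of the patch):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.FieldType;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.document.TextField;

    public class MigratedIndexing {
      public static Document makeDoc(String id, String body) {
        Document doc = new Document();
        // The value now comes before the FieldType.
        doc.add(new Field("id", id, StringField.TYPE_STORED));
        // Copy a built-in type and adjust it when no stock type fits.
        FieldType bodyType = new FieldType(TextField.TYPE_STORED);
        bodyType.setOmitNorms(true);
        doc.add(new Field("body", body, bodyType));
        return doc;
      }
    }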

View File

@@ -174,7 +174,7 @@ public class IndexFiles {
         // field that is indexed (i.e. searchable), but don't tokenize
         // the field into separate words and don't index term frequency
         // or positional information:
-        Field pathField = new Field("path", StringField.TYPE_STORED, file.getPath());
+        Field pathField = new Field("path", file.getPath(), StringField.TYPE_STORED);
         doc.add(pathField);
 
         // Add the last modified date of the file a field named "modified".
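The comment above distinguishes an untokenized, single-term field from ordinary analyzed text. A short sketch showing both kinds side by side under the reordered constructor (hypothetical file and contents variables; same trunk API assumed):

    import java.io.File;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.document.TextField;

    public class PathAndContents {
      public static Document forFile(File file, String contents) {
        Document doc = new Document();
        // Untokenized and stored: the whole path is one term.
        doc.add(new Field("path", file.getPath(), StringField.TYPE_STORED));
        // Tokenized and unstored: regular full-text content.
        doc.add(new TextField("contents", contents));
        return doc;
      }
    }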

View File

@@ -134,10 +134,10 @@ public class FormBasedXmlQueryDemo extends HttpServlet {
           //parse row and create a document
           StringTokenizer st = new StringTokenizer(line, "\t");
           Document doc = new Document();
-          doc.add(new Field("location", textNoNorms, st.nextToken()));
-          doc.add(new Field("salary", textNoNorms, st.nextToken()));
-          doc.add(new Field("type", textNoNorms, st.nextToken()));
-          doc.add(new Field("description", textNoNorms, st.nextToken()));
+          doc.add(new Field("location", st.nextToken(), textNoNorms));
+          doc.add(new Field("salary", st.nextToken(), textNoNorms));
+          doc.add(new Field("type", st.nextToken(), textNoNorms));
+          doc.add(new Field("description", st.nextToken(), textNoNorms));
           writer.addDocument(doc);
         }
         line = br.readLine();

View File

@@ -133,7 +133,7 @@ public abstract class BaseFragmentsBuilder implements FragmentsBuilder {
       ft.setStoreTermVectors(fieldInfo.storeTermVector);
       ft.setStoreTermVectorOffsets(fieldInfo.storeOffsetWithTermVector);
       ft.setStoreTermVectorPositions(fieldInfo.storePositionWithTermVector);
-      fields.add(new Field(fieldInfo.name, ft, new String(b, "UTF-8")));
+      fields.add(new Field(fieldInfo.name, new String(b, "UTF-8"), ft));
     } else {
       in.seek(in.getFilePointer() + numUTF8Bytes);
     }

View File

@@ -64,7 +64,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
       customType.setStoreTermVectorOffsets(true);
       customType.setStoreTermVectorPositions(true);
       customType.setStoreTermVectors(true);
-      document.add(new Field(FIELD, customType, new TokenStreamConcurrent()));
+      document.add(new Field(FIELD, new TokenStreamConcurrent(), customType));
       indexWriter.addDocument(document);
     } finally {
       indexWriter.close();
@@ -112,7 +112,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
       customType.setStoreTermVectorOffsets(true);
       customType.setStoreTermVectorPositions(true);
       customType.setStoreTermVectors(true);
-      document.add(new Field(FIELD, customType, new TokenStreamConcurrent()));
+      document.add(new Field(FIELD, new TokenStreamConcurrent(), customType));
       indexWriter.addDocument(document);
     } finally {
       indexWriter.close();
@@ -187,7 +187,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
       customType.setStoreTermVectorOffsets(true);
       customType.setStoreTermVectorPositions(true);
       customType.setStoreTermVectors(true);
-      document.add(new Field(FIELD, customType, new TokenStreamSparse()));
+      document.add(new Field(FIELD, new TokenStreamSparse(), customType));
       indexWriter.addDocument(document);
     } finally {
       indexWriter.close();
@@ -233,7 +233,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
       FieldType customType = new FieldType(TextField.TYPE_STORED);
       customType.setStoreTermVectorOffsets(true);
       customType.setStoreTermVectors(true);
-      document.add(new Field(FIELD, customType, TEXT));
+      document.add(new Field(FIELD, TEXT, customType));
       indexWriter.addDocument(document);
     } finally {
       indexWriter.close();
@@ -277,7 +277,7 @@ public class HighlighterPhraseTest extends LuceneTestCase {
       customType.setStoreTermVectorOffsets(true);
       customType.setStoreTermVectorPositions(true);
       customType.setStoreTermVectors(true);
-      document.add(new Field(FIELD, customType, new TokenStreamSparse()));
+      document.add(new Field(FIELD, new TokenStreamSparse(), customType));
       indexWriter.addDocument(document);
     } finally {
       indexWriter.close();

View File

@@ -1625,7 +1625,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
   private Document doc( String f, String v ){
     Document doc = new Document();
-    doc.add( new Field( f, TextField.TYPE_STORED, v));
+    doc.add( new Field( f, v, TextField.TYPE_STORED));
     return doc;
   }
@@ -1776,7 +1776,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
   private void addDoc(IndexWriter writer, String text) throws IOException {
     Document d = new Document();
-    Field f = new Field(FIELD_NAME, TextField.TYPE_STORED, text);
+    Field f = new Field(FIELD_NAME, text, TextField.TYPE_STORED);
     d.add(f);
     writer.addDocument(d);

View File

@@ -109,7 +109,7 @@ public class TokenSourcesTest extends LuceneTestCase {
       FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
       customType.setStoreTermVectors(true);
       customType.setStoreTermVectorOffsets(true);
-      document.add(new Field(FIELD, customType, new TokenStreamOverlap()));
+      document.add(new Field(FIELD, new TokenStreamOverlap(), customType));
       indexWriter.addDocument(document);
     } finally {
       indexWriter.close();
@@ -158,7 +158,7 @@ public class TokenSourcesTest extends LuceneTestCase {
       customType.setStoreTermVectors(true);
       customType.setStoreTermVectorOffsets(true);
       customType.setStoreTermVectorPositions(true);
-      document.add(new Field(FIELD, customType, new TokenStreamOverlap()));
+      document.add(new Field(FIELD, new TokenStreamOverlap(), customType));
       indexWriter.addDocument(document);
     } finally {
       indexWriter.close();
@@ -206,7 +206,7 @@ public class TokenSourcesTest extends LuceneTestCase {
       FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
       customType.setStoreTermVectors(true);
       customType.setStoreTermVectorOffsets(true);
-      document.add(new Field(FIELD, customType, new TokenStreamOverlap()));
+      document.add(new Field(FIELD, new TokenStreamOverlap(), customType));
       indexWriter.addDocument(document);
     } finally {
       indexWriter.close();
@@ -255,7 +255,7 @@ public class TokenSourcesTest extends LuceneTestCase {
       FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
       customType.setStoreTermVectors(true);
       customType.setStoreTermVectorOffsets(true);
-      document.add(new Field(FIELD, customType, new TokenStreamOverlap()));
+      document.add(new Field(FIELD, new TokenStreamOverlap(), customType));
       indexWriter.addDocument(document);
     } finally {
       indexWriter.close();

View File

@@ -359,7 +359,7 @@ public abstract class AbstractTestCase extends LuceneTestCase {
     customType.setStoreTermVectorOffsets(true);
     customType.setStoreTermVectorPositions(true);
     for( String value: values ) {
-      doc.add( new Field( F, customType, value ) );
+      doc.add( new Field( F, value, customType) );
     }
     writer.addDocument( doc );
     writer.close();
@@ -377,7 +377,7 @@ public abstract class AbstractTestCase extends LuceneTestCase {
     customType.setStoreTermVectorOffsets(true);
     customType.setStoreTermVectorPositions(true);
     for( String value: values ) {
-      doc.add( new Field( F, customType, value ));
+      doc.add( new Field( F, value, customType));
       //doc.add( new Field( F, value, Store.YES, Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
     }
     writer.addDocument( doc );

View File

@@ -142,7 +142,7 @@ public class SimpleFragmentsBuilderTest extends AbstractTestCase {
     customType.setStoreTermVectors(true);
     customType.setStoreTermVectorOffsets(true);
     customType.setStoreTermVectorPositions(true);
-    doc.add( new Field( F, customType, "aaa" ) );
+    doc.add( new Field( F, "aaa", customType) );
     //doc.add( new Field( F, "aaa", Store.NO, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
     writer.addDocument( doc );
     writer.close();

View File

@@ -211,39 +211,39 @@ public class TestIndicesEquals extends LuceneTestCase {
       customType.setStoreTermVectorOffsets(true);
       customType.setStoreTermVectorPositions(true);
       //document.add(new Field("a", i + " Do you really want to go and live in that house all winter?", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-      document.add(new Field("a", customType, i + " Do you really want to go and live in that house all winter?"));
+      document.add(new Field("a", i + " Do you really want to go and live in that house all winter?", customType));
       if (i > 0) {
         //document.add(new Field("b0", i + " All work and no play makes Jack a dull boy", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-        document.add(new Field("b0", customType, i + " All work and no play makes Jack a dull boy"));
+        document.add(new Field("b0", i + " All work and no play makes Jack a dull boy", customType));
         //document.add(new Field("b1", i + " All work and no play makes Jack a dull boy", Field.Store.YES, Field.Index.NOT_ANALYZED_NO_NORMS, Field.TermVector.NO));
         FieldType customType2 = new FieldType(TextField.TYPE_STORED);
         customType2.setTokenized(false);
         customType2.setOmitNorms(true);
-        document.add(new Field("b1", customType2, i + " All work and no play makes Jack a dull boy"));
+        document.add(new Field("b1", i + " All work and no play makes Jack a dull boy", customType2));
         //document.add(new Field("b2", i + " All work and no play makes Jack a dull boy", Field.Store.NO, Field.Index.NOT_ANALYZED, Field.TermVector.NO));
         FieldType customType3 = new FieldType(TextField.TYPE_UNSTORED);
         customType3.setTokenized(false);
-        document.add(new Field("b1", customType3, i + " All work and no play makes Jack a dull boy"));
+        document.add(new Field("b1", i + " All work and no play makes Jack a dull boy", customType3));
         //document.add(new Field("b3", i + " All work and no play makes Jack a dull boy", Field.Store.YES, Field.Index.NO, Field.TermVector.NO));
         FieldType customType4 = new FieldType(TextField.TYPE_STORED);
         customType4.setIndexed(false);
         customType4.setTokenized(false);
-        document.add(new Field("b1", customType4, i + " All work and no play makes Jack a dull boy"));
+        document.add(new Field("b1", i + " All work and no play makes Jack a dull boy", customType4));
         if (i > 1) {
           //document.add(new Field("c", i + " Redrum redrum", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-          document.add(new Field("c", customType, i + " Redrum redrum"));
+          document.add(new Field("c", i + " Redrum redrum", customType));
           if (i > 2) {
             //document.add(new Field("d", i + " Hello Danny, come and play with us... forever and ever. and ever.", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
-            document.add(new Field("d", customType, i + " Hello Danny, come and play with us... forever and ever. and ever."));
+            document.add(new Field("d", i + " Hello Danny, come and play with us... forever and ever. and ever.", customType));
             if (i > 3) {
              //Field f = new Field("e", i + " Heres Johnny!", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
              //f.setOmitNorms(true);
              FieldType customType5 = new FieldType(TextField.TYPE_UNSTORED);
              customType5.setOmitNorms(true);
-             Field f = new Field("e", customType5, i + " Heres Johnny!");
+             Field f = new Field("e", i + " Heres Johnny!", customType5);
              document.add(f);
              if (i > 4) {
                final List<Token> tokens = new ArrayList<Token>(2);

View File

@@ -90,7 +90,7 @@ public class FieldSelectorVisitor extends StoredFieldVisitor {
         ft.setStoreTermVectors(fieldInfo.storeTermVector);
         ft.setStoreTermVectorOffsets(fieldInfo.storeOffsetWithTermVector);
         ft.setStoreTermVectorPositions(fieldInfo.storePositionWithTermVector);
-        doc.add(new Field(fieldInfo.name, ft, new String(b, "UTF-8")));
+        doc.add(new Field(fieldInfo.name, new String(b, "UTF-8"), ft));
         return accept != FieldSelectorResult.LOAD;
       case LAZY_LOAD:
       case LATENT:

View File

@@ -113,10 +113,10 @@ public class TestIndexSplitter extends LuceneTestCase {
     Directory fsDir = newFSDirectory(indexPath);
     IndexWriter indexWriter = new IndexWriter(fsDir, iwConfig);
     Document doc = new Document();
-    doc.add(new Field("content", StringField.TYPE_STORED, "doc 1"));
+    doc.add(new Field("content", "doc 1", StringField.TYPE_STORED));
     indexWriter.addDocument(doc);
     doc = new Document();
-    doc.add(new Field("content", StringField.TYPE_STORED, "doc 2"));
+    doc.add(new Field("content", "doc 2", StringField.TYPE_STORED));
     indexWriter.addDocument(doc);
     indexWriter.close();
     fsDir.close();

View File

@@ -31,16 +31,16 @@ public final class BinaryField extends Field {
 
   /** Creates a new BinaryField */
   public BinaryField(String name, byte[] value) {
-    super(name, BinaryField.TYPE_STORED, value);
+    super(name, value, BinaryField.TYPE_STORED);
   }
 
   /** Creates a new BinaryField */
   public BinaryField(String name, byte[] value, int offset, int length) {
-    super(name, BinaryField.TYPE_STORED, value, offset, length);
+    super(name, value, offset, length, BinaryField.TYPE_STORED);
   }
 
   /** Creates a new BinaryField */
   public BinaryField(String name, BytesRef bytes) {
-    super(name, BinaryField.TYPE_STORED, bytes);
+    super(name, bytes, BinaryField.TYPE_STORED);
   }
 }
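All three convenience constructors now pass the bytes ahead of BinaryField.TYPE_STORED. A quick usage sketch (BytesRef assumed to come from org.apache.lucene.util, as elsewhere in this patch):

    import org.apache.lucene.document.BinaryField;
    import org.apache.lucene.util.BytesRef;

    public class BinaryFieldSketch {
      public static void main(String[] args) {
        byte[] payload = { 1, 2, 3, 4, 5 };
        // Whole array, a slice of it, or a BytesRef wrapper:
        BinaryField whole = new BinaryField("data", payload);
        BinaryField slice = new BinaryField("data", payload, 1, 3);
        BinaryField wrapped = new BinaryField("data", new BytesRef(payload));
        System.out.println(whole.name() + ", " + slice.name() + ", " + wrapped.name());
      }
    }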

View File

@@ -82,8 +82,8 @@ public class DocumentStoredFieldVisitor extends StoredFieldVisitor {
         ft.setOmitNorms(fieldInfo.omitNorms);
         ft.setIndexOptions(fieldInfo.indexOptions);
         doc.add(new Field(fieldInfo.name,
-                          ft,
-                          new String(b, "UTF-8")));
+                          new String(b, "UTF-8"), ft
+                          ));
       } else {
         in.seek(in.getFilePointer() + numUTF8Bytes);
       }

View File

@@ -60,7 +60,7 @@ public class Field implements IndexableField {
     this.type = type;
   }
 
-  public Field(String name, IndexableFieldType type, Reader reader) {
+  public Field(String name, Reader reader, IndexableFieldType type) {
     if (name == null) {
       throw new NullPointerException("name cannot be null");
     }
@@ -76,7 +76,7 @@ public class Field implements IndexableField {
     this.type = type;
   }
 
-  public Field(String name, IndexableFieldType type, TokenStream tokenStream) {
+  public Field(String name, TokenStream tokenStream, IndexableFieldType type) {
     if (name == null) {
       throw new NullPointerException("name cannot be null");
     }
@@ -93,15 +93,15 @@ public class Field implements IndexableField {
     this.type = type;
   }
 
-  public Field(String name, IndexableFieldType type, byte[] value) {
-    this(name, type, value, 0, value.length);
+  public Field(String name, byte[] value, IndexableFieldType type) {
+    this(name, value, 0, value.length, type);
   }
 
-  public Field(String name, IndexableFieldType type, byte[] value, int offset, int length) {
-    this(name, type, new BytesRef(value, offset, length));
+  public Field(String name, byte[] value, int offset, int length, IndexableFieldType type) {
+    this(name, new BytesRef(value, offset, length), type);
   }
 
-  public Field(String name, IndexableFieldType type, BytesRef bytes) {
+  public Field(String name, BytesRef bytes, IndexableFieldType type) {
     if (type.indexed() && !type.tokenized()) {
       throw new IllegalArgumentException("Non-tokenized fields must use String values");
     }
@@ -111,7 +111,7 @@ public class Field implements IndexableField {
     this.name = name;
   }
 
-  public Field(String name, IndexableFieldType type, String value) {
+  public Field(String name, String value, IndexableFieldType type) {
     if (name == null) {
       throw new IllegalArgumentException("name cannot be null");
     }
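Every overload keeps the field name first and the IndexableFieldType last; only the middle value argument (String, Reader, TokenStream, byte[], or BytesRef) varies. A rough sketch touching most shapes against the signatures above (the stored-only binary type mirrors other usages in this patch; names are illustrative):

    import java.io.StringReader;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.FieldType;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.util.BytesRef;

    public class FieldOverloads {
      public static void demo() {
        // String value with a stock analyzed type:
        new Field("s", "plain string", TextField.TYPE_STORED);
        // Reader value (cannot be stored, so use the unstored type):
        new Field("r", new StringReader("streamed text"), TextField.TYPE_UNSTORED);

        // Binary values take a stored-only, non-indexed type:
        FieldType binType = new FieldType();
        binType.setStored(true);
        byte[] raw = { 7, 8, 9 };
        new Field("b", raw, binType);                 // whole array
        new Field("b2", raw, 0, 2, binType);          // slice
        new Field("b3", new BytesRef(raw), binType);  // pre-wrapped
      }
    }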

View File

@@ -54,7 +54,7 @@ public final class StringField extends Field {
 
   /** Creates a new un-stored StringField */
   public StringField(String name, String value) {
-    super(name, TYPE_UNSTORED, value);
+    super(name, value, TYPE_UNSTORED);
   }
 
   @Override
View File

@@ -50,16 +50,16 @@ public final class TextField extends Field {
 
   /** Creates a new un-stored TextField */
   public TextField(String name, Reader reader) {
-    super(name, TextField.TYPE_UNSTORED, reader);
+    super(name, reader, TextField.TYPE_UNSTORED);
   }
 
   /** Creates a new un-stored TextField */
   public TextField(String name, String value) {
-    super(name, TextField.TYPE_UNSTORED, value);
+    super(name, value, TextField.TYPE_UNSTORED);
   }
 
   /** Creates a new un-stored TextField */
   public TextField(String name, TokenStream stream) {
-    super(name, TextField.TYPE_UNSTORED, stream);
+    super(name, stream, TextField.TYPE_UNSTORED);
   }
 }
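These sugar classes just forward to the reordered super constructors, so each is equivalent to spelling out the Field call yourself. A sketch of the equivalent pairs (assuming TYPE_UNSTORED is visible on both classes, as the constructor bodies above suggest):

    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.document.TextField;

    public class SugarEquivalents {
      public static void demo() {
        // Un-stored single-term field, two equivalent spellings:
        Field a = new StringField("id", "doc-42");
        Field b = new Field("id", "doc-42", StringField.TYPE_UNSTORED);
        // Un-stored tokenized field, two equivalent spellings:
        Field c = new TextField("body", "some text");
        Field d = new Field("body", "some text", TextField.TYPE_UNSTORED);
      }
    }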

View File

@@ -188,12 +188,12 @@ public class PersistentSnapshotDeletionPolicy extends SnapshotDeletionPolicy {
     Document d = new Document();
     FieldType ft = new FieldType();
     ft.setStored(true);
-    d.add(new Field(SNAPSHOTS_ID, ft, ""));
+    d.add(new Field(SNAPSHOTS_ID, "", ft));
     for (Entry<String, String> e : super.getSnapshots().entrySet()) {
-      d.add(new Field(e.getKey(), ft, e.getValue()));
+      d.add(new Field(e.getKey(), e.getValue(), ft));
     }
     if (id != null) {
-      d.add(new Field(id, ft, segment));
+      d.add(new Field(id, segment, ft));
     }
     writer.addDocument(d);
     writer.commit();

View File

@@ -81,8 +81,8 @@ public abstract class CollationTestBase extends LuceneTestCase {
     IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(
         TEST_VERSION_CURRENT, analyzer));
     Document doc = new Document();
-    doc.add(new Field("content", TextField.TYPE_STORED, "\u0633\u0627\u0628"));
-    doc.add(new Field("body", StringField.TYPE_STORED, "body"));
+    doc.add(new Field("content", "\u0633\u0627\u0628", TextField.TYPE_STORED));
+    doc.add(new Field("body", "body", StringField.TYPE_STORED));
     writer.addDocument(doc);
     writer.close();
     IndexSearcher searcher = new IndexSearcher(ramDir, true);
@@ -116,7 +116,7 @@ public abstract class CollationTestBase extends LuceneTestCase {
     // orders the U+0698 character before the U+0633 character, so the single
     // index Term below should NOT be returned by a TermRangeQuery with a Farsi
     // Collator (or an Arabic one for the case when Farsi is not supported).
-    doc.add(new Field("content", TextField.TYPE_STORED, "\u0633\u0627\u0628"));
+    doc.add(new Field("content", "\u0633\u0627\u0628", TextField.TYPE_STORED));
     writer.addDocument(doc);
     writer.close();
     IndexSearcher searcher = new IndexSearcher(ramDir, true);
@@ -138,8 +138,8 @@ public abstract class CollationTestBase extends LuceneTestCase {
     IndexWriter writer = new IndexWriter(farsiIndex, new IndexWriterConfig(
         TEST_VERSION_CURRENT, analyzer));
     Document doc = new Document();
-    doc.add(new Field("content", TextField.TYPE_STORED, "\u0633\u0627\u0628"));
-    doc.add(new Field("body", StringField.TYPE_STORED, "body"));
+    doc.add(new Field("content", "\u0633\u0627\u0628", TextField.TYPE_STORED));
+    doc.add(new Field("body", "body", StringField.TYPE_STORED));
     writer.addDocument(doc);
     writer.close();
@@ -204,7 +204,7 @@ public abstract class CollationTestBase extends LuceneTestCase {
     for (int i = 0 ; i < sortData.length ; ++i) {
       Document doc = new Document();
-      doc.add(new Field("tracer", customType, sortData[i][0]));
+      doc.add(new Field("tracer", sortData[i][0], customType));
       doc.add(new TextField("contents", sortData[i][1]));
       if (sortData[i][2] != null)
         doc.add(new TextField("US", usAnalyzer.tokenStream("US", new StringReader(sortData[i][2]))));

View File

@@ -46,7 +46,7 @@ class DocHelper {
   public static Field textField1;
   static {
     customType = new FieldType(TextField.TYPE_STORED);
-    textField1 = new Field(TEXT_FIELD_1_KEY, customType, FIELD_1_TEXT);
+    textField1 = new Field(TEXT_FIELD_1_KEY, FIELD_1_TEXT, customType);
   }
 
   public static final FieldType customType2;
@@ -60,7 +60,7 @@ class DocHelper {
     customType2.setStoreTermVectors(true);
     customType2.setStoreTermVectorPositions(true);
     customType2.setStoreTermVectorOffsets(true);
-    textField2 = new Field(TEXT_FIELD_2_KEY, customType2, FIELD_2_TEXT);
+    textField2 = new Field(TEXT_FIELD_2_KEY, FIELD_2_TEXT, customType2);
   }
 
   public static final FieldType customType3;
@@ -71,14 +71,14 @@ class DocHelper {
   static {
     customType3 = new FieldType(TextField.TYPE_STORED);
     customType3.setOmitNorms(true);
-    textField3 = new Field(TEXT_FIELD_3_KEY, customType3, FIELD_3_TEXT);
+    textField3 = new Field(TEXT_FIELD_3_KEY, FIELD_3_TEXT, customType3);
   }
 
   public static final String KEYWORD_TEXT = "Keyword";
   public static final String KEYWORD_FIELD_KEY = "keyField";
   public static Field keyField;
   static {
-    keyField = new Field(KEYWORD_FIELD_KEY, StringField.TYPE_STORED, KEYWORD_TEXT);
+    keyField = new Field(KEYWORD_FIELD_KEY, KEYWORD_TEXT, StringField.TYPE_STORED);
   }
 
   public static final FieldType customType5;
@@ -89,7 +89,7 @@ class DocHelper {
     customType5 = new FieldType(TextField.TYPE_STORED);
     customType5.setOmitNorms(true);
     customType5.setTokenized(false);
-    noNormsField = new Field(NO_NORMS_KEY, customType5, NO_NORMS_TEXT);
+    noNormsField = new Field(NO_NORMS_KEY, NO_NORMS_TEXT, customType5);
   }
 
   public static final FieldType customType6;
@@ -99,7 +99,7 @@ class DocHelper {
   static {
     customType6 = new FieldType(TextField.TYPE_STORED);
     customType6.setIndexOptions(IndexOptions.DOCS_ONLY);
-    noTFField = new Field(NO_TF_KEY, customType6, NO_TF_TEXT);
+    noTFField = new Field(NO_TF_KEY, NO_TF_TEXT, customType6);
   }
 
   public static final FieldType customType7;
@@ -109,13 +109,13 @@ class DocHelper {
   static {
     customType7 = new FieldType();
     customType7.setStored(true);
-    unIndField = new Field(UNINDEXED_FIELD_KEY, customType7, UNINDEXED_FIELD_TEXT);
+    unIndField = new Field(UNINDEXED_FIELD_KEY, UNINDEXED_FIELD_TEXT, customType7);
   }
 
   public static final String UNSTORED_1_FIELD_TEXT = "unstored field text";
   public static final String UNSTORED_FIELD_1_KEY = "unStoredField1";
-  public static Field unStoredField1 = new Field(UNSTORED_FIELD_1_KEY, TextField.TYPE_UNSTORED, UNSTORED_1_FIELD_TEXT);
+  public static Field unStoredField1 = new Field(UNSTORED_FIELD_1_KEY, UNSTORED_1_FIELD_TEXT, TextField.TYPE_UNSTORED);
 
   public static final FieldType customType8;
   public static final String UNSTORED_2_FIELD_TEXT = "unstored field text";
@@ -124,7 +124,7 @@ class DocHelper {
   static {
     customType8 = new FieldType(TextField.TYPE_UNSTORED);
     customType8.setStoreTermVectors(true);
-    unStoredField2 = new Field(UNSTORED_FIELD_2_KEY, customType8, UNSTORED_2_FIELD_TEXT);
+    unStoredField2 = new Field(UNSTORED_FIELD_2_KEY, UNSTORED_2_FIELD_TEXT, customType8);
   }
 
   public static final String LAZY_FIELD_BINARY_KEY = "lazyFieldBinary";
@@ -133,7 +133,7 @@ class DocHelper {
   public static final String LAZY_FIELD_KEY = "lazyField";
   public static final String LAZY_FIELD_TEXT = "These are some field bytes";
-  public static Field lazyField = new Field(LAZY_FIELD_KEY, customType, LAZY_FIELD_TEXT);
+  public static Field lazyField = new Field(LAZY_FIELD_KEY, LAZY_FIELD_TEXT, customType);
 
   public static final String LARGE_LAZY_FIELD_KEY = "largeLazyField";
   public static String LARGE_LAZY_FIELD_TEXT;
@@ -142,13 +142,13 @@ class DocHelper {
   //From Issue 509
   public static final String FIELD_UTF1_TEXT = "field one \u4e00text";
   public static final String TEXT_FIELD_UTF1_KEY = "textField1Utf8";
-  public static Field textUtfField1 = new Field(TEXT_FIELD_UTF1_KEY, customType, FIELD_UTF1_TEXT);
+  public static Field textUtfField1 = new Field(TEXT_FIELD_UTF1_KEY, FIELD_UTF1_TEXT, customType);
 
   public static final String FIELD_UTF2_TEXT = "field field field \u4e00two text";
   //Fields will be lexicographically sorted. So, the order is: field, text, two
   public static final int [] FIELD_UTF2_FREQS = {3, 1, 1};
   public static final String TEXT_FIELD_UTF2_KEY = "textField2Utf8";
-  public static Field textUtfField2 = new Field(TEXT_FIELD_UTF2_KEY, customType2, FIELD_UTF2_TEXT);
+  public static Field textUtfField2 = new Field(TEXT_FIELD_UTF2_KEY, FIELD_UTF2_TEXT, customType2);
@@ -200,7 +200,7 @@ class DocHelper {
     lazyFieldBinary = new BinaryField(LAZY_FIELD_BINARY_KEY, LAZY_FIELD_BINARY_BYTES);
     fields[fields.length - 2] = lazyFieldBinary;
     LARGE_LAZY_FIELD_TEXT = buffer.toString();
-    largeLazyField = new Field(LARGE_LAZY_FIELD_KEY, customType, LARGE_LAZY_FIELD_TEXT);
+    largeLazyField = new Field(LARGE_LAZY_FIELD_KEY, LARGE_LAZY_FIELD_TEXT, customType);
     fields[fields.length - 1] = largeLazyField;
     for (int i=0; i<fields.length; i++) {
       IndexableField f = fields[i];
@@ -304,15 +304,15 @@ class DocHelper {
     customType1.setStoreTermVectorOffsets(true);
 
     final Document doc = new Document();
-    doc.add(new Field("id", customType1, Integer.toString(n)));
-    doc.add(new Field("indexname", customType1, indexName));
+    doc.add(new Field("id", Integer.toString(n), customType1));
+    doc.add(new Field("indexname", indexName, customType1));
     sb.append("a");
     sb.append(n);
-    doc.add(new Field("field1", customType, sb.toString()));
+    doc.add(new Field("field1", sb.toString(), customType));
     sb.append(" b");
     sb.append(n);
     for (int i = 1; i < numFields; i++) {
-      doc.add(new Field("field" + (i + 1), customType, sb.toString()));
+      doc.add(new Field("field" + (i + 1), sb.toString(), customType));
     }
     return doc;
   }

View File

@@ -128,16 +128,16 @@ public class LineFileDocs implements Closeable {
       ft.setStoreTermVectorOffsets(true);
       ft.setStoreTermVectorPositions(true);
-      titleTokenized = new Field("titleTokenized", ft, "");
+      titleTokenized = new Field("titleTokenized", "", ft);
       doc.add(titleTokenized);
 
-      body = new Field("body", ft, "");
+      body = new Field("body", "", ft);
       doc.add(body);
 
-      id = new Field("docid", StringField.TYPE_STORED, "");
+      id = new Field("docid", "", StringField.TYPE_STORED);
       doc.add(id);
 
-      date = new Field("date", StringField.TYPE_STORED, "");
+      date = new Field("date", "", StringField.TYPE_STORED);
       doc.add(date);
     }
   }

View File

@@ -1159,7 +1159,7 @@ public abstract class LuceneTestCase extends Assert {
   public static Field newField(Random random, String name, String value, FieldType type) {
     if (usually(random) || !type.indexed()) {
       // most of the time, don't modify the params
-      return new Field(name, type, value);
+      return new Field(name, value, type);
     }
 
     FieldType newType = new FieldType(type);
@@ -1186,7 +1186,7 @@ public abstract class LuceneTestCase extends Assert {
     }
     */
-    return new Field(name, newType, value);
+    return new Field(name, value, newType);
   }
 
   /** return a random Locale from the available locales on the system */
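Test code generally goes through this helper so the runner can occasionally swap in a copied FieldType with randomized flags. A sketch of a typical call site (a hypothetical test; it assumes the static random field and newField are inherited from LuceneTestCase, as the hunk above and other tests in this patch suggest):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.util.LuceneTestCase;

    public class MyRandomizedTest extends LuceneTestCase {
      public void testAddsBody() {
        Document doc = new Document();
        // Usually returns new Field("body", value, type) unchanged;
        // sometimes the copied FieldType gets randomly perturbed flags.
        doc.add(newField(random, "body", "some test text", TextField.TYPE_STORED));
        assertEquals("some test text", doc.get("body"));
      }
    }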

View File

@@ -513,10 +513,7 @@ public class _TestUtil {
     for(IndexableField f : doc1) {
       Field field1 = (Field) f;
 
-      Field field2 = new Field(field1.name(),
-                               field1.fieldType(),
-                               field1.stringValue()
-                               );
+      Field field2 = new Field(field1.name(), field1.stringValue(), field1.fieldType());
       doc2.add(field2);
     }

View File

@@ -38,7 +38,7 @@ public class TestBinaryDocument extends LuceneTestCase {
     FieldType ft = new FieldType();
     ft.setStored(true);
     IndexableField binaryFldStored = new BinaryField("binaryStored", binaryValStored.getBytes());
-    IndexableField stringFldStored = new Field("stringStored", ft, binaryValStored);
+    IndexableField stringFldStored = new Field("stringStored", binaryValStored, ft);
 
     Document doc = new Document();

View File

@@ -42,7 +42,7 @@ public class TestDocument extends LuceneTestCase {
     FieldType ft = new FieldType();
     ft.setStored(true);
-    IndexableField stringFld = new Field("string", ft, binaryVal);
+    IndexableField stringFld = new Field("string", binaryVal, ft);
     IndexableField binaryFld = new BinaryField("binary", binaryVal.getBytes());
     IndexableField binaryFld2 = new BinaryField("binary", binaryVal2.getBytes());
@@ -121,20 +121,20 @@ public class TestDocument extends LuceneTestCase {
   public void testConstructorExceptions() {
     FieldType ft = new FieldType();
     ft.setStored(true);
-    new Field("name", ft, "value"); // okay
+    new Field("name", "value", ft); // okay
     new StringField("name", "value"); // okay
     try {
-      new Field("name", new FieldType(), "value");
+      new Field("name", "value", new FieldType());
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception
     }
-    new Field("name", ft, "value"); // okay
+    new Field("name", "value", ft); // okay
     try {
       FieldType ft2 = new FieldType();
       ft2.setStored(true);
       ft2.setStoreTermVectors(true);
-      new Field("name", ft2, "value");
+      new Field("name", "value", ft2);
       fail();
     } catch (IllegalArgumentException e) {
       // expected exception
@@ -195,12 +195,12 @@ public class TestDocument extends LuceneTestCase {
     Document doc = new Document();
     FieldType stored = new FieldType();
     stored.setStored(true);
-    doc.add(new Field("keyword", StringField.TYPE_STORED, "test1"));
-    doc.add(new Field("keyword", StringField.TYPE_STORED, "test2"));
-    doc.add(new Field("text", TextField.TYPE_STORED, "test1"));
-    doc.add(new Field("text", TextField.TYPE_STORED, "test2"));
-    doc.add(new Field("unindexed", stored, "test1"));
-    doc.add(new Field("unindexed", stored, "test2"));
+    doc.add(new Field("keyword", "test1", StringField.TYPE_STORED));
+    doc.add(new Field("keyword", "test2", StringField.TYPE_STORED));
+    doc.add(new Field("text", "test1", TextField.TYPE_STORED));
+    doc.add(new Field("text", "test2", TextField.TYPE_STORED));
+    doc.add(new Field("unindexed", "test1", stored));
+    doc.add(new Field("unindexed", "test2", stored));
     doc
         .add(new TextField("unstored", "test1"));
     doc
@@ -239,10 +239,10 @@ public class TestDocument extends LuceneTestCase {
   public void testFieldSetValue() throws Exception {
-    Field field = new Field("id", StringField.TYPE_STORED, "id1");
+    Field field = new Field("id", "id1", StringField.TYPE_STORED);
     Document doc = new Document();
     doc.add(field);
-    doc.add(new Field("keyword", StringField.TYPE_STORED, "test"));
+    doc.add(new Field("keyword", "test", StringField.TYPE_STORED));
 
     Directory dir = newDirectory();
     RandomIndexWriter writer = new RandomIndexWriter(random, dir);
@@ -278,7 +278,7 @@ public class TestDocument extends LuceneTestCase {
   public void testFieldSetValueChangeBinary() {
     Field field1 = new BinaryField("field1", new byte[0]);
-    Field field2 = new Field("field2", TextField.TYPE_STORED, "");
+    Field field2 = new Field("field2", "", TextField.TYPE_STORED);
     try {
       field1.setValue("abc");
       fail("did not hit expected exception");

View File

@@ -66,7 +66,7 @@ public class Test2BPostings extends LuceneTestCase {
     FieldType ft = new FieldType(TextField.TYPE_UNSTORED);
     ft.setOmitNorms(true);
     ft.setIndexOptions(IndexOptions.DOCS_ONLY);
-    Field field = new Field("field", ft, new MyTokenStream());
+    Field field = new Field("field", new MyTokenStream(), ft);
     doc.add(field);
 
     final int numDocs = (Integer.MAX_VALUE / 26) + 1;

View File

@@ -180,7 +180,7 @@ public class Test2BTerms extends LuceneTestCase {
     FieldType customType = new FieldType(TextField.TYPE_STORED);
     customType.setIndexOptions(IndexOptions.DOCS_ONLY);
     customType.setOmitNorms(true);
-    Field field = new Field("field", customType, ts);
+    Field field = new Field("field", ts, customType);
     doc.add(field);
     //w.setInfoStream(System.out);
     final int numDocs = (int) (TERM_COUNT/TERMS_PER_DOC);

View File

@@ -1059,7 +1059,7 @@ public class TestAddIndexes extends LuceneTestCase {
       Document d = new Document();
       FieldType customType = new FieldType(TextField.TYPE_STORED);
       customType.setStoreTermVectors(true);
-      d.add(new Field("c", customType, "v"));
+      d.add(new Field("c", "v", customType));
       w.addDocument(d);
       w.close();
     }
@@ -1097,7 +1097,7 @@ public class TestAddIndexes extends LuceneTestCase {
         new MockAnalyzer(random)).setMergePolicy(lmp2);
     IndexWriter w2 = new IndexWriter(src, conf2);
     Document doc = new Document();
-    doc.add(new Field("c", TextField.TYPE_STORED, "some text"));
+    doc.add(new Field("c", "some text", TextField.TYPE_STORED));
     w2.addDocument(doc);
     doc = new Document();
     doc.add(new StringField("d", "delete"));

View File

@@ -95,7 +95,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
       // Update all 100 docs...
       for(int i=0; i<100; i++) {
         Document d = new Document();
-        d.add(new Field("id", StringField.TYPE_STORED, Integer.toString(i)));
+        d.add(new Field("id", Integer.toString(i), StringField.TYPE_STORED));
         d.add(new TextField("contents", English.intToEnglish(i+10*count)));
         writer.updateDocument(new Term("id", Integer.toString(i)), d);
       }

View File

@@ -597,15 +597,15 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
     {
       Document doc = new Document();
       doc.add(new TextField("content", "aaa"));
-      doc.add(new Field("id", StringField.TYPE_STORED, Integer.toString(id)));
+      doc.add(new Field("id", Integer.toString(id), StringField.TYPE_STORED));
       FieldType customType2 = new FieldType(TextField.TYPE_STORED);
       customType2.setStoreTermVectors(true);
       customType2.setStoreTermVectorPositions(true);
       customType2.setStoreTermVectorOffsets(true);
-      doc.add(new Field("autf8", customType2, "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd"));
-      doc.add(new Field("utf8", customType2, "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd"));
-      doc.add(new Field("content2", customType2, "here is more content with aaa aaa aaa"));
-      doc.add(new Field("fie\u2C77ld", customType2, "field with non-ascii name"));
+      doc.add(new Field("autf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", customType2));
+      doc.add(new Field("utf8", "Lu\uD834\uDD1Ece\uD834\uDD60ne \u0000 \u2620 ab\ud917\udc17cd", customType2));
+      doc.add(new Field("content2", "here is more content with aaa aaa aaa", customType2));
+      doc.add(new Field("fie\u2C77ld", "field with non-ascii name", customType2));
       // add numeric fields, to test if flex preserves encoding
       doc.add(new NumericField("trieInt", 4).setIntValue(id));
       doc.add(new NumericField("trieLong", 4).setLongValue(id));
@@ -616,12 +616,12 @@ public class TestBackwardsCompatibility extends LuceneTestCase {
       Document doc = new Document();
       FieldType customType = new FieldType(TextField.TYPE_STORED);
       customType.setIndexOptions(IndexOptions.DOCS_ONLY);
-      Field f = new Field("content3", customType, "aaa");
+      Field f = new Field("content3", "aaa", customType);
       doc.add(f);
       FieldType customType2 = new FieldType();
       customType2.setStored(true);
       customType2.setIndexOptions(IndexOptions.DOCS_ONLY);
-      f = new Field("content4", customType2, "aaa");
+      f = new Field("content4", "aaa", customType2);
       doc.add(f);
       writer.addDocument(doc);
     }

View File

@@ -51,7 +51,7 @@ public class TestBinaryTerms extends LuceneTestCase {
       Document doc = new Document();
       FieldType customType = new FieldType();
       customType.setStored(true);
-      doc.add(new Field("id", customType, "" + i));
+      doc.add(new Field("id", "" + i, customType));
       doc.add(new TextField("bytes", tokenStream));
       iw.addDocument(doc);
     }

View File

@@ -39,8 +39,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
 
      Document d1 = new Document();
-     d1.add(new Field("f1", StringField.TYPE_STORED, "first field"));
-     d1.add(new Field("f2", StringField.TYPE_STORED, "second field"));
+     d1.add(new Field("f1", "first field", StringField.TYPE_STORED));
+     d1.add(new Field("f2", "second field", StringField.TYPE_STORED));
      writer.addDocument(d1);
 
      if (i == 1) {
@@ -54,7 +54,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
        FieldType customType2 = new FieldType(TextField.TYPE_STORED);
        customType2.setStoreTermVectors(true);
        d2.add(new TextField("f2", "second field"));
-       d2.add(new Field("f1", customType2, "first field"));
+       d2.add(new Field("f1", "first field", customType2));
        d2.add(new TextField("f3", "third field"));
        d2.add(new TextField("f4", "fourth field"));
        writer.addDocument(d2);
@@ -102,8 +102,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     IndexWriter writer = new IndexWriter(dir1, newIndexWriterConfig( TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(NoMergePolicy.COMPOUND_FILES));
     Document d1 = new Document();
-    d1.add(new Field("f1", TextField.TYPE_STORED, "first field"));
-    d1.add(new Field("f2", TextField.TYPE_STORED, "second field"));
+    d1.add(new Field("f1", "first field", TextField.TYPE_STORED));
+    d1.add(new Field("f2", "second field", TextField.TYPE_STORED));
     writer.addDocument(d1);
     writer.close();
@@ -112,10 +112,10 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     Document d2 = new Document();
     FieldType customType2 = new FieldType(TextField.TYPE_STORED);
     customType2.setStoreTermVectors(true);
-    d2.add(new Field("f2", TextField.TYPE_STORED, "second field"));
-    d2.add(new Field("f1", customType2, "first field"));
-    d2.add(new Field("f3", TextField.TYPE_STORED, "third field"));
-    d2.add(new Field("f4", TextField.TYPE_STORED, "fourth field"));
+    d2.add(new Field("f2", "second field", TextField.TYPE_STORED));
+    d2.add(new Field("f1", "first field", customType2));
+    d2.add(new Field("f3", "third field", TextField.TYPE_STORED));
+    d2.add(new Field("f4", "fourth field", TextField.TYPE_STORED));
     writer.addDocument(d2);
     writer.close();
@@ -168,8 +168,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
         TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergePolicy(
         NoMergePolicy.NO_COMPOUND_FILES));
     Document d = new Document();
-    d.add(new Field("f1", TextField.TYPE_STORED, "d1 first field"));
-    d.add(new Field("f2", TextField.TYPE_STORED, "d1 second field"));
+    d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
+    d.add(new Field("f2", "d1 second field", TextField.TYPE_STORED));
     writer.addDocument(d);
     writer.close();
     SegmentInfos sis = new SegmentInfos();
@@ -188,7 +188,7 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
         random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
            : NoMergePolicy.COMPOUND_FILES));
     Document d = new Document();
-    d.add(new Field("f1", TextField.TYPE_STORED, "d2 first field"));
+    d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
     d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
     writer.addDocument(d);
     writer.close();
@@ -212,8 +212,8 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
        random.nextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES
            : NoMergePolicy.COMPOUND_FILES));
     Document d = new Document();
-    d.add(new Field("f1", TextField.TYPE_STORED, "d3 first field"));
-    d.add(new Field("f2", TextField.TYPE_STORED, "d3 second field"));
+    d.add(new Field("f1", "d3 first field", TextField.TYPE_STORED));
+    d.add(new Field("f2", "d3 second field", TextField.TYPE_STORED));
     d.add(new BinaryField("f3", new byte[] { 1, 2, 3, 4, 5 }));
     writer.addDocument(d);
     writer.close();
@@ -385,22 +385,22 @@ public class TestConsistentFieldNumbers extends LuceneTestCase {
     customType15.setStoreTermVectorPositions(true);
 
     switch (mode) {
-      case 0: return new Field(fieldName, customType, "some text");
+      case 0: return new Field(fieldName, "some text", customType);
       case 1: return new TextField(fieldName, "some text");
-      case 2: return new Field(fieldName, customType2, "some text");
-      case 3: return new Field(fieldName, customType3, "some text");
-      case 4: return new Field(fieldName, customType4, "some text");
-      case 5: return new Field(fieldName, customType5, "some text");
-      case 6: return new Field(fieldName, customType6, "some text");
-      case 7: return new Field(fieldName, customType7, "some text");
-      case 8: return new Field(fieldName, customType8, "some text");
-      case 9: return new Field(fieldName, customType9, "some text");
-      case 10: return new Field(fieldName, customType10, "some text");
-      case 11: return new Field(fieldName, customType11, "some text");
-      case 12: return new Field(fieldName, customType12, "some text");
-      case 13: return new Field(fieldName, customType13, "some text");
-      case 14: return new Field(fieldName, customType14, "some text");
-      case 15: return new Field(fieldName, customType15, "some text");
+      case 2: return new Field(fieldName, "some text", customType2);
+      case 3: return new Field(fieldName, "some text", customType3);
+      case 4: return new Field(fieldName, "some text", customType4);
+      case 5: return new Field(fieldName, "some text", customType5);
+      case 6: return new Field(fieldName, "some text", customType6);
+      case 7: return new Field(fieldName, "some text", customType7);
+      case 8: return new Field(fieldName, "some text", customType8);
+      case 9: return new Field(fieldName, "some text", customType9);
+      case 10: return new Field(fieldName, "some text", customType10);
+      case 11: return new Field(fieldName, "some text", customType11);
+      case 12: return new Field(fieldName, "some text", customType12);
+      case 13: return new Field(fieldName, "some text", customType13);
+      case 14: return new Field(fieldName, "some text", customType14);
+      case 15: return new Field(fieldName, "some text", customType15);
       default: return null;
     }
   }


@@ -304,8 +304,8 @@ public class TestFieldsReader extends LuceneTestCase {
 Document doc = new Document();
 FieldType onlyStored = new FieldType();
 onlyStored.setStored(true);
-doc.add(new Field("field", onlyStored, "value"));
-doc.add(new Field("field2", StringField.TYPE_STORED, "value"));
+doc.add(new Field("field", "value", onlyStored));
+doc.add(new Field("field2", "value", StringField.TYPE_STORED));
 w.addDocument(doc);
 IndexReader r = w.getReader();
 w.close();


@@ -50,8 +50,8 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 new MockAnalyzer(random));
 IndexWriter writer = new IndexWriter(dir, config);
 Document d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d1 first field"));
-d.add(new Field("f2", TextField.TYPE_STORED, "d1 second field"));
+d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
+d.add(new Field("f2", "d1 second field", TextField.TYPE_STORED));
 writer.addDocument(d);
 for (String string : writer.getIndexFileNames()) {
 assertFalse(string.endsWith(".fnx"));
@@ -65,7 +65,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 assertFNXFiles(dir, "1.fnx");
 d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d2 first field"));
+d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
 d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
 writer.addDocument(d);
 writer.commit();
@@ -83,8 +83,8 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
 TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 Document d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d3 first field"));
-d.add(new Field("f2", TextField.TYPE_STORED, "d3 second field"));
+d.add(new Field("f1", "d3 first field", TextField.TYPE_STORED));
+d.add(new Field("f2", "d3 second field", TextField.TYPE_STORED));
 d.add(new BinaryField("f3", new byte[] { 1, 2, 3, 4, 5 }));
 writer.addDocument(d);
 writer.close();
@@ -117,13 +117,13 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 new MockAnalyzer(random));
 IndexWriter writer = new IndexWriter(dir, config);
 Document d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d1 first field"));
-d.add(new Field("f2", TextField.TYPE_STORED, "d1 second field"));
+d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
+d.add(new Field("f2", "d1 second field", TextField.TYPE_STORED));
 writer.addDocument(d);
 writer.commit();
 assertFNXFiles(dir, "1.fnx");
 d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d2 first field"));
+d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
 d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
 writer.addDocument(d);
 writer.commit();
@@ -158,13 +158,13 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(
 new KeepAllDeletionPolicy()));
 Document d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d1 first field"));
-d.add(new Field("f2", TextField.TYPE_STORED, "d1 second field"));
+d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
+d.add(new Field("f2", "d1 second field", TextField.TYPE_STORED));
 writer.addDocument(d);
 writer.commit();
 assertFNXFiles(dir, "1.fnx");
 d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d2 first field"));
+d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
 d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
 writer.addDocument(d);
 writer.commit();
@@ -179,8 +179,8 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
 TEST_VERSION_CURRENT, new MockAnalyzer(random)));
 Document d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d3 first field"));
-d.add(new Field("f2", TextField.TYPE_STORED, "d3 second field"));
+d.add(new Field("f1", "d3 first field", TextField.TYPE_STORED));
+d.add(new Field("f2", "d3 second field", TextField.TYPE_STORED));
 d.add(new BinaryField("f3", new byte[] { 1, 2, 3, 4, 5 }));
 writer.addDocument(d);
 writer.close();
@@ -204,13 +204,13 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 TEST_VERSION_CURRENT, new MockAnalyzer(random)).setIndexDeletionPolicy(
 new KeepAllDeletionPolicy()));
 Document d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d1 first field"));
-d.add(new Field("f2", TextField.TYPE_STORED, "d1 second field"));
+d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
+d.add(new Field("f2", "d1 second field", TextField.TYPE_STORED));
 writer.addDocument(d);
 writer.commit();
 assertFNXFiles(dir, "1.fnx");
 d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d2 first field"));
+d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
 d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
 writer.addDocument(d);
 assertFNXFiles(dir, "1.fnx");
@@ -224,7 +224,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 new KeepAllDeletionPolicy()).setIndexCommit(listCommits.get(0)));
 d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d2 first field"));
+d.add(new Field("f1", "d2 first field", TextField.TYPE_STORED));
 d.add(new BinaryField("f3", new byte[] { 1, 2, 3 }));
 writer.addDocument(d);
 writer.commit();
@@ -475,7 +475,7 @@ public class TestGlobalFieldNumbers extends LuceneTestCase {
 }
 Document d = new Document();
-d.add(new Field("f1", TextField.TYPE_STORED, "d1 first field"));
+d.add(new Field("f1", "d1 first field", TextField.TYPE_STORED));
 writer.addDocument(d);
 writer.prepareCommit();
 // the fnx file should still be under control of the SIS


@@ -152,9 +152,9 @@ public class TestIndexReader extends LuceneTestCase
 FieldType customType3 = new FieldType();
 customType3.setStored(true);
-doc.add(new Field("keyword",StringField.TYPE_STORED,"test1"));
-doc.add(new Field("text",TextField.TYPE_STORED,"test1"));
-doc.add(new Field("unindexed",customType3,"test1"));
+doc.add(new Field("keyword", "test1", StringField.TYPE_STORED));
+doc.add(new Field("text", "test1", TextField.TYPE_STORED));
+doc.add(new Field("unindexed", "test1", customType3));
 doc.add(new TextField("unstored","test1"));
 writer.addDocument(doc);
@@ -178,18 +178,18 @@ public class TestIndexReader extends LuceneTestCase
 int mergeFactor = ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor();
 for (int i = 0; i < 5*mergeFactor; i++) {
 doc = new Document();
-doc.add(new Field("keyword",StringField.TYPE_STORED,"test1"));
-doc.add(new Field("text",TextField.TYPE_STORED, "test1"));
-doc.add(new Field("unindexed",customType3,"test1"));
+doc.add(new Field("keyword", "test1", StringField.TYPE_STORED));
+doc.add(new Field("text", "test1", TextField.TYPE_STORED));
+doc.add(new Field("unindexed", "test1", customType3));
 doc.add(new TextField("unstored","test1"));
 writer.addDocument(doc);
 }
 // new fields are in some different segments (we hope)
 for (int i = 0; i < 5*mergeFactor; i++) {
 doc = new Document();
-doc.add(new Field("keyword2",StringField.TYPE_STORED,"test1"));
-doc.add(new Field("text2",TextField.TYPE_STORED, "test1"));
-doc.add(new Field("unindexed2",customType3,"test1"));
+doc.add(new Field("keyword2", "test1", StringField.TYPE_STORED));
+doc.add(new Field("text2", "test1", TextField.TYPE_STORED));
+doc.add(new Field("unindexed2", "test1", customType3));
 doc.add(new TextField("unstored2","test1"));
 writer.addDocument(doc);
 }
@@ -210,11 +210,11 @@ public class TestIndexReader extends LuceneTestCase
 for (int i = 0; i < 5*mergeFactor; i++) {
 doc = new Document();
-doc.add(new Field("tvnot",TextField.TYPE_STORED,"tvnot"));
-doc.add(new Field("termvector",customType5,"termvector"));
-doc.add(new Field("tvoffset",customType6,"tvoffset"));
-doc.add(new Field("tvposition",customType7,"tvposition"));
-doc.add(new Field("tvpositionoffset",customType8, "tvpositionoffset"));
+doc.add(new Field("tvnot", "tvnot", TextField.TYPE_STORED));
+doc.add(new Field("termvector", "termvector", customType5));
+doc.add(new Field("tvoffset", "tvoffset", customType6));
+doc.add(new Field("tvposition", "tvposition", customType7));
+doc.add(new Field("tvpositionoffset", "tvpositionoffset", customType8));
 writer.addDocument(doc);
 }
@@ -303,11 +303,11 @@ public class TestIndexReader extends LuceneTestCase
 customType8.setStoreTermVectorPositions(true);
 for (int i = 0; i < 5 * mergeFactor; i++) {
 Document doc = new Document();
-doc.add(new Field("tvnot",TextField.TYPE_STORED,"one two two three three three"));
-doc.add(new Field("termvector",customType5,"one two two three three three"));
-doc.add(new Field("tvoffset",customType6,"one two two three three three"));
-doc.add(new Field("tvposition",customType7,"one two two three three three"));
-doc.add(new Field("tvpositionoffset",customType8, "one two two three three three"));
+doc.add(new Field("tvnot", "one two two three three three", TextField.TYPE_STORED));
+doc.add(new Field("termvector", "one two two three three three", customType5));
+doc.add(new Field("tvoffset", "one two two three three three", customType6));
+doc.add(new Field("tvposition", "one two two three three three", customType7));
+doc.add(new Field("tvpositionoffset", "one two two three three three", customType8));
 writer.addDocument(doc);
 }


@@ -40,8 +40,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.similarities.DefaultSimilarity;
-import org.apache.lucene.search.similarities.Similarity;
-import org.apache.lucene.search.similarities.SimilarityProvider;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BitVector;
@@ -969,13 +967,13 @@ public class TestIndexReaderReopen extends LuceneTestCase {
 customType2.setOmitNorms(true);
 FieldType customType3 = new FieldType();
 customType3.setStored(true);
-doc.add(new Field("field1", TextField.TYPE_STORED, sb.toString()));
-doc.add(new Field("fielda", customType2, sb.toString()));
-doc.add(new Field("fieldb", customType3, sb.toString()));
+doc.add(new Field("field1", sb.toString(), TextField.TYPE_STORED));
+doc.add(new Field("fielda", sb.toString(), customType2));
+doc.add(new Field("fieldb", sb.toString(), customType3));
 sb.append(" b");
 sb.append(n);
 for (int i = 1; i < numFields; i++) {
-doc.add(new Field("field" + (i+1), TextField.TYPE_STORED, sb.toString()));
+doc.add(new Field("field" + (i+1), sb.toString(), TextField.TYPE_STORED));
 }
 return doc;
 }


@@ -304,7 +304,7 @@ public class TestIndexWriter extends LuceneTestCase {
 int lastFlushCount = -1;
 for(int j=1;j<52;j++) {
 Document doc = new Document();
-doc.add(new Field("field", storedTextType, "aaa" + j));
+doc.add(new Field("field", "aaa" + j, storedTextType));
 writer.addDocument(doc);
 _TestUtil.syncConcurrentMerges(writer);
 int flushCount = writer.getFlushCount();
@@ -358,7 +358,7 @@ public class TestIndexWriter extends LuceneTestCase {
 for(int j=1;j<52;j++) {
 Document doc = new Document();
-doc.add(new Field("field", storedTextType, "aaa" + j));
+doc.add(new Field("field", "aaa" + j, storedTextType));
 writer.addDocument(doc);
 }
@@ -1236,7 +1236,7 @@ public class TestIndexWriter extends LuceneTestCase {
 customType.setTokenized(true);
 customType.setIndexed(true);
-Field f = new Field("binary", customType, b, 10, 17);
+Field f = new Field("binary", b, 10, 17, customType);
 f.setTokenStream(new MockTokenizer(new StringReader("doc1field1"), MockTokenizer.WHITESPACE, false));
 FieldType customType2 = new FieldType(TextField.TYPE_STORED);
@@ -1685,10 +1685,10 @@ public class TestIndexWriter extends LuceneTestCase {
 for (int i=0; i<2; i++) {
 Document doc = new Document();
-doc.add(new Field("id", customType3, Integer.toString(i)+BIG));
-doc.add(new Field("str", customType2, Integer.toString(i)+BIG));
-doc.add(new Field("str2", storedTextType, Integer.toString(i)+BIG));
-doc.add(new Field("str3", customType, Integer.toString(i)+BIG));
+doc.add(new Field("id", Integer.toString(i)+BIG, customType3));
+doc.add(new Field("str", Integer.toString(i)+BIG, customType2));
+doc.add(new Field("str2", Integer.toString(i)+BIG, storedTextType));
+doc.add(new Field("str3", Integer.toString(i)+BIG, customType));
 indexWriter.addDocument(doc);
 }
@@ -1805,7 +1805,7 @@ public class TestIndexWriter extends LuceneTestCase {
 doc = new Document();
 FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
 customType.setTokenized(false);
-Field contentField = new Field("content", customType, "");
+Field contentField = new Field("content", "", customType);
 doc.add(contentField);
 w = new RandomIndexWriter(random, dir);


@@ -1264,7 +1264,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
 }
 Document document = new Document();
-document.add(new Field("field", TextField.TYPE_STORED, "a field"));
+document.add(new Field("field", "a field", TextField.TYPE_STORED));
 w.addDocument(document);
 for (int i = 0; i < numDocs; i++) {
@@ -1284,7 +1284,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
 }
 }
 document = new Document();
-document.add(new Field("field", TextField.TYPE_STORED, "a field"));
+document.add(new Field("field", "a field", TextField.TYPE_STORED));
 w.addDocument(document);
 w.close();
 IndexReader reader = IndexReader.open(dir);


@@ -227,22 +227,22 @@ public class TestOmitNorms extends LuceneTestCase {
 public void testOmitNormsCombos() throws IOException {
 // indexed with norms
 FieldType customType = new FieldType(TextField.TYPE_STORED);
-Field norms = new Field("foo", customType, "a");
+Field norms = new Field("foo", "a", customType);
 // indexed without norms
 FieldType customType1 = new FieldType(TextField.TYPE_STORED);
 customType1.setOmitNorms(true);
-Field noNorms = new Field("foo", customType1, "a");
+Field noNorms = new Field("foo", "a", customType1);
 // not indexed, but stored
 FieldType customType2 = new FieldType();
 customType2.setStored(true);
-Field noIndex = new Field("foo", customType2, "a");
+Field noIndex = new Field("foo", "a", customType2);
 // not indexed but stored, omitNorms is set
 FieldType customType3 = new FieldType();
 customType3.setStored(true);
 customType3.setOmitNorms(true);
-Field noNormsNoIndex = new Field("foo", customType3, "a");
+Field noNormsNoIndex = new Field("foo", "a", customType3);
 // not indexed nor stored (doesnt exist at all, we index a different field instead)
-Field emptyNorms = new Field("bar", customType, "a");
+Field emptyNorms = new Field("bar", "a", customType);
 assertNotNull(getNorms("foo", norms, norms));
 assertNull(getNorms("foo", norms, noNorms));


@@ -611,14 +611,14 @@ public class TestPayloads extends LuceneTestCase {
 RandomIndexWriter writer = new RandomIndexWriter(random, dir,
 new MockAnalyzer(random, MockTokenizer.WHITESPACE, true));
 Document doc = new Document();
-doc.add(new Field("hasMaybepayload", TextField.TYPE_STORED, "here we go"));
+doc.add(new Field("hasMaybepayload", "here we go", TextField.TYPE_STORED));
 writer.addDocument(doc);
 writer.close();
 writer = new RandomIndexWriter(random, dir,
 new MockAnalyzer(random, MockTokenizer.WHITESPACE, true));
 doc = new Document();
-doc.add(new Field("hasMaybepayload2", TextField.TYPE_STORED, "here we go"));
+doc.add(new Field("hasMaybepayload2", "here we go", TextField.TYPE_STORED));
 writer.addDocument(doc);
 writer.addDocument(doc);
 writer.optimize();


@@ -41,7 +41,7 @@ public class TestSameTokenSamePosition extends LuceneTestCase {
 Directory dir = newDirectory();
 RandomIndexWriter riw = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new BugReproAnalyzer()));
 Document doc = new Document();
-doc.add(new Field("eng", TextField.TYPE_STORED, "Six drunken" /*This shouldn't matter. */));
+doc.add(new Field("eng", "Six drunken", TextField.TYPE_STORED /*This shouldn't matter. */));
 riw.addDocument(doc);
 riw.close();
 dir.close();
@@ -54,7 +54,7 @@ public class TestSameTokenSamePosition extends LuceneTestCase {
 Directory dir = newDirectory();
 RandomIndexWriter riw = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new BugReproAnalyzer()));
 Document doc = new Document();
-doc.add(new Field("eng", TextField.TYPE_STORED, "Six drunken" /*This shouldn't matter. */));
+doc.add(new Field("eng", "Six drunken", TextField.TYPE_STORED /*This shouldn't matter. */));
 for (int i = 0; i < 100; i++) {
 riw.addDocument(doc);
 }


@@ -32,7 +32,7 @@ public class TestSegmentInfo extends LuceneTestCase {
 IndexWriter writer = new IndexWriter(dir, conf);
 writer.setInfoStream(VERBOSE ? System.out : null);
 Document doc = new Document();
-doc.add(new Field("a", TextField.TYPE_STORED, "value"));
+doc.add(new Field("a", "value", TextField.TYPE_STORED));
 writer.addDocument(doc);
 writer.close();


@@ -117,7 +117,7 @@ public class TestTermVectorsReader extends LuceneTestCase {
 else {
 customType.setStoreTermVectors(true);
 }
-doc.add(new Field(testFields[i], customType, ""));
+doc.add(new Field(testFields[i], "", customType));
 }
 //Create 5 documents for testing, they all have the same


@@ -143,7 +143,7 @@ public class TestTermVectorsWriter extends LuceneTestCase {
 customType.setStoreTermVectors(true);
 customType.setStoreTermVectorPositions(true);
 customType.setStoreTermVectorOffsets(true);
-Field f = new Field("field", customType, stream);
+Field f = new Field("field", stream, customType);
 doc.add(f);
 doc.add(f);
 w.addDocument(doc);
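
The new (name, value, type) order applies equally when the field value is a TokenStream rather than a String, as in the hunk above. A minimal sketch of that usage (the analyzer, field name, and sample text are illustrative assumptions, not taken from the test):

import java.io.IOException;
import java.io.StringReader;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.util.Version;

public class TokenStreamFieldSketch {
  public static void main(String[] args) throws IOException {
    // Analyze some text into a TokenStream (illustrative analyzer choice).
    StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_40);
    TokenStream stream = analyzer.tokenStream("field", new StringReader("one two two"));

    // Term vectors require a custom FieldType, as configured in the hunk above.
    FieldType ft = new FieldType(TextField.TYPE_UNSTORED);
    ft.setStoreTermVectors(true);

    // The TokenStream value now precedes the FieldType.
    Document doc = new Document();
    doc.add(new Field("field", stream, ft));
  }
}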


@@ -536,7 +536,7 @@ public class TestDocValuesIndexing extends LuceneTestCase {
 }
 }
 doc.removeFields("id");
-doc.add(new Field("id", StringField.TYPE_STORED, idBase + i));
+doc.add(new Field("id", idBase + i, StringField.TYPE_STORED));
 w.addDocument(doc);
 if (i % 7 == 0) {


@@ -162,7 +162,7 @@ public class TestSloppyPhraseQuery extends LuceneTestCase {
 Document doc = new Document();
 FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
 customType.setOmitNorms(true);
-Field f = new Field("f", customType, docText);
+Field f = new Field("f", docText, customType);
 doc.add(f);
 return doc;
 }
@@ -237,7 +237,7 @@ public class TestSloppyPhraseQuery extends LuceneTestCase {
 RandomIndexWriter iw = new RandomIndexWriter(random, dir);
 FieldType customType = new FieldType(TextField.TYPE_UNSTORED);
 customType.setOmitNorms(true);
-Field f = new Field("lyrics", customType, "");
+Field f = new Field("lyrics", "", customType);
 Document doc = new Document();
 doc.add(f);
 f.setValue("drug drug");


@@ -132,7 +132,7 @@ public class TestSort extends LuceneTestCase {
 for (int i=0; i<data.length; ++i) {
 if (((i%2)==0 && even) || ((i%2)==1 && odd)) {
 Document doc = new Document();
-doc.add (new Field ("tracer", ft1, data[i][0]));
+doc.add (new Field ("tracer", data[i][0], ft1));
 doc.add (new TextField ("contents", data[i][1]));
 if (data[i][2] != null) {
 Field f = new StringField ("int", data[i][2]);
@@ -196,12 +196,12 @@ public class TestSort extends LuceneTestCase {
 for (int i=0; i<NUM_STRINGS; i++) {
 Document doc = new Document();
 String num = getRandomCharString(getRandomNumber(2, 8), 48, 52);
-doc.add (new Field ("tracer", customType, num));
+doc.add (new Field ("tracer", num, customType));
 //doc.add (new Field ("contents", Integer.toString(i), Field.Store.NO, Field.Index.ANALYZED));
 doc.add (new StringField ("string", num));
 String num2 = getRandomCharString(getRandomNumber(1, 4), 48, 50);
 doc.add (new StringField ("string2", num2));
-doc.add (new Field ("tracer2", customType, num2));
+doc.add (new Field ("tracer2", num2, customType));
 for(IndexableField f : doc.getFields()) {
 ((Field) f).setBoost(2.0f);
 }


@@ -67,9 +67,9 @@ public class TestTermVectors extends LuceneTestCase {
 } else {
 ft.setStoreTermVectors(true);
 }
-doc.add(new Field("field", ft, English.intToEnglish(i)));
+doc.add(new Field("field", English.intToEnglish(i), ft));
 //test no term vectors too
-doc.add(new Field("noTV", TextField.TYPE_STORED, English.intToEnglish(i)));
+doc.add(new Field("noTV", English.intToEnglish(i), TextField.TYPE_STORED));
 writer.addDocument(doc);
 }
 reader = writer.getReader();


@@ -125,9 +125,9 @@ public class PayloadHelper {
 // writer.infoStream = System.out;
 for (int i = 0; i < numDocs; i++) {
 Document doc = new Document();
-doc.add(new Field(FIELD, TextField.TYPE_STORED, English.intToEnglish(i)));
-doc.add(new Field(MULTI_FIELD, TextField.TYPE_STORED, English.intToEnglish(i) + " " + English.intToEnglish(i)));
-doc.add(new Field(NO_PAYLOAD_FIELD, TextField.TYPE_STORED, English.intToEnglish(i)));
+doc.add(new Field(FIELD, English.intToEnglish(i), TextField.TYPE_STORED));
+doc.add(new Field(MULTI_FIELD, English.intToEnglish(i) + " " + English.intToEnglish(i), TextField.TYPE_STORED));
+doc.add(new Field(NO_PAYLOAD_FIELD, English.intToEnglish(i), TextField.TYPE_STORED));
 writer.addDocument(doc);
 }
 reader = IndexReader.open(writer, true);


@@ -48,8 +48,8 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
 TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
 Document doc = new Document();
-doc.add(new Field("partnum", StringField.TYPE_STORED, "Q36"));
-doc.add(new Field("description", TextField.TYPE_STORED, "Illidium Space Modulator"));
+doc.add(new Field("partnum", "Q36", StringField.TYPE_STORED));
+doc.add(new Field("description", "Illidium Space Modulator", TextField.TYPE_STORED));
 writer.addDocument(doc);
 writer.close();
@@ -76,10 +76,10 @@ public class TestKeywordAnalyzer extends BaseTokenStreamTestCase {
 RAMDirectory dir = new RAMDirectory();
 IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
 Document doc = new Document();
-doc.add(new Field("partnum", TextField.TYPE_STORED, "Q36"));
+doc.add(new Field("partnum", "Q36", TextField.TYPE_STORED));
 writer.addDocument(doc);
 doc = new Document();
-doc.add(new Field("partnum", TextField.TYPE_STORED, "Q37"));
+doc.add(new Field("partnum", "Q37", TextField.TYPE_STORED));
 writer.addDocument(doc);
 writer.close();


@@ -48,8 +48,8 @@ public class QueryAutoStopWordAnalyzerTest extends BaseTokenStreamTestCase {
 Document doc = new Document();
 String variedFieldValue = variedFieldValues[i % variedFieldValues.length];
 String repetitiveFieldValue = repetitiveFieldValues[i % repetitiveFieldValues.length];
-doc.add(new Field("variedField", TextField.TYPE_STORED, variedFieldValue));
-doc.add(new Field("repetitiveField", TextField.TYPE_STORED, repetitiveFieldValue));
+doc.add(new Field("variedField", variedFieldValue, TextField.TYPE_STORED));
+doc.add(new Field("repetitiveField", repetitiveFieldValue, TextField.TYPE_STORED));
 writer.addDocument(doc);
 }
 writer.close();


@@ -56,15 +56,15 @@ public class ShingleAnalyzerWrapperTest extends BaseTokenStreamTestCase {
 Document doc;
 doc = new Document();
-doc.add(new Field("content", TextField.TYPE_STORED, "please divide this sentence into shingles"));
+doc.add(new Field("content", "please divide this sentence into shingles", TextField.TYPE_STORED));
 writer.addDocument(doc);
 doc = new Document();
-doc.add(new Field("content", TextField.TYPE_STORED, "just another test sentence"));
+doc.add(new Field("content", "just another test sentence", TextField.TYPE_STORED));
 writer.addDocument(doc);
 doc = new Document();
-doc.add(new Field("content", TextField.TYPE_STORED, "a sentence which contains no test"));
+doc.add(new Field("content", "a sentence which contains no test", TextField.TYPE_STORED));
 writer.addDocument(doc);
 writer.close();


@@ -94,8 +94,8 @@ public class TestTeeSinkTokenFilter extends BaseTokenStreamTestCase {
 ft.setStoreTermVectors(true);
 ft.setStoreTermVectorOffsets(true);
 ft.setStoreTermVectorPositions(true);
-Field f1 = new Field("field", ft, tee);
-Field f2 = new Field("field", ft, sink);
+Field f1 = new Field("field", tee, ft);
+Field f2 = new Field("field", sink, ft);
 doc.add(f1);
 doc.add(f2);
 w.addDocument(doc);


@@ -103,11 +103,11 @@ public class DocMaker {
 numericFields = new HashMap<String,NumericField>();
 // Initialize the map with the default fields.
-fields.put(BODY_FIELD, new Field(BODY_FIELD, bodyFt, ""));
-fields.put(TITLE_FIELD, new Field(TITLE_FIELD, ft, ""));
-fields.put(DATE_FIELD, new Field(DATE_FIELD, ft, ""));
-fields.put(ID_FIELD, new Field(ID_FIELD, StringField.TYPE_STORED, ""));
-fields.put(NAME_FIELD, new Field(NAME_FIELD, ft, ""));
+fields.put(BODY_FIELD, new Field(BODY_FIELD, "", bodyFt));
+fields.put(TITLE_FIELD, new Field(TITLE_FIELD, "", ft));
+fields.put(DATE_FIELD, new Field(DATE_FIELD, "", ft));
+fields.put(ID_FIELD, new Field(ID_FIELD, "", StringField.TYPE_STORED));
+fields.put(NAME_FIELD, new Field(NAME_FIELD, "", ft));
 numericFields.put(DATE_MSEC_FIELD, new NumericField(DATE_MSEC_FIELD));
 numericFields.put(TIME_SEC_FIELD, new NumericField(TIME_SEC_FIELD));
@@ -127,12 +127,12 @@ public class DocMaker {
 */
 Field getField(String name, FieldType ft) {
 if (!reuseFields) {
-return new Field(name, ft, "");
+return new Field(name, "", ft);
 }
 Field f = fields.get(name);
 if (f == null) {
-f = new Field(name, ft, "");
+f = new Field(name, "", ft);
 fields.put(name, f);
 }
 return f;
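
The getField hunk above is DocMaker's reuse path: with reuseFields enabled, one cached Field per name is handed back and its value is overwritten per document instead of allocating a new instance. A self-contained sketch of the same caching pattern (class and variable names are illustrative, not DocMaker's actual code):

import java.util.HashMap;
import java.util.Map;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;

public class FieldReuseSketch {
  private final boolean reuseFields;
  private final Map<String,Field> fields = new HashMap<String,Field>();

  public FieldReuseSketch(boolean reuseFields) {
    this.reuseFields = reuseFields;
  }

  public Field getField(String name, FieldType ft) {
    if (!reuseFields) {
      // No reuse: allocate a fresh Field, using the new (name, value, type) order.
      return new Field(name, "", ft);
    }
    Field f = fields.get(name);
    if (f == null) {
      f = new Field(name, "", ft);
      fields.put(name, f);
    }
    // The caller overwrites the placeholder value, e.g. f.setValue("body text").
    return f;
  }
}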


@@ -93,7 +93,7 @@ public class AssociationIndexer {
 // create a plain Lucene document and add some regular Lucene fields
 // to it
 Document doc = new Document();
-doc.add(new Field(SimpleUtils.TITLE, TextField.TYPE_STORED, SimpleUtils.docTitles[docNum]));
+doc.add(new Field(SimpleUtils.TITLE, SimpleUtils.docTitles[docNum], TextField.TYPE_STORED));
 doc.add(new TextField(SimpleUtils.TEXT, SimpleUtils.docTexts[docNum]));
 // invoke the category document builder for adding categories to the


@@ -173,7 +173,7 @@ public class MultiCLIndexer {
 // create a plain Lucene document and add some regular Lucene fields
 // to it
 Document doc = new Document();
-doc.add(new Field(SimpleUtils.TITLE, TextField.TYPE_STORED, docTitles[docNum]));
+doc.add(new Field(SimpleUtils.TITLE, docTitles[docNum], TextField.TYPE_STORED));
 doc.add(new TextField(SimpleUtils.TEXT, docTexts[docNum]));
 // finally add the document to the index


@@ -70,7 +70,7 @@ public class SimpleIndexer {
 // create a plain Lucene document and add some regular Lucene fields to it
 Document doc = new Document();
-doc.add(new Field(SimpleUtils.TITLE, TextField.TYPE_STORED, SimpleUtils.docTitles[docNum]));
+doc.add(new Field(SimpleUtils.TITLE, SimpleUtils.docTitles[docNum], TextField.TYPE_STORED));
 doc.add(new TextField(SimpleUtils.TEXT, SimpleUtils.docTexts[docNum]));
 // invoke the category document builder for adding categories to the document and,


@@ -187,7 +187,7 @@ public class CategoryDocumentBuilder implements DocumentBuilder {
 // super.build())
 FieldType ft = new FieldType(TextField.TYPE_UNSTORED);
 ft.setOmitNorms(true);
-fieldList.add(new Field(e.getKey(), ft, stream));
+fieldList.add(new Field(e.getKey(), stream, ft));
 }
 return this;


@@ -181,8 +181,8 @@ public class LuceneTaxonomyWriter implements TaxonomyWriter {
 FieldType ft = new FieldType(TextField.TYPE_UNSTORED);
 ft.setOmitNorms(true);
-parentStreamField = new Field(Consts.FIELD_PAYLOADS, ft, parentStream);
-fullPathField = new Field(Consts.FULL, StringField.TYPE_STORED, "");
+parentStreamField = new Field(Consts.FIELD_PAYLOADS, parentStream, ft);
+fullPathField = new Field(Consts.FULL, "", StringField.TYPE_STORED);
 this.nextID = indexWriter.maxDoc();


@@ -245,7 +245,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
 CategoryDocumentBuilder builder = new CategoryDocumentBuilder(tw, iParams);
 builder.setCategoryPaths(categories);
 builder.build(d);
-d.add(new Field("content", TextField.TYPE_STORED, content));
+d.add(new Field("content", content, TextField.TYPE_STORED));
 iw.addDocument(d);
 }


@@ -128,7 +128,7 @@ public class FacetTestUtils {
 cps.add(cp);
 Document d = new Document();
 new CategoryDocumentBuilder(tw, iParams).setCategoryPaths(cps).build(d);
-d.add(new Field("content", TextField.TYPE_STORED, "alpha"));
+d.add(new Field("content", "alpha", TextField.TYPE_STORED));
 iw.addDocument(d);
 }


@@ -328,7 +328,7 @@ public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
 cps.add(cp);
 Document d = new Document();
 new CategoryDocumentBuilder(tw, iParams).setCategoryPaths(cps).build(d);
-d.add(new Field("content", TextField.TYPE_STORED, "alpha"));
+d.add(new Field("content", "alpha", TextField.TYPE_STORED));
 iw.addDocument(d);
 }


@@ -210,7 +210,7 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
 // assert that those docs are not returned by all-scored-doc-IDs.
 FieldType ft = new FieldType();
 ft.setStored(true);
-doc.add(new Field("del", ft, Integer.toString(docNum)));
+doc.add(new Field("del", Integer.toString(docNum), ft));
 }
 if (haveAlpha(docNum)) {


@@ -45,51 +45,51 @@ public class TermAllGroupsCollectorTest extends LuceneTestCase {
 new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 // 0
 Document doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author1"));
-doc.add(new Field("content", TextField.TYPE_STORED, "random text"));
-doc.add(new Field("id", customType, "1"));
+doc.add(new Field(groupField, "author1", TextField.TYPE_STORED));
+doc.add(new Field("content", "random text", TextField.TYPE_STORED));
+doc.add(new Field("id", "1", customType));
 w.addDocument(doc);
 // 1
 doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author1"));
-doc.add(new Field("content", TextField.TYPE_STORED, "some more random text blob"));
-doc.add(new Field("id", customType, "2"));
+doc.add(new Field(groupField, "author1", TextField.TYPE_STORED));
+doc.add(new Field("content", "some more random text blob", TextField.TYPE_STORED));
+doc.add(new Field("id", "2", customType));
 w.addDocument(doc);
 // 2
 doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author1"));
-doc.add(new Field("content", TextField.TYPE_STORED, "some more random textual data"));
-doc.add(new Field("id", customType, "3"));
+doc.add(new Field(groupField, "author1", TextField.TYPE_STORED));
+doc.add(new Field("content", "some more random textual data", TextField.TYPE_STORED));
+doc.add(new Field("id", "3", customType));
 w.addDocument(doc);
 w.commit(); // To ensure a second segment
 // 3
 doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author2"));
-doc.add(new Field("content", TextField.TYPE_STORED, "some random text"));
-doc.add(new Field("id", customType, "4"));
+doc.add(new Field(groupField, "author2", TextField.TYPE_STORED));
+doc.add(new Field("content", "some random text", TextField.TYPE_STORED));
+doc.add(new Field("id", "4", customType));
 w.addDocument(doc);
 // 4
 doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author3"));
-doc.add(new Field("content", TextField.TYPE_STORED, "some more random text"));
-doc.add(new Field("id", customType, "5"));
+doc.add(new Field(groupField, "author3", TextField.TYPE_STORED));
+doc.add(new Field("content", "some more random text", TextField.TYPE_STORED));
+doc.add(new Field("id", "5", customType));
 w.addDocument(doc);
 // 5
 doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author3"));
-doc.add(new Field("content", TextField.TYPE_STORED, "random blob"));
-doc.add(new Field("id", customType, "6"));
+doc.add(new Field(groupField, "author3", TextField.TYPE_STORED));
+doc.add(new Field("content", "random blob", TextField.TYPE_STORED));
+doc.add(new Field("id", "6", customType));
 w.addDocument(doc);
 // 6 -- no author field
 doc = new Document();
-doc.add(new Field("content", TextField.TYPE_STORED, "random word stuck in alot of other text"));
-doc.add(new Field("id", customType, "6"));
+doc.add(new Field("content", "random word stuck in alot of other text", TextField.TYPE_STORED));
+doc.add(new Field("id", "6", customType));
 w.addDocument(doc);
 IndexSearcher indexSearcher = new IndexSearcher(w.getReader());


@@ -61,50 +61,50 @@ public class TestGrouping extends LuceneTestCase {
 new MockAnalyzer(random)).setMergePolicy(newLogMergePolicy()));
 // 0
 Document doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author1"));
-doc.add(new Field("content", TextField.TYPE_STORED, "random text"));
-doc.add(new Field("id", customType, "1"));
+doc.add(new Field(groupField, "author1", TextField.TYPE_STORED));
+doc.add(new Field("content", "random text", TextField.TYPE_STORED));
+doc.add(new Field("id", "1", customType));
 w.addDocument(doc);
 // 1
 doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author1"));
-doc.add(new Field("content", TextField.TYPE_STORED, "some more random text"));
-doc.add(new Field("id", customType, "2"));
+doc.add(new Field(groupField, "author1", TextField.TYPE_STORED));
+doc.add(new Field("content", "some more random text", TextField.TYPE_STORED));
+doc.add(new Field("id", "2", customType));
 w.addDocument(doc);
 // 2
 doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author1"));
-doc.add(new Field("content", TextField.TYPE_STORED, "some more random textual data"));
-doc.add(new Field("id", customType, "3"));
+doc.add(new Field(groupField, "author1", TextField.TYPE_STORED));
+doc.add(new Field("content", "some more random textual data", TextField.TYPE_STORED));
+doc.add(new Field("id", "3", customType));
 w.addDocument(doc);
 // 3
 doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author2"));
-doc.add(new Field("content", TextField.TYPE_STORED, "some random text"));
-doc.add(new Field("id", customType, "4"));
+doc.add(new Field(groupField, "author2", TextField.TYPE_STORED));
+doc.add(new Field("content", "some random text", TextField.TYPE_STORED));
+doc.add(new Field("id", "4", customType));
 w.addDocument(doc);
 // 4
 doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author3"));
-doc.add(new Field("content", TextField.TYPE_STORED, "some more random text"));
-doc.add(new Field("id", customType, "5"));
+doc.add(new Field(groupField, "author3", TextField.TYPE_STORED));
+doc.add(new Field("content", "some more random text", TextField.TYPE_STORED));
+doc.add(new Field("id", "5", customType));
 w.addDocument(doc);
 // 5
 doc = new Document();
-doc.add(new Field(groupField, TextField.TYPE_STORED, "author3"));
-doc.add(new Field("content", TextField.TYPE_STORED, "random"));
-doc.add(new Field("id", customType, "6"));
+doc.add(new Field(groupField, "author3", TextField.TYPE_STORED));
+doc.add(new Field("content", "random", TextField.TYPE_STORED));
+doc.add(new Field("id", "6", customType));
 w.addDocument(doc);
 // 6 -- no author field
 doc = new Document();
-doc.add(new Field("content", TextField.TYPE_STORED, "random word stuck in alot of other text"));
-doc.add(new Field("id", customType, "6"));
+doc.add(new Field("content", "random word stuck in alot of other text", TextField.TYPE_STORED));
+doc.add(new Field("id", "6", customType));
 w.addDocument(doc);
 IndexSearcher indexSearcher = new IndexSearcher(w.getReader());


@@ -590,7 +590,7 @@ public class SpellChecker implements java.io.Closeable {
 Document doc = new Document();
 // the word field is never queried on... its indexed so it can be quickly
 // checked for rebuild (and stored for retrieval). Doesn't need norms or TF/pos
-Field f = new Field(F_WORD, StringField.TYPE_STORED, text);
+Field f = new Field(F_WORD, text, StringField.TYPE_STORED);
 doc.add(f); // orig term
 addGram(text, doc, ng1, ng2);
 return doc;
@@ -605,7 +605,7 @@ public class SpellChecker implements java.io.Closeable {
 String gram = text.substring(i, i + ng);
 FieldType ft = new FieldType(StringField.TYPE_UNSTORED);
 ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
-Field ngramField = new Field(key, ft, gram);
+Field ngramField = new Field(key, gram, ft);
 // spellchecker does not use positional queries, but we want freqs
 // for scoring these multivalued n-gram fields.
 doc.add(ngramField);
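
For context, the addGram hunk above indexes every length-ng character window of a word; for example "hello" with ng = 2 yields he, el, ll, lo. A standalone sketch of that expansion (class and method names are hypothetical; only the substring step comes from the code above):

import java.util.ArrayList;
import java.util.List;

public class NGramSketch {
  // Enumerate the character n-grams of a word, mirroring the
  // text.substring(i, i + ng) loop in the hunk above.
  static List<String> grams(String text, int ng) {
    List<String> out = new ArrayList<String>();
    for (int i = 0; i + ng <= text.length(); i++) {
      out.add(text.substring(i, i + ng));
    }
    return out;
  }

  public static void main(String[] args) {
    System.out.println(grams("hello", 2)); // prints [he, el, ll, lo]
  }
}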


@@ -273,7 +273,7 @@ public abstract class FieldType extends FieldProperties {
 * @return the {@link org.apache.lucene.index.IndexableField}.
 */
 protected IndexableField createField(String name, String val, org.apache.lucene.document.FieldType type, float boost){
-Field f = new Field(name, type, val);
+Field f = new Field(name, val, type);
 f.setBoost(boost);
 return f;
 }


@@ -106,8 +106,8 @@ public class TestArbitraryIndexDir extends AbstractSolrTestCase{
 new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40))
 );
 Document doc = new Document();
-doc.add(new Field("id", TextField.TYPE_STORED, "2"));
-doc.add(new Field("name", TextField.TYPE_STORED, "name2"));
+doc.add(new Field("id", "2", TextField.TYPE_STORED));
+doc.add(new Field("name", "name2", TextField.TYPE_STORED));
 iw.addDocument(doc);
 iw.commit();
 iw.close();


@@ -150,8 +150,8 @@ public class TestSort extends SolrTestCaseJ4 {
 public void testSort() throws Exception {
 Directory dir = new RAMDirectory();
-Field f = new Field("f", StringField.TYPE_UNSTORED,"0");
-Field f2 = new Field("f2", StringField.TYPE_UNSTORED,"0");
+Field f = new Field("f", "0", StringField.TYPE_UNSTORED);
+Field f2 = new Field("f2", "0", StringField.TYPE_UNSTORED);
 for (int iterCnt = 0; iterCnt<iter; iterCnt++) {
 IndexWriter iw = new IndexWriter(


@@ -290,7 +290,7 @@ public class IndexBasedSpellCheckerTest extends SolrTestCaseJ4 {
 );
 for (int i = 0; i < ALT_DOCS.length; i++) {
 Document doc = new Document();
-doc.add(new Field("title", TextField.TYPE_STORED, ALT_DOCS[i]));
+doc.add(new Field("title", ALT_DOCS[i], TextField.TYPE_STORED));
 iw.addDocument(doc);
 }
 iw.optimize();