don't call the deprecated Field API

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@150492 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Daniel Naber 2004-09-06 22:01:49 +00:00
parent 16239c65ca
commit e49eb20a32
5 changed files with 23 additions and 21 deletions

View File

@@ -67,7 +67,7 @@ public class TestBooleanPrefixQuery extends TestCase {
WhitespaceAnalyzer(), true); WhitespaceAnalyzer(), true);
for (int i = 0; i < categories.length; i++) { for (int i = 0; i < categories.length; i++) {
Document doc = new Document(); Document doc = new Document();
doc.add(Field.Keyword("category", categories[i])); doc.add(new Field("category", categories[i], Field.Store.YES, Field.Index.UN_TOKENIZED));
writer.addDocument(doc); writer.addDocument(doc);
} }
writer.close(); writer.close();

View File

@@ -40,8 +40,8 @@ public class TestDocBoost extends TestCase {
RAMDirectory store = new RAMDirectory(); RAMDirectory store = new RAMDirectory();
IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true); IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(), true);
Field f1 = Field.Text("field", "word"); Field f1 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
Field f2 = Field.Text("field", "word"); Field f2 = new Field("field", "word", Field.Store.YES, Field.Index.TOKENIZED);
f2.setBoost(2.0f); f2.setBoost(2.0f);
Document d1 = new Document(); Document d1 = new Document();

View File

@@ -51,23 +51,23 @@ extends TestCase {
IndexWriter writer = new IndexWriter (directory, new WhitespaceAnalyzer(), true); IndexWriter writer = new IndexWriter (directory, new WhitespaceAnalyzer(), true);
Document doc = new Document(); Document doc = new Document();
doc.add (Field.Text ("field", "one two three four five")); doc.add (new Field("field", "one two three four five", Field.Store.YES, Field.Index.TOKENIZED));
doc.add (Field.Text ("sorter", "b")); doc.add (new Field("sorter", "b", Field.Store.YES, Field.Index.TOKENIZED));
writer.addDocument (doc); writer.addDocument (doc);
doc = new Document(); doc = new Document();
doc.add (Field.Text ("field", "one two three four")); doc.add (new Field("field", "one two three four", Field.Store.YES, Field.Index.TOKENIZED));
doc.add (Field.Text ("sorter", "d")); doc.add (new Field("sorter", "d", Field.Store.YES, Field.Index.TOKENIZED));
writer.addDocument (doc); writer.addDocument (doc);
doc = new Document(); doc = new Document();
doc.add (Field.Text ("field", "one two three y")); doc.add (new Field("field", "one two three y", Field.Store.YES, Field.Index.TOKENIZED));
doc.add (Field.Text ("sorter", "a")); doc.add (new Field("sorter", "a", Field.Store.YES, Field.Index.TOKENIZED));
writer.addDocument (doc); writer.addDocument (doc);
doc = new Document(); doc = new Document();
doc.add (Field.Text ("field", "one two x")); doc.add (new Field("field", "one two x", Field.Store.YES, Field.Index.TOKENIZED));
doc.add (Field.Text ("sorter", "c")); doc.add (new Field("sorter", "c", Field.Store.YES, Field.Index.TOKENIZED));
writer.addDocument (doc); writer.addDocument (doc);
writer.optimize (); writer.optimize ();

View File

@@ -148,7 +148,7 @@ public class TestFuzzyQuery extends TestCase {
private void addDoc(String text, IndexWriter writer) throws IOException { private void addDoc(String text, IndexWriter writer) throws IOException {
Document doc = new Document(); Document doc = new Document();
doc.add(Field.Text("field", text)); doc.add(new Field("field", text, Field.Store.YES, Field.Index.TOKENIZED));
writer.addDocument(doc); writer.addDocument(doc);
} }

View File

@@ -60,21 +60,23 @@ public class TestMultiSearcher extends TestCase
// creating a document to store // creating a document to store
Document lDoc = new Document(); Document lDoc = new Document();
lDoc.add(Field.Text("fulltext", "Once upon a time.....")); lDoc.add(new Field("fulltext", "Once upon a time.....", Field.Store.YES, Field.Index.TOKENIZED));
lDoc.add(Field.Keyword("id", "doc1")); lDoc.add(new Field("id", "doc1", Field.Store.YES, Field.Index.UN_TOKENIZED));
lDoc.add(Field.Keyword("handle", "1")); lDoc.add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
// creating a document to store // creating a document to store
Document lDoc2 = new Document(); Document lDoc2 = new Document();
lDoc2.add(Field.Text("fulltext", "in a galaxy far far away.....")); lDoc2.add(new Field("fulltext", "in a galaxy far far away.....",
lDoc2.add(Field.Keyword("id", "doc2")); Field.Store.YES, Field.Index.TOKENIZED));
lDoc2.add(Field.Keyword("handle", "1")); lDoc2.add(new Field("id", "doc2", Field.Store.YES, Field.Index.UN_TOKENIZED));
lDoc2.add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
// creating a document to store // creating a document to store
Document lDoc3 = new Document(); Document lDoc3 = new Document();
lDoc3.add(Field.Text("fulltext", "a bizarre bug manifested itself....")); lDoc3.add(new Field("fulltext", "a bizarre bug manifested itself....",
lDoc3.add(Field.Keyword("id", "doc3")); Field.Store.YES, Field.Index.TOKENIZED));
lDoc3.add(Field.Keyword("handle", "1")); lDoc3.add(new Field("id", "doc3", Field.Store.YES, Field.Index.UN_TOKENIZED));
lDoc3.add(new Field("handle", "1", Field.Store.YES, Field.Index.UN_TOKENIZED));
// creating an index writer for the first index // creating an index writer for the first index
IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true); IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(), true);