mirror of https://github.com/apache/lucene.git
stop using the deprecated Field constructor
git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@150468 13f79535-47bb-0310-9956-ffa450edef68
parent 0acf48a99e
commit a0ad582c29
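For reference, a minimal sketch of the API migration this commit applies, assuming the Field.Store / Field.Index replacement constructor; the class, method, and variable names below are hypothetical and not part of this commit:

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;

class FieldApiMigrationSketch {
  // Each call shows the enum-based constructor that replaces the deprecated
  // Field(name, value, store, index, token) boolean constructor.
  static void addFields(Document doc, String text) {
    // was: new Field("contents", text, true, true, true)   -> stored, indexed, tokenized
    doc.add(new Field("contents", text, Field.Store.YES, Field.Index.TOKENIZED));

    // was: new Field("tracer", text, true, false, false)   -> stored, not indexed
    doc.add(new Field("tracer", text, Field.Store.YES, Field.Index.NO));

    // was: new Field("string", text, false, true, false)   -> indexed as a single term, not stored
    doc.add(new Field("string", text, Field.Store.NO, Field.Index.UN_TOKENIZED));
  }
}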
@@ -183,12 +183,12 @@ public class TestPhraseQuery extends TestCase {
     IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
 
     Document doc = new Document();
-    doc.add(new Field("source", "marketing info", true, true, true));
+    doc.add(new Field("source", "marketing info", Field.Store.YES, Field.Index.TOKENIZED));
     writer.addDocument(doc);
 
     doc = new Document();
-    doc.add(new Field("contents", "foobar", true, true, true));
-    doc.add(new Field("source", "marketing info", true, true, true));
+    doc.add(new Field("contents", "foobar", Field.Store.YES, Field.Index.TOKENIZED));
+    doc.add(new Field("source", "marketing info", Field.Store.YES, Field.Index.TOKENIZED));
     writer.addDocument(doc);
 
     writer.optimize();
@@ -213,15 +213,15 @@ public class TestPhraseQuery extends TestCase {
 
     writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
     doc = new Document();
-    doc.add(new Field("contents", "map entry woo", true, true, true));
+    doc.add(new Field("contents", "map entry woo", Field.Store.YES, Field.Index.TOKENIZED));
     writer.addDocument(doc);
 
     doc = new Document();
-    doc.add(new Field("contents", "woo map entry", true, true, true));
+    doc.add(new Field("contents", "woo map entry", Field.Store.YES, Field.Index.TOKENIZED));
     writer.addDocument(doc);
 
     doc = new Document();
-    doc.add(new Field("contents", "map foobarword entry woo", true, true, true));
+    doc.add(new Field("contents", "map foobarword entry woo", Field.Store.YES, Field.Index.TOKENIZED));
     writer.addDocument(doc);
 
     writer.optimize();
@@ -113,13 +113,13 @@ implements Serializable {
     IndexWriter writer = new IndexWriter (indexStore, new SimpleAnalyzer(), true);
     for (int i=0; i<data.length; ++i) {
       if (((i%2)==0 && even) || ((i%2)==1 && odd)) {
-        Document doc = new Document(); // store, index, token
-        doc.add (new Field ("tracer", data[i][0], true, false, false));
-        doc.add (new Field ("contents", data[i][1], false, true, true));
-        if (data[i][2] != null) doc.add (new Field ("int", data[i][2], false, true, false));
-        if (data[i][3] != null) doc.add (new Field ("float", data[i][3], false, true, false));
-        if (data[i][4] != null) doc.add (new Field ("string", data[i][4], false, true, false));
-        if (data[i][5] != null) doc.add (new Field ("custom", data[i][5], false, true, false));
+        Document doc = new Document();
+        doc.add (new Field ("tracer", data[i][0], Field.Store.YES, Field.Index.NO));
+        doc.add (new Field ("contents", data[i][1], Field.Store.NO, Field.Index.TOKENIZED));
+        if (data[i][2] != null) doc.add (new Field ("int", data[i][2], Field.Store.NO, Field.Index.UN_TOKENIZED));
+        if (data[i][3] != null) doc.add (new Field ("float", data[i][3], Field.Store.NO, Field.Index.UN_TOKENIZED));
+        if (data[i][4] != null) doc.add (new Field ("string", data[i][4], Field.Store.NO, Field.Index.UN_TOKENIZED));
+        if (data[i][5] != null) doc.add (new Field ("custom", data[i][5], Field.Store.NO, Field.Index.UN_TOKENIZED));
         writer.addDocument (doc);
       }
     }