lucene4: remove DocumentBuilder and FieldBuilder
parent 594598f493
commit 5ad40205c2
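This commit drops the test-only DocumentBuilder/FieldBuilder helpers and ports every test to plain Lucene 4 document construction. The recurring before/after pattern in the test hunks below looks roughly like this (illustrative Java assembled from the diff, not itself part of the commit):

    // before: fluent helpers from org.elasticsearch.common.lucene.DocumentBuilder
    indexWriter.addDocument(doc().add(field("_id", "1")).add(field("text", "lucene")).build());

    // after: direct Lucene 4 construction
    Document document = new Document();
    document.add(new TextField("_id", "1", Field.Store.YES));
    document.add(new TextField("text", "lucene", Field.Store.YES));
    indexWriter.addDocument(document);
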
@@ -1,89 +0,0 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.lucene;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.elasticsearch.common.lucene.uid.UidField;

/**
 *
 */
public class DocumentBuilder {

    public static final Document EMPTY = new Document();

    public static DocumentBuilder doc() {
        return new DocumentBuilder();
    }

    public static Field uidField(String value) {
        return uidField(value, 0);
    }

    public static Field uidField(String value, long version) {
        return new UidField("_uid", value, version);
    }

    public static FieldBuilder field(String name, String value) {
        return field(name, value, Field.Store.YES, Field.Index.ANALYZED);
    }

    public static FieldBuilder field(String name, String value, Field.Store store, Field.Index index) {
        return new FieldBuilder(name, value, store, index);
    }

    public static FieldBuilder field(String name, String value, Field.Store store, Field.Index index, Field.TermVector termVector) {
        return new FieldBuilder(name, value, store, index, termVector);
    }

    public static FieldBuilder field(String name, byte[] value, Field.Store store) {
        return new FieldBuilder(name, value, store);
    }

    public static FieldBuilder field(String name, byte[] value, int offset, int length, Field.Store store) {
        return new FieldBuilder(name, value, offset, length, store);
    }

    private final Document document;

    private DocumentBuilder() {
        this.document = new Document();
    }

    public DocumentBuilder boost(float boost) {
        document.setBoost(boost);
        return this;
    }

    public DocumentBuilder add(Field field) {
        document.add(field);
        return this;
    }

    public DocumentBuilder add(FieldBuilder fieldBuilder) {
        document.add(fieldBuilder.build());
        return this;
    }

    public Document build() {
        return document;
    }
}

@@ -1,75 +0,0 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.lucene;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo;

/**
 *
 */
public class FieldBuilder {

    private final Field field;

    FieldBuilder(String name, String value, Field.Store store, Field.Index index) {
        field = new Field(name, value, store, index);
    }

    FieldBuilder(String name, String value, Field.Store store, Field.Index index, Field.TermVector termVector) {
        field = new Field(name, value, store, index, termVector);
    }

    FieldBuilder(String name, byte[] value, Field.Store store) {
        FieldType fieldType = new FieldType();
        fieldType.setStored(store == Field.Store.YES);
        field = new Field(name, value, fieldType);
    }

    FieldBuilder(String name, byte[] value, int offset, int length, Field.Store store) {
        FieldType fieldType = new FieldType();
        fieldType.setStored(store == Field.Store.YES);
        field = new Field(name, value, offset, length, fieldType);
    }

    public FieldBuilder boost(float boost) {
        field.setBoost(boost);
        return this;
    }

    public FieldBuilder omitNorms(boolean omitNorms) {
        field.fieldType().setOmitNorms(omitNorms);
        return this;
    }

    public FieldBuilder omitTermFreqAndPositions(boolean omitTermFreqAndPositions) {
        if (omitTermFreqAndPositions) {
            field.fieldType().setIndexOptions(FieldInfo.IndexOptions.DOCS_ONLY);
        } else {
            field.fieldType().setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
        }
        return this;
    }

    public Field build() {
        return field;
    }
}

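FieldBuilder's byte[] constructors built an explicit FieldType with setStored(...) around the raw value. With the helper gone, the same stored-only field can be created directly; a minimal sketch against the Lucene 4 API (the field name and bytes are placeholders, and StoredField is an alternative this diff does not itself use):

    // spell out the FieldType, as FieldBuilder did internally
    FieldType storedOnly = new FieldType();
    storedOnly.setStored(true);
    document.add(new Field("payload", payloadBytes, storedOnly));

    // or use the ready-made stored-only field class
    document.add(new StoredField("payload", payloadBytes));
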
@@ -19,6 +19,7 @@

package org.elasticsearch.test.unit.common.lucene.search;

import org.apache.lucene.document.*;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
@@ -30,8 +31,6 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.testng.annotations.Test;

import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
import static org.elasticsearch.common.lucene.DocumentBuilder.field;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

@@ -45,8 +44,15 @@ public class MatchAllDocsFilterTests {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

        indexWriter.addDocument(doc().add(field("_id", "1")).add(field("text", "lucene")).build());
        indexWriter.addDocument(doc().add(field("_id", "2")).add(field("text", "lucene release")).build());
        Document document = new Document();
        document.add(new TextField("_id", "1", Field.Store.YES));
        document.add(new TextField("text", "lucene", Field.Store.YES));
        indexWriter.addDocument(document);

        document = new Document();
        document.add(new TextField("_id", "2", Field.Store.YES));
        document.add(new TextField("text", "lucene release", Field.Store.YES));
        indexWriter.addDocument(document);

        IndexReader reader = IndexReader.open(indexWriter, true);
        IndexSearcher searcher = new IndexSearcher(reader);

@@ -19,6 +19,9 @@

package org.elasticsearch.test.unit.common.lucene.search;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
@@ -29,8 +32,6 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
import org.testng.annotations.Test;

import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
import static org.elasticsearch.common.lucene.DocumentBuilder.field;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

@@ -47,8 +48,15 @@ public class MoreLikeThisQueryTests {
        indexWriter.commit();


        indexWriter.addDocument(doc().add(field("_id", "1")).add(field("text", "lucene")).build());
        indexWriter.addDocument(doc().add(field("_id", "2")).add(field("text", "lucene release")).build());
        Document document = new Document();
        document.add(new TextField("_id", "1", Field.Store.YES));
        document.add(new TextField("text", "lucene", Field.Store.YES));
        indexWriter.addDocument(document);

        document = new Document();
        document.add(new TextField("_id", "2", Field.Store.YES));
        document.add(new TextField("text", "lucene release", Field.Store.YES));
        indexWriter.addDocument(document);

        IndexReader reader = IndexReader.open(indexWriter, true);
        IndexSearcher searcher = new IndexSearcher(reader);

@@ -32,8 +32,6 @@ import org.testng.annotations.Test;
import java.io.IOException;
import java.util.ArrayList;

import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
import static org.elasticsearch.common.lucene.DocumentBuilder.field;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

@@ -47,7 +45,9 @@ public class SimpleLuceneTests {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
        for (int i = 0; i < 10; i++) {
            indexWriter.addDocument(doc().add(field("str", new String(new char[]{(char) (97 + i), (char) (97 + i)}))).build());
            Document document = new Document();
            document.add(new TextField("str", new String(new char[]{(char) (97 + i), (char) (97 + i)}), Field.Store.YES));
            indexWriter.addDocument(document);
        }
        IndexReader reader = IndexReader.open(indexWriter, true);
        IndexSearcher searcher = new IndexSearcher(reader);
@@ -62,8 +62,9 @@ public class SimpleLuceneTests {
    public void testAddDocAfterPrepareCommit() throws Exception {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
        indexWriter.addDocument(doc()
                .add(field("_id", "1")).build());
        Document document = new Document();
        document.add(new TextField("_id", "1", Field.Store.YES));
        indexWriter.addDocument(document);
        DirectoryReader reader = IndexReader.open(indexWriter, true);
        assertThat(reader.numDocs(), equalTo(1));

@@ -71,8 +72,9 @@ public class SimpleLuceneTests {
        reader = DirectoryReader.openIfChanged(reader);
        assertThat(reader.numDocs(), equalTo(1));

        indexWriter.addDocument(doc()
                .add(field("_id", "2")).build());
        document = new Document();
        document.add(new TextField("_id", "2", Field.Store.YES));
        indexWriter.addDocument(document);
        indexWriter.commit();
        reader = DirectoryReader.openIfChanged(reader);
        assertThat(reader.numDocs(), equalTo(2));
@@ -83,7 +85,10 @@ public class SimpleLuceneTests {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

        indexWriter.addDocument(doc().add(field("_id", "1")).add(new IntField("test", 2, IntField.TYPE_STORED)).build());
        Document document = new Document();
        document.add(new TextField("_id", "1", Field.Store.YES));
        document.add(new IntField("test", 2, IntField.TYPE_STORED));
        indexWriter.addDocument(document);

        IndexReader reader = IndexReader.open(indexWriter, true);
        IndexSearcher searcher = new IndexSearcher(reader);
@@ -112,9 +117,10 @@ public class SimpleLuceneTests {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

        indexWriter.addDocument(doc()
                .add(field("_id", "1"))
                .add(field("#id", "1")).build());
        Document document = new Document();
        document.add(new TextField("_id", "1", Field.Store.YES));
        document.add(new TextField("#id", "1", Field.Store.YES));
        indexWriter.addDocument(document);

        IndexReader reader = IndexReader.open(indexWriter, true);
        IndexSearcher searcher = new IndexSearcher(reader);
@@ -146,10 +152,11 @@ public class SimpleLuceneTests {
            for (int j = 0; j < i; j++) {
                value.append(" ").append("value");
            }
            indexWriter.addDocument(doc()
                    .add(field("id", Integer.toString(i)))
                    .add(field("value", value.toString()))
                    .boost(i).build());
            Document document = new Document();
            document.add(new TextField("_id", Integer.toString(i), Field.Store.YES));
            document.add(new TextField("value", value.toString(), Field.Store.YES));
            document.boost(i);
            indexWriter.addDocument(document);
        }

        IndexReader reader = IndexReader.open(indexWriter, true);
@@ -173,9 +180,10 @@ public class SimpleLuceneTests {
        DirectoryReader reader = IndexReader.open(indexWriter, true);

        for (int i = 0; i < 100; i++) {
            indexWriter.addDocument(doc()
                    .add(field("id", Integer.toString(i)))
                    .boost(i).build());
            Document document = new Document();
            document.add(new TextField("_id", Integer.toString(i), Field.Store.YES));
            document.boost(i);
            indexWriter.addDocument(document);
        }
        reader = refreshReader(reader);

@@ -19,7 +19,10 @@

package org.elasticsearch.test.unit.deps.lucene;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
@@ -32,8 +35,6 @@ import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.common.lucene.Lucene;
import org.testng.annotations.Test;

import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
import static org.elasticsearch.common.lucene.DocumentBuilder.field;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;

@@ -48,7 +49,10 @@ public class VectorHighlighterTests {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

        indexWriter.addDocument(doc().add(field("_id", "1")).add(field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)).build());
        Document document = new Document();
        document.add(new TextField("_id", "1", Field.Store.YES));
        document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
        indexWriter.addDocument(document);

        IndexReader reader = IndexReader.open(indexWriter, true);
        IndexSearcher searcher = new IndexSearcher(reader);
@@ -68,7 +72,10 @@ public class VectorHighlighterTests {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

        indexWriter.addDocument(doc().add(field("_id", "1")).add(field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)).build());
        Document document = new Document();
        document.add(new TextField("_id", "1", Field.Store.YES));
        document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
        indexWriter.addDocument(document);

        IndexReader reader = IndexReader.open(indexWriter, true);
        IndexSearcher searcher = new IndexSearcher(reader);
@@ -107,7 +114,10 @@ public class VectorHighlighterTests {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

        indexWriter.addDocument(doc().add(field("_id", "1")).add(field("content", "the big bad dog", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS)).build());
        Document document = new Document();
        document.add(new TextField("_id", "1", Field.Store.YES));
        document.add(new Field("content", "the big bad dog", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
        indexWriter.addDocument(document);

        IndexReader reader = IndexReader.open(indexWriter, true);
        IndexSearcher searcher = new IndexSearcher(reader);
@@ -126,7 +136,10 @@ public class VectorHighlighterTests {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

        indexWriter.addDocument(doc().add(field("_id", "1")).add(field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO)).build());
        Document document = new Document();
        document.add(new TextField("_id", "1", Field.Store.YES));
        document.add(new Field("content", "the big bad dog", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.NO));
        indexWriter.addDocument(document);

        IndexReader reader = IndexReader.open(indexWriter, true);
        IndexSearcher searcher = new IndexSearcher(reader);

@@ -19,10 +19,10 @@

package org.elasticsearch.test.unit.index.cache.filter;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
@@ -35,8 +35,6 @@ import org.testng.annotations.Test;

import java.io.IOException;

import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
import static org.elasticsearch.common.lucene.DocumentBuilder.field;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
@@ -55,12 +53,12 @@ public class FilterCacheTests {
    private void verifyCache(FilterCache filterCache) throws Exception {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
        IndexReader reader = IndexReader.open(indexWriter, true);
        DirectoryReader reader = IndexReader.open(indexWriter, true);

        for (int i = 0; i < 100; i++) {
            indexWriter.addDocument(doc()
                    .add(field("id", Integer.toString(i)))
                    .boost(i).build());
            Document document = new Document();
            document.add(new TextField("id", Integer.toString(i), Field.Store.YES));
            indexWriter.addDocument(document);
        }

        reader = refreshReader(reader);
@@ -82,9 +80,9 @@ public class FilterCacheTests {
        indexWriter.close();
    }

    private IndexReader refreshReader(IndexReader reader) throws IOException {
    private DirectoryReader refreshReader(DirectoryReader reader) throws IOException {
        IndexReader oldReader = reader;
        reader = reader.reopen();
        reader = DirectoryReader.openIfChanged(reader);
        if (reader != oldReader) {
            oldReader.close();
        }

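Besides the document-building change, the FilterCacheTests hunk above moves reader refreshing from the removed IndexReader.reopen() to DirectoryReader.openIfChanged(reader), which in Lucene 4 returns null when the index has not changed. An illustrative refresh helper using that API (null handling added here for clarity; the test keeps its original reference-comparison style):

    private DirectoryReader refresh(DirectoryReader reader) throws IOException {
        DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
        if (newReader == null) {
            return reader;      // nothing new was committed, keep the current reader
        }
        reader.close();         // a fresher reader exists, release the old one
        return newReader;
    }
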
@@ -19,6 +19,9 @@

package org.elasticsearch.test.unit.index.deletionpolicy;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.common.lucene.Lucene;
@@ -33,8 +36,6 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;

import static org.apache.lucene.index.IndexReader.listCommits;
import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
import static org.elasticsearch.common.lucene.DocumentBuilder.field;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
@@ -64,29 +65,35 @@ public class SnapshotDeletionPolicyTests {
        indexWriter.close();
        dir.close();
    }

    private Document testDocument() {
        Document document = new Document();
        document.add(new TextField("test", "1", Field.Store.YES));
        return document;
    }

    @Test
    public void testSimpleSnapshot() throws Exception {
        // add a document and commit, resulting in one commit point
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();

        assertThat(listCommits(dir).size(), equalTo(1));

        // add another document and commit, resulting again in one commit point
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(1));

        // snapshot the last commit, and then add a document and commit, now we should have two commit points
        SnapshotIndexCommit snapshot = deletionPolicy.snapshot();
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(2));

        // release the commit, add a document and commit, now we should be back to one commit point
        assertThat(snapshot.release(), equalTo(true));
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(1));
    }
@@ -94,7 +101,7 @@ public class SnapshotDeletionPolicyTests {
    @Test
    public void testMultiSnapshot() throws Exception {
        // add a document and commit, resulting in one commit point
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(1));

@@ -103,19 +110,19 @@ public class SnapshotDeletionPolicyTests {
        SnapshotIndexCommit snapshot2 = deletionPolicy.snapshot();

        // we should have two commits points
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(2));

        // release one snapshot, we should still have two commit points
        assertThat(snapshot1.release(), equalTo(true));
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(2));

        // release the second snapshot, we should be back to one commit
        assertThat(snapshot2.release(), equalTo(true));
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(1));
    }
@@ -123,7 +130,7 @@ public class SnapshotDeletionPolicyTests {
    @Test
    public void testMultiReleaseException() throws Exception {
        // add a document and commit, resulting in one commit point
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(1));

@@ -136,18 +143,18 @@ public class SnapshotDeletionPolicyTests {
    @Test
    public void testSimpleSnapshots() throws Exception {
        // add a document and commit, resulting in one commit point
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(1));

        // add another document and commit, resulting again in one commint point
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(1));

        // snapshot the last commit, and then add a document and commit, now we should have two commit points
        SnapshotIndexCommit snapshot = deletionPolicy.snapshot();
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(2));

@@ -159,13 +166,13 @@ public class SnapshotDeletionPolicyTests {
        // we should have 3 commits points since we are holding onto the first two with snapshots
        // and we are using the keep only last
        assertThat(snapshot.release(), equalTo(true));
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(3));

        // now release the snapshots, we should be back to a single commit point
        assertThat(snapshots.release(), equalTo(true));
        indexWriter.addDocument(doc().add(field("test", "1")).build());
        indexWriter.addDocument(testDocument());
        indexWriter.commit();
        assertThat(listCommits(dir).size(), equalTo(1));
    }

@@ -19,13 +19,16 @@

package org.elasticsearch.test.unit.index.engine;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.uid.UidField;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.deletionpolicy.KeepOnlyLastDeletionPolicy;
@@ -60,7 +63,6 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import static org.elasticsearch.common.lucene.DocumentBuilder.*;
import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA;
import static org.hamcrest.MatcherAssert.assertThat;
@@ -106,6 +108,18 @@ public abstract class AbstractSimpleEngineTests {
            threadPool.shutdownNow();
        }
    }

    private Document testDocumentWithTextField(String id) {
        Document document = testDocument(id);
        document.add(new TextField("value", "test", Field.Store.YES));
        return document;
    }

    private Document testDocument(String id) {
        Document document = new Document();
        document.add(new UidField("_uid", id, 0));
        return document;
    }

    protected Store createStore() throws IOException {
        return new Store(shardId, EMPTY_SETTINGS, null, new IndexSettingsService(shardId.index(), EMPTY_SETTINGS), new RamDirectoryService(shardId, EMPTY_SETTINGS));
@@ -151,10 +165,12 @@ public abstract class AbstractSimpleEngineTests {
        assertThat(segments.isEmpty(), equalTo(true));

        // create a doc and refresh
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).add(field("value", "test")).add(field(SourceFieldMapper.NAME, B_1.toBytes(), Field.Store.YES)).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        Document document = testDocumentWithTextField("1");
        document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), TextField.TYPE_STORED));
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, document, Lucene.STANDARD_ANALYZER, B_1, false);
        engine.create(new Engine.Create(null, newUid("1"), doc));

        ParsedDocument doc2 = new ParsedDocument("2", "2", "test", null, -1, -1, doc().add(uidField("2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_2, false);
        ParsedDocument doc2 = new ParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField("2"), Lucene.STANDARD_ANALYZER, B_2, false);
        engine.create(new Engine.Create(null, newUid("2"), doc2));
        engine.refresh(new Engine.Refresh(true));

@@ -175,7 +191,7 @@ public abstract class AbstractSimpleEngineTests {
        assertThat(segments.get(0).deletedDocs(), equalTo(0));


        ParsedDocument doc3 = new ParsedDocument("3", "3", "test", null, -1, -1, doc().add(uidField("3")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_3, false);
        ParsedDocument doc3 = new ParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField("3"), Lucene.STANDARD_ANALYZER, B_3, false);
        engine.create(new Engine.Create(null, newUid("3"), doc3));
        engine.refresh(new Engine.Refresh(true));

@@ -216,7 +232,9 @@ public abstract class AbstractSimpleEngineTests {
        searchResult.release();

        // create a document
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).add(field("value", "test")).add(field(SourceFieldMapper.NAME, B_1.toBytes(), Field.Store.YES)).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        Document document = testDocumentWithTextField("1");
        document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), TextField.TYPE_STORED));
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, document, Lucene.STANDARD_ANALYZER, B_1, false);
        engine.create(new Engine.Create(null, newUid("1"), doc));

        // its not there...
@@ -250,7 +268,10 @@ public abstract class AbstractSimpleEngineTests {
        assertThat(getResult.docIdAndVersion(), notNullValue());

        // now do an update
        doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).add(field("value", "test1")).add(field(SourceFieldMapper.NAME, B_2.toBytes(), Field.Store.YES)).build(), Lucene.STANDARD_ANALYZER, B_2, false);
        document = testDocument("1");
        document.add(new TextField("value", "test1", Field.Store.YES));
        document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), TextField.TYPE_STORED));
        doc = new ParsedDocument("1", "1", "test", null, -1, -1, document, Lucene.STANDARD_ANALYZER, B_2, false);
        engine.index(new Engine.Index(null, newUid("1"), doc));

        // its not updated yet...
@@ -299,7 +320,9 @@ public abstract class AbstractSimpleEngineTests {
        searchResult.release();

        // add it back
        doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).add(field("value", "test")).add(field(SourceFieldMapper.NAME, B_1.toBytes(), Field.Store.YES)).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        document = testDocumentWithTextField("1");
        document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), TextField.TYPE_STORED));
        doc = new ParsedDocument("1", "1", "test", null, -1, -1, document, Lucene.STANDARD_ANALYZER, B_1, false);
        engine.create(new Engine.Create(null, newUid("1"), doc));

        // its not there...
@@ -331,7 +354,9 @@ public abstract class AbstractSimpleEngineTests {

        // make sure we can still work with the engine
        // now do an update
        doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).add(field("value", "test1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        document = testDocument("1");
        document.add(new TextField("value", "test1", Field.Store.YES));
        doc = new ParsedDocument("1", "1", "test", null, -1, -1, document, Lucene.STANDARD_ANALYZER, B_1, false);
        engine.index(new Engine.Index(null, newUid("1"), doc));

        // its not updated yet...
@@ -360,7 +385,7 @@ public abstract class AbstractSimpleEngineTests {
        searchResult.release();

        // create a document
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        engine.create(new Engine.Create(null, newUid("1"), doc));

        // its not there...
@@ -394,7 +419,7 @@ public abstract class AbstractSimpleEngineTests {
    @Test
    public void testSimpleSnapshot() throws Exception {
        // create a document
        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        engine.create(new Engine.Create(null, newUid("1"), doc1));

        final ExecutorService executorService = Executors.newCachedThreadPool();
@@ -412,10 +437,10 @@ public abstract class AbstractSimpleEngineTests {
            @Override
            public Object call() throws Exception {
                engine.flush(new Engine.Flush());
                ParsedDocument doc2 = new ParsedDocument("2", "2", "test", null, -1, -1, doc().add(uidField("2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_2, false);
                ParsedDocument doc2 = new ParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField("2"), Lucene.STANDARD_ANALYZER, B_2, false);
                engine.create(new Engine.Create(null, newUid("2"), doc2));
                engine.flush(new Engine.Flush());
                ParsedDocument doc3 = new ParsedDocument("3", "3", "test", null, -1, -1, doc().add(uidField("3")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_3, false);
                ParsedDocument doc3 = new ParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField("3"), Lucene.STANDARD_ANALYZER, B_3, false);
                engine.create(new Engine.Create(null, newUid("3"), doc3));
                return null;
            }
@@ -452,7 +477,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testSimpleRecover() throws Exception {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        engine.create(new Engine.Create(null, newUid("1"), doc));
        engine.flush(new Engine.Flush());

@@ -497,10 +522,10 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testRecoverWithOperationsBetweenPhase1AndPhase2() throws Exception {
        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        engine.create(new Engine.Create(null, newUid("1"), doc1));
        engine.flush(new Engine.Flush());
        ParsedDocument doc2 = new ParsedDocument("2", "2", "test", null, -1, -1, doc().add(uidField("2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_2, false);
        ParsedDocument doc2 = new ParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField("2"), Lucene.STANDARD_ANALYZER, B_2, false);
        engine.create(new Engine.Create(null, newUid("2"), doc2));

        engine.recover(new Engine.RecoveryHandler() {
@@ -528,10 +553,10 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testRecoverWithOperationsBetweenPhase1AndPhase2AndPhase3() throws Exception {
        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc1 = new ParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        engine.create(new Engine.Create(null, newUid("1"), doc1));
        engine.flush(new Engine.Flush());
        ParsedDocument doc2 = new ParsedDocument("2", "2", "test", null, -1, -1, doc().add(uidField("2")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_2, false);
        ParsedDocument doc2 = new ParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField("2"), Lucene.STANDARD_ANALYZER, B_2, false);
        engine.create(new Engine.Create(null, newUid("2"), doc2));

        engine.recover(new Engine.RecoveryHandler() {
@@ -547,7 +572,7 @@ public abstract class AbstractSimpleEngineTests {
                assertThat(create.source().toBytesArray(), equalTo(B_2));

                // add for phase3
                ParsedDocument doc3 = new ParsedDocument("3", "3", "test", null, -1, -1, doc().add(uidField("3")).add(field("value", "test")).build(), Lucene.STANDARD_ANALYZER, B_3, false);
                ParsedDocument doc3 = new ParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField("3"), Lucene.STANDARD_ANALYZER, B_3, false);
                engine.create(new Engine.Create(null, newUid("3"), doc3));
            }

@@ -566,7 +591,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testVersioningNewCreate() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Create create = new Engine.Create(null, newUid("1"), doc);
        engine.create(create);
        assertThat(create.version(), equalTo(1l));
@@ -578,7 +603,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testExternalVersioningNewCreate() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Create create = new Engine.Create(null, newUid("1"), doc).versionType(VersionType.EXTERNAL).version(12);
        engine.create(create);
        assertThat(create.version(), equalTo(12l));
@@ -590,7 +615,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testVersioningNewIndex() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(1l));
@@ -602,7 +627,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testExternalVersioningNewIndex() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Index index = new Engine.Index(null, newUid("1"), doc).versionType(VersionType.EXTERNAL).version(12);
        engine.index(index);
        assertThat(index.version(), equalTo(12l));
@@ -614,7 +639,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testVersioningIndexConflict() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(1l));
@@ -643,7 +668,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testExternalVersioningIndexConflict() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Index index = new Engine.Index(null, newUid("1"), doc).versionType(VersionType.EXTERNAL).version(12);
        engine.index(index);
        assertThat(index.version(), equalTo(12l));
@@ -663,7 +688,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testVersioningIndexConflictWithFlush() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(1l));
@@ -694,7 +719,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testExternalVersioningIndexConflictWithFlush() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Index index = new Engine.Index(null, newUid("1"), doc).versionType(VersionType.EXTERNAL).version(12);
        engine.index(index);
        assertThat(index.version(), equalTo(12l));
@@ -716,7 +741,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testVersioningDeleteConflict() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(1l));
@@ -767,7 +792,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testVersioningDeleteConflictWithFlush() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(1l));
@@ -824,7 +849,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testVersioningCreateExistsException() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Create create = new Engine.Create(null, newUid("1"), doc);
        engine.create(create);
        assertThat(create.version(), equalTo(1l));
@@ -840,7 +865,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testVersioningCreateExistsExceptionWithFlush() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Create create = new Engine.Create(null, newUid("1"), doc);
        engine.create(create);
        assertThat(create.version(), equalTo(1l));
@@ -858,7 +883,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testVersioningReplicaConflict1() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(1l));
@@ -893,7 +918,7 @@ public abstract class AbstractSimpleEngineTests {

    @Test
    public void testVersioningReplicaConflict2() {
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, doc().add(uidField("1")).build(), Lucene.STANDARD_ANALYZER, B_1, false);
        ParsedDocument doc = new ParsedDocument("1", "1", "test", null, -1, -1, testDocument("1"), Lucene.STANDARD_ANALYZER, B_1, false);
        Engine.Index index = new Engine.Index(null, newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(1l));

@@ -19,7 +19,9 @@

package org.elasticsearch.test.unit.index.field.data.doubles;

import org.apache.lucene.document.NumericField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
@@ -31,7 +33,6 @@ import org.testng.annotations.Test;

import java.util.ArrayList;

import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

@@ -45,28 +46,28 @@ public class DoubleFieldDataTests {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

        indexWriter.addDocument(doc()
                .add(new NumericField("svalue").setDoubleValue(4))
                .add(new NumericField("mvalue").setDoubleValue(104))
                .build());
        Document document = new Document();
        document.add(new DoubleField("svalue", 4, Field.Store.NO));
        document.add(new DoubleField("mvalue", 104, Field.Store.NO));
        indexWriter.addDocument(document);

        indexWriter.addDocument(doc()
                .add(new NumericField("svalue").setDoubleValue(3))
                .add(new NumericField("mvalue").setDoubleValue(104))
                .add(new NumericField("mvalue").setDoubleValue(105))
                .build());
        document = new Document();
        document.add(new DoubleField("svalue", 3, Field.Store.NO));
        document.add(new DoubleField("mvalue", 104, Field.Store.NO));
        document.add(new DoubleField("mvalue", 105, Field.Store.NO));
        indexWriter.addDocument(document);

        indexWriter.addDocument(doc()
                .add(new NumericField("svalue").setDoubleValue(7))
                .build());
        document = new Document();
        document.add(new DoubleField("svalue", 7, Field.Store.NO));
        indexWriter.addDocument(document);

        indexWriter.addDocument(doc()
                .add(new NumericField("mvalue").setDoubleValue(102))
                .build());
        document = new Document();
        document.add(new DoubleField("mvalue", 102, Field.Store.NO));
        indexWriter.addDocument(document);

        indexWriter.addDocument(doc()
                .add(new NumericField("svalue").setDoubleValue(4))
                .build());
        document = new Document();
        document.add(new DoubleField("svalue", 4, Field.Store.NO));
        indexWriter.addDocument(document);

        IndexReader reader = IndexReader.open(indexWriter, true);

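The field-data tests above (and the float/int/long/short variants that follow) replace the single NumericField builder with the typed Lucene 4 field classes. The essence of the swap, mirroring the diff:

    // Lucene 3.x style, removed: one NumericField configured per value type
    // doc().add(new NumericField("svalue").setDoubleValue(4)).build()

    // Lucene 4 style, added: a concrete class per numeric type (DoubleField, FloatField, IntField, LongField)
    Document document = new Document();
    document.add(new DoubleField("svalue", 4, Field.Store.NO));
    indexWriter.addDocument(document);
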
@@ -19,7 +19,9 @@

package org.elasticsearch.test.unit.index.field.data.floats;

import org.apache.lucene.document.NumericField;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
@@ -31,7 +33,6 @@ import org.testng.annotations.Test;

import java.util.ArrayList;

import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

@@ -45,28 +46,28 @@ public class FloatFieldDataTests {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

        indexWriter.addDocument(doc()
                .add(new NumericField("svalue").setFloatValue(4))
                .add(new NumericField("mvalue").setFloatValue(104))
                .build());
        Document document = new Document();
        document.add(new FloatField("svalue", 4, Field.Store.NO));
        document.add(new FloatField("mvalue", 104, Field.Store.NO));
        indexWriter.addDocument(document);

        indexWriter.addDocument(doc()
                .add(new NumericField("svalue").setFloatValue(3))
                .add(new NumericField("mvalue").setFloatValue(104))
                .add(new NumericField("mvalue").setFloatValue(105))
                .build());
        document = new Document();
        document.add(new FloatField("svalue", 3, Field.Store.NO));
        document.add(new FloatField("mvalue", 104, Field.Store.NO));
        document.add(new FloatField("mvalue", 105, Field.Store.NO));
        indexWriter.addDocument(document);

        indexWriter.addDocument(doc()
                .add(new NumericField("svalue").setFloatValue(7))
                .build());
        document = new Document();
        document.add(new FloatField("svalue", 7, Field.Store.NO));
        indexWriter.addDocument(document);

        indexWriter.addDocument(doc()
                .add(new NumericField("mvalue").setFloatValue(102))
                .build());
        document = new Document();
        document.add(new FloatField("mvalue", 102, Field.Store.NO));
        indexWriter.addDocument(document);

        indexWriter.addDocument(doc()
                .add(new NumericField("svalue").setFloatValue(4))
                .build());
        document = new Document();
        document.add(new FloatField("svalue", 4, Field.Store.NO));
        indexWriter.addDocument(document);

        IndexReader reader = IndexReader.open(indexWriter, true);

@ -19,7 +19,9 @@
|
|||
|
||||
package org.elasticsearch.test.unit.index.field.data.ints;
|
||||
|
||||
import org.apache.lucene.document.NumericField;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.IntField;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
|
@ -31,7 +33,6 @@ import org.testng.annotations.Test;
|
|||
|
||||
import java.util.ArrayList;
|
||||
|
||||
import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
|
@@ -45,28 +46,28 @@ public class IntFieldDataTests {
         Directory dir = new RAMDirectory();
         IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setIntValue(4))
-                .add(new NumericField("mvalue").setIntValue(104))
-                .build());
+        Document document = new Document();
+        document.add(new IntField("svalue", 4, Field.Store.NO));
+        document.add(new IntField("mvalue", 104, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setIntValue(3))
-                .add(new NumericField("mvalue").setIntValue(104))
-                .add(new NumericField("mvalue").setIntValue(105))
-                .build());
+        document = new Document();
+        document.add(new IntField("svalue", 3, Field.Store.NO));
+        document.add(new IntField("mvalue", 104, Field.Store.NO));
+        document.add(new IntField("mvalue", 105, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setIntValue(7))
-                .build());
+        document = new Document();
+        document.add(new IntField("svalue", 7, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("mvalue").setIntValue(102))
-                .build());
+        document = new Document();
+        document.add(new IntField("mvalue", 102, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setIntValue(4))
-                .build());
+        document = new Document();
+        document.add(new IntField("svalue", 4, Field.Store.NO));
+        indexWriter.addDocument(document);

         IndexReader reader = IndexReader.open(indexWriter, true);
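For orientation only (not part of the commit): a minimal, self-contained sketch of the new-style numeric-field indexing that these test hunks converge on. It assumes Lucene 4.x and substitutes the plain Lucene constants Version.LUCENE_40 and StandardAnalyzer for ElasticSearch's Lucene.VERSION and Lucene.STANDARD_ANALYZER helpers; the class name IntFieldIndexingSketch is made up for the example.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class IntFieldIndexingSketch {
    public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir,
                new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40)));

        // One single-valued field plus one multi-valued field, added directly to a
        // Document instead of going through the removed DocumentBuilder/FieldBuilder helpers.
        Document document = new Document();
        document.add(new IntField("svalue", 4, Field.Store.NO));
        document.add(new IntField("mvalue", 104, Field.Store.NO));
        indexWriter.addDocument(document);

        // A multi-valued "mvalue" is expressed by adding the field twice.
        document = new Document();
        document.add(new IntField("svalue", 3, Field.Store.NO));
        document.add(new IntField("mvalue", 104, Field.Store.NO));
        document.add(new IntField("mvalue", 105, Field.Store.NO));
        indexWriter.addDocument(document);

        // Open a near-real-time reader on the writer, then clean up.
        IndexReader reader = DirectoryReader.open(indexWriter, true);
        System.out.println("docs indexed: " + reader.numDocs());
        reader.close();
        indexWriter.close();
        dir.close();
    }
}

The float, long, and short variants in the surrounding hunks follow the same shape, with FloatField or LongField in place of IntField.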
@@ -19,7 +19,9 @@
 package org.elasticsearch.test.unit.index.field.data.longs;

-import org.apache.lucene.document.NumericField;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.LongField;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
@@ -31,7 +33,6 @@ import org.testng.annotations.Test;

 import java.util.ArrayList;

-import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;
@@ -45,28 +46,28 @@ public class LongFieldDataTests {
         Directory dir = new RAMDirectory();
         IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setLongValue(4))
-                .add(new NumericField("mvalue").setLongValue(104))
-                .build());
+        Document document = new Document();
+        document.add(new LongField("svalue", 4, Field.Store.NO));
+        document.add(new LongField("mvalue", 104, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setLongValue(3))
-                .add(new NumericField("mvalue").setLongValue(104))
-                .add(new NumericField("mvalue").setLongValue(105))
-                .build());
+        document = new Document();
+        document.add(new LongField("svalue", 3, Field.Store.NO));
+        document.add(new LongField("mvalue", 104, Field.Store.NO));
+        document.add(new LongField("mvalue", 105, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setLongValue(7))
-                .build());
+        document = new Document();
+        document.add(new LongField("svalue", 7, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("mvalue").setLongValue(102))
-                .build());
+        document = new Document();
+        document.add(new LongField("mvalue", 102, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setLongValue(4))
-                .build());
+        document = new Document();
+        document.add(new LongField("svalue", 4, Field.Store.NO));
+        indexWriter.addDocument(document);

         IndexReader reader = IndexReader.open(indexWriter, true);
@@ -19,7 +19,9 @@
 package org.elasticsearch.test.unit.index.field.data.shorts;

-import org.apache.lucene.document.NumericField;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.IntField;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
@@ -31,7 +33,6 @@ import org.testng.annotations.Test;

 import java.util.ArrayList;

-import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;
@@ -45,28 +46,28 @@ public class ShortFieldDataTests {
         Directory dir = new RAMDirectory();
         IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setIntValue(4))
-                .add(new NumericField("mvalue").setIntValue(104))
-                .build());
+        Document document = new Document();
+        document.add(new IntField("svalue", 4, Field.Store.NO));
+        document.add(new IntField("mvalue", 104, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setIntValue(3))
-                .add(new NumericField("mvalue").setIntValue(104))
-                .add(new NumericField("mvalue").setIntValue(105))
-                .build());
+        document = new Document();
+        document.add(new IntField("svalue", 3, Field.Store.NO));
+        document.add(new IntField("mvalue", 104, Field.Store.NO));
+        document.add(new IntField("mvalue", 105, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setIntValue(7))
-                .build());
+        document = new Document();
+        document.add(new IntField("svalue", 7, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("mvalue").setIntValue(102))
-                .build());
+        document = new Document();
+        document.add(new IntField("mvalue", 102, Field.Store.NO));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(new NumericField("svalue").setIntValue(4))
-                .build());
+        document = new Document();
+        document.add(new IntField("svalue", 4, Field.Store.NO));
+        indexWriter.addDocument(document);

         IndexReader reader = IndexReader.open(indexWriter, true);
@@ -19,6 +19,9 @@
 package org.elasticsearch.test.unit.index.field.data.strings;

+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
@@ -30,8 +33,6 @@ import org.testng.annotations.Test;

 import java.util.ArrayList;

-import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
-import static org.elasticsearch.common.lucene.DocumentBuilder.field;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;
@@ -45,22 +46,27 @@ public class StringFieldDataTests {
         Directory dir = new RAMDirectory();
         IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

-        indexWriter.addDocument(doc()
-                .add(field("svalue", "zzz"))
-                .add(field("mvalue", "111")).build());
+        Document document = new Document();
+        document.add(new TextField("svalue", "zzz", Field.Store.YES));
+        document.add(new TextField("mvalue", "111", Field.Store.YES));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(field("svalue", "xxx"))
-                .add(field("mvalue", "222 333")).build());
+        document = new Document();
+        document.add(new TextField("svalue", "xxx", Field.Store.YES));
+        document.add(new TextField("mvalue", "222 333", Field.Store.YES));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(field("mvalue", "333 444")).build());
+        document = new Document();
+        document.add(new TextField("mvalue", "333 444", Field.Store.YES));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(field("svalue", "aaa")).build());
+        document = new Document();
+        document.add(new TextField("svalue", "aaa", Field.Store.YES));
+        indexWriter.addDocument(document);

-        indexWriter.addDocument(doc()
-                .add(field("svalue", "aaa")).build());
+        document = new Document();
+        document.add(new TextField("svalue", "aaa", Field.Store.YES));
+        indexWriter.addDocument(document);

         AtomicReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter, false));
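Similarly, a minimal sketch of the string-field variant that this last hunk lands on, again assuming Lucene 4.x with plain Lucene constants standing in for ElasticSearch's Lucene.VERSION and Lucene.STANDARD_ANALYZER helpers; the class name StringFieldIndexingSketch is made up for the example. SlowCompositeReaderWrapper provides the single AtomicReader view that the field-data tests read from.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class StringFieldIndexingSketch {
    public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        IndexWriter indexWriter = new IndexWriter(dir,
                new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40)));

        // TextField replaces the old field(name, value) builder: analyzed, stored text.
        Document document = new Document();
        document.add(new TextField("svalue", "zzz", Field.Store.YES));
        document.add(new TextField("mvalue", "111", Field.Store.YES));
        indexWriter.addDocument(document);

        document = new Document();
        document.add(new TextField("svalue", "xxx", Field.Store.YES));
        document.add(new TextField("mvalue", "222 333", Field.Store.YES));
        indexWriter.addDocument(document);

        // Wrap the (possibly multi-segment) DirectoryReader into one AtomicReader view.
        AtomicReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter, false));
        System.out.println("terms for svalue present: " + (reader.terms("svalue") != null));
        reader.close();
        indexWriter.close();
        dir.close();
    }
}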