lucene4: unit tests cleanup

Authored by Igor Motov on 2012-10-31 14:29:11 -04:00; committed by Shay Banon
parent 5a553a1924
commit bb76542068
12 changed files with 244 additions and 132 deletions

View File

@@ -0,0 +1,89 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.elasticsearch.common.lucene.uid.UidField;
/**
*
*/
public class DocumentBuilder {
public static final Document EMPTY = new Document();
public static DocumentBuilder doc() {
return new DocumentBuilder();
}
public static Field uidField(String value) {
return uidField(value, 0);
}
public static Field uidField(String value, long version) {
return new UidField("_uid", value, version);
}
public static FieldBuilder field(String name, String value) {
return field(name, value, Field.Store.YES, Field.Index.ANALYZED);
}
public static FieldBuilder field(String name, String value, Field.Store store, Field.Index index) {
return new FieldBuilder(name, value, store, index);
}
public static FieldBuilder field(String name, String value, Field.Store store, Field.Index index, Field.TermVector termVector) {
return new FieldBuilder(name, value, store, index, termVector);
}
public static FieldBuilder field(String name, byte[] value, Field.Store store) {
return new FieldBuilder(name, value, store);
}
public static FieldBuilder field(String name, byte[] value, int offset, int length, Field.Store store) {
return new FieldBuilder(name, value, offset, length, store);
}
private final Document document;
private DocumentBuilder() {
this.document = new Document();
}
public DocumentBuilder boost(float boost) {
document.setBoost(boost);
return this;
}
public DocumentBuilder add(Field field) {
document.add(field);
return this;
}
public DocumentBuilder add(FieldBuilder fieldBuilder) {
document.add(fieldBuilder.build());
return this;
}
public Document build() {
return document;
}
}
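
A minimal usage sketch of the builder above, assuming it is on the test classpath; the wrapper class and the literal values here are hypothetical, only doc(), field(), uidField() and build() come from the helper itself:

import org.apache.lucene.document.Document;

import static org.elasticsearch.common.lucene.DocumentBuilder.doc;
import static org.elasticsearch.common.lucene.DocumentBuilder.field;
import static org.elasticsearch.common.lucene.DocumentBuilder.uidField;

public class DocumentBuilderUsageSketch {
    public static void main(String[] args) {
        // Chain the static factories into a fluent test-document definition:
        // a _uid field plus one stored, analyzed text field.
        Document document = doc()
                .add(uidField("type#1"))
                .add(field("message", "hello world"))
                .build();
        System.out.println(document.getFields().size()); // prints 2
    }
}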

View File

@@ -0,0 +1,75 @@
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo;
/**
*
*/
public class FieldBuilder {
private final Field field;
FieldBuilder(String name, String value, Field.Store store, Field.Index index) {
field = new Field(name, value, store, index);
}
FieldBuilder(String name, String value, Field.Store store, Field.Index index, Field.TermVector termVector) {
field = new Field(name, value, store, index, termVector);
}
FieldBuilder(String name, byte[] value, Field.Store store) {
FieldType fieldType = new FieldType();
fieldType.setStored(store == Field.Store.YES);
field = new Field(name, value, fieldType);
}
FieldBuilder(String name, byte[] value, int offset, int length, Field.Store store) {
FieldType fieldType = new FieldType();
fieldType.setStored(store == Field.Store.YES);
field = new Field(name, value, offset, length, fieldType);
}
public FieldBuilder boost(float boost) {
field.setBoost(boost);
return this;
}
public FieldBuilder omitNorms(boolean omitNorms) {
field.fieldType().setOmitNorms(omitNorms);
return this;
}
public FieldBuilder omitTermFreqAndPositions(boolean omitTermFreqAndPositions) {
if (omitTermFreqAndPositions) {
field.fieldType().setIndexOptions(FieldInfo.IndexOptions.DOCS_ONLY);
} else {
field.fieldType().setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
}
return this;
}
public Field build() {
return field;
}
}
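
The omitTermFreqAndPositions() mapping above reflects a Lucene 4 change: postings detail is no longer a per-field boolean but part of the (freezable) FieldType. A standalone sketch of that configuration, with a hypothetical class name:

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo;

public class IndexOptionsSketch {
    public static void main(String[] args) {
        // Lucene 4 expresses "omit term frequencies and positions" through
        // FieldType.setIndexOptions() instead of a setter on the field.
        FieldType type = new FieldType();
        type.setIndexed(true);
        type.setTokenized(true);
        type.setIndexOptions(FieldInfo.IndexOptions.DOCS_ONLY); // was omitTermFreqAndPositions(true)
        type.freeze();
        Field field = new Field("name", "value", type);
        System.out.println(field.fieldType().indexOptions()); // DOCS_ONLY
    }
}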

View File

@@ -1,50 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.unit.common.bloom;
import com.google.common.base.Charsets;
import org.elasticsearch.common.bloom.BloomFilter;
import org.elasticsearch.common.bloom.BloomFilterFactory;
import org.testng.annotations.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
/**
*
*/
@Test
public class BoomFilterTests {
@Test
public void testSimpleOps() {
BloomFilter filter = BloomFilterFactory.getFilter(10, 15);
filter.add(wrap("1"), 0, wrap("1").length);
assertThat(filter.isPresent(wrap("1"), 0, wrap("1").length), equalTo(true));
assertThat(filter.isPresent(wrap("2"), 0, wrap("2").length), equalTo(false));
filter.add(wrap("2"), 0, wrap("2").length);
assertThat(filter.isPresent(wrap("1"), 0, wrap("1").length), equalTo(true));
assertThat(filter.isPresent(wrap("2"), 0, wrap("2").length), equalTo(true));
}
private byte[] wrap(String key) {
return key.getBytes(Charsets.UTF_8);
}
}

View File

@@ -21,16 +21,11 @@ package org.elasticsearch.test.unit.common.compress;
import jsr166y.ThreadLocalRandom;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.MapFieldSelector;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.index.*;
import org.apache.lucene.store.*;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.RandomStringGenerator;
import org.elasticsearch.common.compress.CompressedDirectory;
import org.elasticsearch.common.compress.CompressedIndexInput;
@@ -88,10 +83,10 @@ public class CompressIndexInputOutputTests {
private void empty(Compressor compressor) throws Exception {
Directory dir = new RAMDirectory();
IndexOutput out = compressor.indexOutput(dir.createOutput("test"));
IndexOutput out = compressor.indexOutput(dir.createOutput("test", IOContext.DEFAULT));
out.close();
IndexInput in = compressor.indexInput(dir.openInput("test"));
IndexInput in = compressor.indexInput(dir.openInput("test", IOContext.DEFAULT));
try {
in.readByte();
assert false;
@@ -110,7 +105,7 @@ public class CompressIndexInputOutputTests {
private void simple(Compressor compressor) throws Exception {
Directory dir = new RAMDirectory();
IndexOutput out = compressor.indexOutput(dir.createOutput("test"));
IndexOutput out = compressor.indexOutput(dir.createOutput("test", IOContext.DEFAULT));
long pos1 = out.getFilePointer();
out.writeInt(1);
long pos2 = out.getFilePointer();
@@ -124,7 +119,7 @@ public class CompressIndexInputOutputTests {
out.writeString("test2");
out.close();
IndexInput in = compressor.indexInput(dir.openInput("test"));
IndexInput in = compressor.indexInput(dir.openInput("test", IOContext.DEFAULT));
assertThat(in.readInt(), equalTo(1));
assertThat(in.readString(), equalTo("test1"));
assertThat(in.readString(), equalTo(largeString));
@@ -157,7 +152,7 @@ public class CompressIndexInputOutputTests {
private void seek1(boolean compressed, Compressor compressor) throws Exception {
Directory dir = new RAMDirectory();
IndexOutput out = compressed ? compressor.indexOutput(dir.createOutput("test")) : dir.createOutput("test");
IndexOutput out = compressed ? compressor.indexOutput(dir.createOutput("test", IOContext.DEFAULT)) : dir.createOutput("test", IOContext.DEFAULT);
long pos1 = out.getFilePointer();
out.writeVInt(4);
out.writeInt(1);
@@ -182,7 +177,7 @@ public class CompressIndexInputOutputTests {
out.close();
//IndexInput in = dir.openInput("test");
IndexInput in = compressed ? compressor.indexInput(dir.openInput("test")) : dir.openInput("test");
IndexInput in = compressed ? compressor.indexInput(dir.openInput("test", IOContext.DEFAULT)) : dir.openInput("test", IOContext.DEFAULT);
in.seek(pos2);
// now "skip"
int numBytes = in.readVInt();
@@ -200,7 +195,7 @@ public class CompressIndexInputOutputTests {
private void copyBytes(Compressor compressor) throws Exception {
Directory dir = new RAMDirectory();
IndexOutput out = compressor.indexOutput(dir.createOutput("test"));
IndexOutput out = compressor.indexOutput(dir.createOutput("test", IOContext.DEFAULT));
long pos1 = out.getFilePointer();
out.writeInt(1);
long pos2 = out.getFilePointer();
@@ -217,17 +212,17 @@ public class CompressIndexInputOutputTests {
long length = out.length();
out.close();
CompressedIndexOutput out2 = compressor.indexOutput(dir.createOutput("test2"));
CompressedIndexOutput out2 = compressor.indexOutput(dir.createOutput("test2", IOContext.DEFAULT));
out2.writeString("mergeStart");
long startMergePos = out2.getFilePointer();
CompressedIndexInput testInput = compressor.indexInput(dir.openInput("test"));
CompressedIndexInput testInput = compressor.indexInput(dir.openInput("test", IOContext.DEFAULT));
assertThat(testInput.length(), equalTo(length));
out2.copyBytes(testInput, testInput.length());
long endMergePos = out2.getFilePointer();
out2.writeString("mergeEnd");
out2.close();
IndexInput in = compressor.indexInput(dir.openInput("test2"));
IndexInput in = compressor.indexInput(dir.openInput("test2", IOContext.DEFAULT));
assertThat(in.readString(), equalTo("mergeStart"));
assertThat(in.readInt(), equalTo(1));
assertThat(in.readString(), equalTo("test1"));
@@ -276,24 +271,29 @@ public class CompressIndexInputOutputTests {
CheckIndex checkIndex = new CheckIndex(writer.getDirectory());
CheckIndex.Status status = checkIndex.checkIndex();
assertThat(status.clean, equalTo(true));
IndexReader reader = IndexReader.open(writer, true);
IndexReader reader = DirectoryReader.open(writer, true);
final Bits liveDocs = MultiFields.getLiveDocs(reader);
for (int i = 0; i < reader.maxDoc(); i++) {
if (reader.isDeleted(i)) {
if (liveDocs != null && !liveDocs.get(i)) {
continue;
}
Document document = reader.document(i);
checkDoc(document);
document = reader.document(i, new MapFieldSelector("id", "field", "count"));
DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor("id", "field", "count");
reader.document(i, visitor);
document = visitor.getDocument();
checkDoc(document);
}
for (int i = 0; i < 100; i++) {
int doc = ThreadLocalRandom.current().nextInt(reader.maxDoc());
if (reader.isDeleted(i)) {
if (liveDocs != null && !liveDocs.get(i)) {
continue;
}
Document document = reader.document(doc);
checkDoc(document);
document = reader.document(doc, new MapFieldSelector("id", "field", "count"));
DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor("id", "field", "count");
reader.document(i, visitor);
document = visitor.getDocument();
checkDoc(document);
}
}
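
The loops rewritten above capture two Lucene 4 migrations at once: IndexReader.isDeleted(i) is replaced by the MultiFields.getLiveDocs() bit set (null when the index has no deletions), and MapFieldSelector by DocumentStoredFieldVisitor. A self-contained sketch of the pattern, assuming an already-populated Directory (class and method names hypothetical):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;

public class LiveDocsSketch {
    // Visit every live (non-deleted) document and load a subset of its stored fields.
    static void dumpLiveDocs(Directory dir) throws Exception {
        IndexReader reader = DirectoryReader.open(dir);
        try {
            Bits liveDocs = MultiFields.getLiveDocs(reader); // null means "no deletions"
            for (int i = 0; i < reader.maxDoc(); i++) {
                if (liveDocs != null && !liveDocs.get(i)) {
                    continue; // document i was deleted
                }
                DocumentStoredFieldVisitor visitor = new DocumentStoredFieldVisitor("id", "field", "count");
                reader.document(i, visitor);
                Document document = visitor.getDocument();
                System.out.println(document.get("id"));
            }
        } finally {
            reader.close();
        }
    }
}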

View File

@@ -2,7 +2,7 @@ package org.elasticsearch.test.unit.common.lucene.spatial.prefix;
import com.spatial4j.core.shape.Rectangle;
import com.spatial4j.core.shape.Shape;
import org.apache.lucene.analysis.KeywordAnalyzer;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
@@ -75,7 +75,7 @@ public class TermQueryPrefixTreeStrategyTests {
Set<String> foundIDs = new HashSet<String>();
for (ScoreDoc doc : topDocs.scoreDocs) {
Document foundDocument = indexSearcher.doc(doc.doc);
foundIDs.add(foundDocument.getFieldable("id").stringValue());
foundIDs.add(foundDocument.getField("id").stringValue());
}
for (String id : ids) {
@@ -157,6 +157,6 @@ public class TermQueryPrefixTreeStrategyTests {
@AfterTest
public void tearDown() throws IOException {
IOUtils.close(indexSearcher, indexReader, directory);
IOUtils.close(indexReader, directory);
}
}
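
The getFieldable() call above could not survive: Lucene 4 removed the Fieldable interface, and stored values now come back as IndexableField via Document.getField(). A small sketch of the replacement access pattern (helper name hypothetical):

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;

public class StoredFieldAccessSketch {
    // Lucene 4 removed Fieldable; a stored value is read back as an IndexableField.
    static String idOf(IndexSearcher searcher, ScoreDoc hit) throws Exception {
        Document document = searcher.doc(hit.doc);
        IndexableField idField = document.getField("id"); // was getFieldable("id") in Lucene 3
        return idField == null ? null : idField.stringValue();
    }
}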

View File

@@ -19,6 +19,7 @@
package org.elasticsearch.test.unit.common.lucene.store;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;
@@ -39,7 +40,7 @@ public class InputStreamIndexInputTests {
@Test
public void testSingleReadSingleByteLimit() throws IOException {
RAMDirectory dir = new RAMDirectory();
IndexOutput output = dir.createOutput("test");
IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
for (int i = 0; i < 3; i++) {
output.writeByte((byte) 1);
}
@@ -49,7 +50,7 @@ public class InputStreamIndexInputTests {
output.close();
IndexInput input = dir.openInput("test");
IndexInput input = dir.openInput("test", IOContext.DEFAULT);
for (int i = 0; i < 3; i++) {
InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
@@ -76,7 +77,7 @@ public class InputStreamIndexInputTests {
@Test
public void testReadMultiSingleByteLimit1() throws IOException {
RAMDirectory dir = new RAMDirectory();
IndexOutput output = dir.createOutput("test");
IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
for (int i = 0; i < 3; i++) {
output.writeByte((byte) 1);
}
@@ -86,7 +87,7 @@ public class InputStreamIndexInputTests {
output.close();
IndexInput input = dir.openInput("test");
IndexInput input = dir.openInput("test", IOContext.DEFAULT);
byte[] read = new byte[2];
@@ -115,7 +116,7 @@ public class InputStreamIndexInputTests {
@Test
public void testSingleReadTwoBytesLimit() throws IOException {
RAMDirectory dir = new RAMDirectory();
IndexOutput output = dir.createOutput("test");
IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
for (int i = 0; i < 3; i++) {
output.writeByte((byte) 1);
}
@@ -125,7 +126,7 @@ public class InputStreamIndexInputTests {
output.close();
IndexInput input = dir.openInput("test");
IndexInput input = dir.openInput("test", IOContext.DEFAULT);
assertThat(input.getFilePointer(), lessThan(input.length()));
InputStreamIndexInput is = new InputStreamIndexInput(input, 2);
@@ -157,7 +158,7 @@ public class InputStreamIndexInputTests {
@Test
public void testReadMultiTwoBytesLimit1() throws IOException {
RAMDirectory dir = new RAMDirectory();
IndexOutput output = dir.createOutput("test");
IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
for (int i = 0; i < 3; i++) {
output.writeByte((byte) 1);
}
@@ -167,7 +168,7 @@ public class InputStreamIndexInputTests {
output.close();
IndexInput input = dir.openInput("test");
IndexInput input = dir.openInput("test", IOContext.DEFAULT);
byte[] read = new byte[2];
@@ -201,7 +202,7 @@ public class InputStreamIndexInputTests {
@Test
public void testReadMultiFourBytesLimit() throws IOException {
RAMDirectory dir = new RAMDirectory();
IndexOutput output = dir.createOutput("test");
IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
for (int i = 0; i < 3; i++) {
output.writeByte((byte) 1);
}
@@ -211,7 +212,7 @@ public class InputStreamIndexInputTests {
output.close();
IndexInput input = dir.openInput("test");
IndexInput input = dir.openInput("test", IOContext.DEFAULT);
byte[] read = new byte[4];
@@ -240,7 +241,7 @@ public class InputStreamIndexInputTests {
@Test
public void testMarkRest() throws Exception {
RAMDirectory dir = new RAMDirectory();
IndexOutput output = dir.createOutput("test");
IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
for (int i = 0; i < 3; i++) {
output.writeByte((byte) 1);
}
@@ -250,7 +251,7 @@ public class InputStreamIndexInputTests {
output.close();
IndexInput input = dir.openInput("test");
IndexInput input = dir.openInput("test", IOContext.DEFAULT);
InputStreamIndexInput is = new InputStreamIndexInput(input, 4);
assertThat(is.markSupported(), equalTo(true));
assertThat(is.read(), equalTo(1));
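
Every change in this file is the same mechanical migration: Directory.createOutput() and Directory.openInput() gained a mandatory IOContext parameter in Lucene 4, with IOContext.DEFAULT as the drop-in value for tests. A minimal round trip showing the new signatures (class name hypothetical):

import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RAMDirectory;

public class IOContextSketch {
    public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        // Lucene 4: every createOutput/openInput call carries an IOContext hint;
        // IOContext.DEFAULT is the drop-in value for unit tests.
        IndexOutput out = dir.createOutput("test", IOContext.DEFAULT);
        out.writeByte((byte) 1);
        out.close();
        IndexInput in = dir.openInput("test", IOContext.DEFAULT);
        System.out.println(in.readByte()); // prints 1
        in.close();
        dir.close();
    }
}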

View File

@@ -20,10 +20,8 @@
package org.elasticsearch.test.unit.deps.lucene;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.document.IntField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
@@ -49,16 +47,16 @@ public class LuceneFieldCacheTests {
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
Document doc = new Document();
NumericField field = new NumericField("int1").setIntValue(1);
IntField field = new IntField("int1", 1, IntField.TYPE_NOT_STORED);
doc.add(field);
field = new NumericField("int1").setIntValue(2);
field = new IntField("int1", 2, IntField.TYPE_NOT_STORED);
doc.add(field);
indexWriter.addDocument(doc);
IndexReader reader = IndexReader.open(indexWriter, true);
int[] ints = FieldCache.DEFAULT.getInts(reader, "int1");
AtomicReader reader = SlowCompositeReaderWrapper.wrap(IndexReader.open(indexWriter, true));
int[] ints = FieldCache.DEFAULT.getInts(reader, "int1", false);
assertThat(ints.length, equalTo(1));
assertThat(ints[0], equalTo(2));
}
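
Two Lucene 4 details drive the rewrite above: FieldCache.getInts() now operates on an AtomicReader, so the composite reader obtained from the writer is flattened with SlowCompositeReaderWrapper, and the method gained a setDocsWithField flag. A sketch under the assumption of an open IndexWriter (class and method names hypothetical):

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.search.FieldCache;

public class FieldCacheSketch {
    // Lucene 4: FieldCache works per segment, i.e. on an AtomicReader, so a
    // composite reader has to be flattened first (slow, but fine for tests).
    static int[] loadInts(IndexWriter writer, String fieldName) throws Exception {
        AtomicReader reader = SlowCompositeReaderWrapper.wrap(IndexReader.open(writer, true));
        return FieldCache.DEFAULT.getInts(reader, fieldName, false); // false = setDocsWithField
    }
}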

View File

@@ -24,6 +24,7 @@ import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.lucene.Lucene;
import org.testng.annotations.Test;
@@ -50,7 +51,7 @@ public class SimpleLuceneTests {
}
IndexReader reader = IndexReader.open(indexWriter, true);
IndexSearcher searcher = new IndexSearcher(reader);
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), null, 10, new Sort(new SortField("str", SortField.STRING)));
TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), null, 10, new Sort(new SortField("str", SortField.Type.STRING)));
for (int i = 0; i < 10; i++) {
FieldDoc fieldDoc = (FieldDoc) docs.scoreDocs[i];
assertThat(fieldDoc.fields[0].toString(), equalTo(new String(new char[]{(char) (97 + i), (char) (97 + i)})));
@@ -63,17 +64,17 @@ public class SimpleLuceneTests {
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
indexWriter.addDocument(doc()
.add(field("_id", "1")).build());
IndexReader reader = IndexReader.open(indexWriter, true);
DirectoryReader reader = IndexReader.open(indexWriter, true);
assertThat(reader.numDocs(), equalTo(1));
indexWriter.prepareCommit();
reader = reader.reopen();
reader = DirectoryReader.openIfChanged(reader);
assertThat(reader.numDocs(), equalTo(1));
indexWriter.addDocument(doc()
.add(field("_id", "2")).build());
indexWriter.commit();
reader = reader.reopen();
reader = DirectoryReader.openIfChanged(reader);
assertThat(reader.numDocs(), equalTo(2));
}
@@ -82,18 +83,20 @@ public class SimpleLuceneTests {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
indexWriter.addDocument(doc().add(field("_id", "1")).add(new NumericField("test", Field.Store.YES, true).setIntValue(2)).build());
indexWriter.addDocument(doc().add(field("_id", "1")).add(new IntField("test", 2, IntField.TYPE_STORED)).build());
IndexReader reader = IndexReader.open(indexWriter, true);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
Document doc = searcher.doc(topDocs.scoreDocs[0].doc);
Fieldable f = doc.getFieldable("test");
IndexableField f = doc.getField("test");
assertThat(f.stringValue(), equalTo("2"));
topDocs = searcher.search(new TermQuery(new Term("test", NumericUtils.intToPrefixCoded(2))), 1);
BytesRef bytes = new BytesRef();
NumericUtils.intToPrefixCoded(2, 0, bytes);
topDocs = searcher.search(new TermQuery(new Term("test", bytes)), 1);
doc = searcher.doc(topDocs.scoreDocs[0].doc);
f = doc.getFieldable("test");
f = doc.getField("test");
assertThat(f.stringValue(), equalTo("2"));
indexWriter.close();
@@ -117,11 +120,11 @@ public class SimpleLuceneTests {
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1);
final ArrayList<String> fieldsOrder = new ArrayList<String>();
Document doc = searcher.doc(topDocs.scoreDocs[0].doc, new FieldSelector() {
searcher.doc(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() {
@Override
public FieldSelectorResult accept(String fieldName) {
fieldsOrder.add(fieldName);
return FieldSelectorResult.LOAD;
public Status needsField(FieldInfo fieldInfo) throws IOException {
fieldsOrder.add(fieldInfo.name);
return Status.YES;
}
});
@@ -167,7 +170,7 @@ public class SimpleLuceneTests {
public void testNRTSearchOnClosedWriter() throws Exception {
Directory dir = new RAMDirectory();
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
IndexReader reader = IndexReader.open(indexWriter, true);
DirectoryReader reader = IndexReader.open(indexWriter, true);
for (int i = 0; i < 100; i++) {
indexWriter.addDocument(doc()
@@ -192,22 +195,18 @@ public class SimpleLuceneTests {
IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
Document doc = new Document();
NumericField field = new NumericField("int1").setIntValue(1);
field.setOmitNorms(true);
field.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS);
FieldType type = new FieldType(IntField.TYPE_STORED);
type.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS);
IntField field = new IntField("int1", 1, type);
doc.add(field);
field = new NumericField("int1").setIntValue(1);
field = new IntField("int1", 1, type);
doc.add(field);
field = new NumericField("int2").setIntValue(1);
field.setOmitNorms(true);
field.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS);
field = new IntField("int2", 1, type);
doc.add(field);
field = new NumericField("int2").setIntValue(1);
field.setOmitNorms(true);
field.setIndexOptions(FieldInfo.IndexOptions.DOCS_AND_FREQS);
field = new IntField("int2", 1, type);
doc.add(field);
indexWriter.addDocument(doc);
@@ -232,9 +231,9 @@ public class SimpleLuceneTests {
indexWriter.close();
}
private IndexReader refreshReader(IndexReader reader) throws IOException {
IndexReader oldReader = reader;
reader = reader.reopen();
private DirectoryReader refreshReader(DirectoryReader reader) throws IOException {
DirectoryReader oldReader = reader;
reader = DirectoryReader.openIfChanged(reader);
if (reader != oldReader) {
oldReader.close();
}
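
refreshReader() now goes through DirectoryReader.openIfChanged(), which differs from the old IndexReader.reopen() in one important way: it returns null, not the same instance, when the index is unchanged. A defensive variant of the helper (a sketch, not the committed code):

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;

public class ReaderRefreshSketch {
    // DirectoryReader.openIfChanged() returns a new reader, or null when the
    // index is unchanged; the old reader must only be closed in the first case.
    static DirectoryReader refresh(DirectoryReader reader) throws IOException {
        DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
        if (newReader == null) {
            return reader; // nothing changed, keep the current reader
        }
        reader.close();
        return newReader;
    }
}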

View File

@@ -21,7 +21,7 @@ package org.elasticsearch.test.unit.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.lucene.all.AllEntries;
@@ -98,11 +98,11 @@ public class CompoundAnalysisTests {
allEntries.reset();
TokenStream stream = AllTokenStream.allTokenStream("_all", allEntries, analyzer);
TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
List<String> terms = new ArrayList<String>();
while (stream.incrementToken()) {
String tokText = termAtt.term();
String tokText = termAtt.toString();
terms.add(tokText);
}
return terms;
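
The token loop above shows the Lucene 4 attribute rename: TermAttribute becomes CharTermAttribute and term() becomes toString(). A self-contained version of the loop that also honors the reset()/end()/close() contract Lucene 4 enforces on TokenStream (class and method names hypothetical):

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class TokenCollectionSketch {
    // Lucene 4 renamed TermAttribute to CharTermAttribute and term() to toString().
    static List<String> analyze(Analyzer analyzer, String field, String text) throws IOException {
        TokenStream stream = analyzer.tokenStream(field, new StringReader(text));
        CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
        List<String> terms = new ArrayList<String>();
        stream.reset(); // mandatory before the first incrementToken() in Lucene 4
        while (stream.incrementToken()) {
            terms.add(termAtt.toString());
        }
        stream.end();
        stream.close();
        return terms;
    }
}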

View File

@@ -20,7 +20,7 @@
package org.elasticsearch.test.unit.index.analysis;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.WhitespaceTokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.util.Version;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;

View File

@@ -19,9 +19,9 @@
package org.elasticsearch.test.unit.index.analysis.filter1;
import org.apache.lucene.analysis.StopAnalyzer;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.core.StopFilter;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;

View File

@@ -21,7 +21,7 @@ package org.elasticsearch.test.unit.index.analysis.synonyms;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.logging.ESLogger;
@@ -90,11 +90,11 @@ public class SynonymsAnalysisTest {
allEntries.reset();
TokenStream stream = AllTokenStream.allTokenStream("_all", allEntries, analyzer);
TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);
StringBuilder sb = new StringBuilder();
while (stream.incrementToken()) {
sb.append(termAtt.term()).append(" ");
sb.append(termAtt.toString()).append(" ");
}
MatcherAssert.assertThat(target, equalTo(sb.toString().trim()));