lucene4: more unit test cleanup

Igor Motov 2012-10-31 22:20:57 -04:00 committed by Shay Banon
parent 5ad40205c2
commit 787b7a3900
13 changed files with 53 additions and 58 deletions

View File

@@ -20,6 +20,7 @@
 package org.apache.lucene.analysis.miscellaneous;
 
 import org.apache.lucene.analysis.*;
+import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.elasticsearch.common.lucene.Lucene;
 import org.testng.annotations.Test;
@@ -38,7 +39,7 @@ public class TruncateTokenFilterTests {
 
     @Test
     public void simpleTest() throws IOException {
-        Analyzer analyzer = new ReusableAnalyzerBase() {
+        Analyzer analyzer = new Analyzer() {
             @Override
             protected TokenStreamComponents createComponents(String fieldName,
                                                              Reader reader) {
@@ -47,7 +48,7 @@ public class TruncateTokenFilterTests {
             }
         };
 
-        TokenStream test = analyzer.reusableTokenStream("test", new StringReader("a bb ccc dddd eeeee"));
+        TokenStream test = analyzer.tokenStream("test", new StringReader("a bb ccc dddd eeeee"));
         CharTermAttribute termAttribute = test.addAttribute(CharTermAttribute.class);
         assertThat(test.incrementToken(), equalTo(true));
         assertThat(termAttribute.toString(), equalTo("a"));

View File

@@ -20,6 +20,7 @@
 package org.apache.lucene.analysis.miscellaneous;
 
 import org.apache.lucene.analysis.*;
+import org.apache.lucene.analysis.core.WhitespaceTokenizer;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.elasticsearch.common.lucene.Lucene;
 import org.testng.annotations.Test;
@@ -38,7 +39,7 @@ public class UniqueTokenFilterTests {
 
     @Test
     public void simpleTest() throws IOException {
-        Analyzer analyzer = new ReusableAnalyzerBase() {
+        Analyzer analyzer = new Analyzer() {
             @Override
             protected TokenStreamComponents createComponents(String fieldName,
                                                              Reader reader) {
@@ -47,7 +48,7 @@ public class UniqueTokenFilterTests {
             }
         };
 
-        TokenStream test = analyzer.reusableTokenStream("test", new StringReader("this test with test"));
+        TokenStream test = analyzer.tokenStream("test", new StringReader("this test with test"));
         CharTermAttribute termAttribute = test.addAttribute(CharTermAttribute.class);
         assertThat(test.incrementToken(), equalTo(true));
         assertThat(termAttribute.toString(), equalTo("this"));
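
Both token-filter tests above follow the same Lucene 4 migration: ReusableAnalyzerBase is folded into Analyzer, and reusableTokenStream() becomes tokenStream() (reuse is now handled internally). A minimal, self-contained sketch of the new consumption contract — the class name and sample text are illustrative, and note that Lucene 4 expects reset() before the first incrementToken(), which the tests above do not show:

    import java.io.IOException;
    import java.io.Reader;
    import java.io.StringReader;

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.util.Version;

    public class TokenStreamContractSketch {
        public static void main(String[] args) throws IOException {
            // Lucene 4: subclass Analyzer directly; ReusableAnalyzerBase is gone.
            Analyzer analyzer = new Analyzer() {
                @Override
                protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
                    return new TokenStreamComponents(new WhitespaceTokenizer(Version.LUCENE_40, reader));
                }
            };
            // tokenStream() replaces reusableTokenStream().
            TokenStream ts = analyzer.tokenStream("test", new StringReader("a bb ccc"));
            CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
            ts.reset(); // required before the first incrementToken() in Lucene 4
            while (ts.incrementToken()) {
                System.out.println(term.toString());
            }
            ts.end();
            ts.close();
        }
    }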

View File

@@ -23,6 +23,7 @@ import jsr166y.ThreadLocalRandom;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.*;
 import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Numbers;
 import org.elasticsearch.common.StopWatch;
 import org.elasticsearch.common.lucene.Lucene;
@@ -68,21 +69,21 @@ public class LuceneUidScanBenchmark {
                 try {
                     for (long i = 0; i < SCAN_COUNT; i++) {
                         long id = startUid + (Math.abs(ThreadLocalRandom.current().nextInt()) % INDEX_COUNT);
-                        TermPositions uid = reader.termPositions(new Term("_uid", Long.toString(id)));
-                        uid.next();
+                        DocsAndPositionsEnum uid = MultiFields.getTermPositionsEnum(reader,
+                                MultiFields.getLiveDocs(reader),
+                                "_uid",
+                                new BytesRef(Long.toString(id)));
+                        uid.nextDoc();
                         uid.nextPosition();
-                        if (!uid.isPayloadAvailable()) {
-                            uid.close();
+                        if (uid.getPayload() == null) {
                             System.err.println("no payload...");
                             break;
                         }
-                        byte[] payload = uid.getPayload(new byte[8], 0);
-                        if (Numbers.bytesToLong(payload) != id) {
-                            uid.close();
+                        BytesRef payload = uid.getPayload();
+                        if (Numbers.bytesToLong(BytesRef.deepCopyOf(payload).bytes) != id) {
                             System.err.println("wrong id...");
                             break;
                         }
-                        uid.close();
                     }
                 } catch (Exception e) {
                     e.printStackTrace();
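
The benchmark change swaps Lucene 3's TermPositions for the Lucene 4 postings API: MultiFields.getTermPositionsEnum() returns a DocsAndPositionsEnum, payloads come back as a BytesRef (getPayload() returning null replaces isPayloadAvailable()), and there is no close() call. A hedged sketch of the same scan step — the "_uid" field and the 8-byte long payload layout follow the benchmark, while the helper names are made up:

    import java.io.IOException;

    import org.apache.lucene.index.DocsAndPositionsEnum;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.MultiFields;
    import org.apache.lucene.util.BytesRef;

    public class UidPayloadSketch {
        // Returns the long stored in the term's payload, or -1 if absent.
        static long readUidPayload(IndexReader reader, long id) throws IOException {
            DocsAndPositionsEnum uid = MultiFields.getTermPositionsEnum(
                    reader, MultiFields.getLiveDocs(reader), "_uid",
                    new BytesRef(Long.toString(id)));
            if (uid == null || uid.nextDoc() == DocsAndPositionsEnum.NO_MORE_DOCS) {
                return -1; // term does not exist (or only in deleted docs)
            }
            uid.nextPosition();                  // payloads are per-position
            BytesRef payload = uid.getPayload(); // null replaces isPayloadAvailable()
            if (payload == null) {
                return -1;
            }
            // Copy the bytes out: BytesRef.bytes may point into a shared, offset
            // buffer, which is what BytesRef.deepCopyOf(...) handles in the diff above.
            byte[] raw = new byte[payload.length];
            System.arraycopy(payload.bytes, payload.offset, raw, 0, payload.length);
            long value = 0;
            for (byte b : raw) {
                value = (value << 8) | (b & 0xFF);
            }
            return value;
        }
    }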

View File

@@ -19,13 +19,10 @@
 package org.elasticsearch.test.unit.common.lucene.search;
 
-import org.apache.lucene.analysis.KeywordAnalyzer;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.Term;
+import org.apache.lucene.index.*;
 import org.apache.lucene.search.XTermsFilter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
@@ -58,19 +55,19 @@ public class TermsFilterTests {
                 w.commit();
             }
         }
-        IndexReader reader = w.getReader();
+        AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(w, true));
         w.close();
 
         TermFilter tf = new TermFilter(new Term(fieldName, "19"));
-        FixedBitSet bits = (FixedBitSet) tf.getDocIdSet(reader);
+        FixedBitSet bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
         assertThat(bits, nullValue());
 
         tf = new TermFilter(new Term(fieldName, "20"));
-        bits = (FixedBitSet) tf.getDocIdSet(reader);
+        bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
         assertThat(bits.cardinality(), equalTo(1));
 
         tf = new TermFilter(new Term("all", "xxx"));
-        bits = (FixedBitSet) tf.getDocIdSet(reader);
+        bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
         assertThat(bits.cardinality(), equalTo(100));
 
         reader.close();
@@ -92,23 +89,23 @@ public class TermsFilterTests {
                 w.commit();
             }
         }
-        IndexReader reader = w.getReader();
+        AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(w, true));
         w.close();
 
         XTermsFilter tf = new XTermsFilter(new Term[]{new Term(fieldName, "19")});
-        FixedBitSet bits = (FixedBitSet) tf.getDocIdSet(reader);
+        FixedBitSet bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
         assertThat(bits, nullValue());
 
         tf = new XTermsFilter(new Term[]{new Term(fieldName, "19"), new Term(fieldName, "20")});
-        bits = (FixedBitSet) tf.getDocIdSet(reader);
+        bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
         assertThat(bits.cardinality(), equalTo(1));
 
         tf = new XTermsFilter(new Term[]{new Term(fieldName, "19"), new Term(fieldName, "20"), new Term(fieldName, "10")});
-        bits = (FixedBitSet) tf.getDocIdSet(reader);
+        bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
         assertThat(bits.cardinality(), equalTo(2));
 
         tf = new XTermsFilter(new Term[]{new Term(fieldName, "19"), new Term(fieldName, "20"), new Term(fieldName, "10"), new Term(fieldName, "00")});
-        bits = (FixedBitSet) tf.getDocIdSet(reader);
+        bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
         assertThat(bits.cardinality(), equalTo(2));
 
         reader.close();
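
Both tests above move from IndexWriter.getReader() and Filter.getDocIdSet(IndexReader) to the Lucene 4 shape: open a DirectoryReader, flatten it with SlowCompositeReaderWrapper where a single AtomicReader is still expected, and pass a reader context plus accepted docs to the filter. A runnable sketch, with a stock Lucene QueryWrapperFilter standing in for the Elasticsearch TermFilter/XTermsFilter (field name and analyzer are illustrative):

    import org.apache.lucene.analysis.core.KeywordAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.index.AtomicReader;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.SlowCompositeReaderWrapper;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.DocIdSet;
    import org.apache.lucene.search.QueryWrapperFilter;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class AtomicReaderFilterSketch {
        public static void main(String[] args) throws Exception {
            RAMDirectory dir = new RAMDirectory();
            IndexWriter w = new IndexWriter(dir,
                    new IndexWriterConfig(Version.LUCENE_40, new KeywordAnalyzer()));
            Document doc = new Document();
            doc.add(new StringField("id", "20", Field.Store.NO));
            w.addDocument(doc);
            w.commit();

            // IndexWriter.getReader() is gone: open an NRT DirectoryReader and wrap
            // the composite reader when an API still needs a single AtomicReader.
            AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(w, true));
            w.close();

            // Filter.getDocIdSet() now takes a reader context plus accepted docs.
            QueryWrapperFilter filter = new QueryWrapperFilter(new TermQuery(new Term("id", "20")));
            DocIdSet docs = filter.getDocIdSet(reader.getContext(), reader.getLiveDocs());
            System.out.println("matched any: " + (docs != null));
            reader.close();
        }
    }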

View File

@@ -23,7 +23,9 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Version;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.deletionpolicy.KeepOnlyLastDeletionPolicy;
@@ -35,7 +37,7 @@ import org.testng.annotations.AfterClass;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
-import static org.apache.lucene.index.IndexReader.listCommits;
+import static org.apache.lucene.index.DirectoryReader.listCommits;
 import static org.elasticsearch.common.settings.ImmutableSettings.Builder.EMPTY_SETTINGS;
 import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.equalTo;
@@ -57,7 +59,10 @@ public class SnapshotDeletionPolicyTests {
     public void setUp() throws Exception {
         dir = new RAMDirectory();
         deletionPolicy = new SnapshotDeletionPolicy(new KeepOnlyLastDeletionPolicy(shardId, EMPTY_SETTINGS));
-        indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, deletionPolicy, IndexWriter.MaxFieldLength.UNLIMITED);
+        // LUCENE 4 UPGRADE: Not sure about version.
+        indexWriter = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_31, Lucene.STANDARD_ANALYZER)
+                .setIndexDeletionPolicy(deletionPolicy)
+                .setOpenMode(IndexWriterConfig.OpenMode.CREATE));
     }
 
     @AfterClass
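
The setUp() change reflects the removal of the old multi-argument IndexWriter constructors: analyzer, deletion policy, and open mode all move into IndexWriterConfig. A minimal sketch with stock Lucene stand-ins for the Elasticsearch analyzer and deletion policy (Version.LUCENE_40 is an assumption here; the commit itself leaves the version question open):

    import org.apache.lucene.analysis.core.KeywordAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class WriterConfigSketch {
        public static void main(String[] args) throws Exception {
            RAMDirectory dir = new RAMDirectory();
            // All writer options (analyzer, deletion policy, open mode) are
            // configured up front; the old constructor overloads are gone.
            IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_40, new KeywordAnalyzer())
                    .setIndexDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy())
                    .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
            IndexWriter writer = new IndexWriter(dir, conf);
            writer.commit();
            writer.close();
        }
    }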

View File

@@ -20,7 +20,6 @@
 package org.elasticsearch.test.unit.index.engine.robin;
 
 import org.elasticsearch.index.analysis.AnalysisService;
-import org.elasticsearch.index.cache.bloom.none.NoneBloomCache;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.robin.RobinEngine;
 import org.elasticsearch.index.indexing.ShardIndexingService;
@@ -39,6 +38,6 @@ public class SimpleRobinEngineTests extends AbstractSimpleEngineTests {
 
     protected Engine createEngine(Store store, Translog translog) {
         return new RobinEngine(shardId, EMPTY_SETTINGS, threadPool, new IndexSettingsService(shardId.index(), EMPTY_SETTINGS), new ShardIndexingService(shardId, EMPTY_SETTINGS), null, store, createSnapshotDeletionPolicy(), translog, createMergePolicy(), createMergeScheduler(),
-                new AnalysisService(shardId.index()), new SimilarityService(shardId.index()), new NoneBloomCache(shardId.index()));
+                new AnalysisService(shardId.index()), new SimilarityService(shardId.index()));
     }
 }

View File

@@ -22,9 +22,7 @@ package org.elasticsearch.test.unit.index.field.data.doubles;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoubleField;
 import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.common.lucene.Lucene;
@@ -69,7 +67,7 @@ public class DoubleFieldDataTests {
         document.add(new DoubleField("svalue", 4, Field.Store.NO));
         indexWriter.addDocument(document);
 
-        IndexReader reader = IndexReader.open(indexWriter, true);
+        AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(indexWriter, true));
 
         DoubleFieldData sFieldData = DoubleFieldData.load(reader, "svalue");
         DoubleFieldData mFieldData = DoubleFieldData.load(reader, "mvalue");

View File

@@ -22,9 +22,7 @@ package org.elasticsearch.test.unit.index.field.data.floats;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FloatField;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.common.lucene.Lucene;
@@ -69,7 +67,7 @@ public class FloatFieldDataTests {
         document.add(new FloatField("svalue", 4, Field.Store.NO));
         indexWriter.addDocument(document);
 
-        IndexReader reader = IndexReader.open(indexWriter, true);
+        AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(indexWriter, true));
 
         FloatFieldData sFieldData = FloatFieldData.load(reader, "svalue");
         FloatFieldData mFieldData = FloatFieldData.load(reader, "mvalue");

View File

@@ -22,9 +22,7 @@ package org.elasticsearch.test.unit.index.field.data.ints;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.IntField;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.common.lucene.Lucene;
@@ -69,7 +67,7 @@ public class IntFieldDataTests {
         document.add(new IntField("svalue", 4, Field.Store.NO));
         indexWriter.addDocument(document);
 
-        IndexReader reader = IndexReader.open(indexWriter, true);
+        AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(indexWriter, true));
 
         IntFieldData sFieldData = IntFieldData.load(reader, "svalue");
         IntFieldData mFieldData = IntFieldData.load(reader, "mvalue");

View File

@@ -22,9 +22,7 @@ package org.elasticsearch.test.unit.index.field.data.longs;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.LongField;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.common.lucene.Lucene;
@@ -69,7 +67,7 @@ public class LongFieldDataTests {
         document.add(new LongField("svalue", 4, Field.Store.NO));
         indexWriter.addDocument(document);
 
-        IndexReader reader = IndexReader.open(indexWriter, true);
+        AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(indexWriter, true));
 
        LongFieldData sFieldData = LongFieldData.load(reader, "svalue");
        LongFieldData mFieldData = LongFieldData.load(reader, "mvalue");

View File

@@ -22,9 +22,7 @@ package org.elasticsearch.test.unit.index.field.data.shorts;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.IntField;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.RAMDirectory;
 import org.elasticsearch.common.lucene.Lucene;
@@ -69,7 +67,7 @@ public class ShortFieldDataTests {
         document.add(new IntField("svalue", 4, Field.Store.NO));
         indexWriter.addDocument(document);
 
-        IndexReader reader = IndexReader.open(indexWriter, true);
+        AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(indexWriter, true));
 
         ShortFieldData sFieldData = ShortFieldData.load(reader, "svalue");
         ShortFieldData mFieldData = ShortFieldData.load(reader, "mvalue");
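
The five field-data tests above all make the same change: IndexReader.open(writer, true) becomes DirectoryReader.open(writer, true), wrapped into a single AtomicReader for the field-data loaders. The typed numeric fields visible in the context (DoubleField, FloatField, IntField, LongField) are Lucene 4's replacement for NumericField. One sketch covers all five, with an illustrative analyzer and field name:

    import org.apache.lucene.analysis.core.KeywordAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.DoubleField;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.index.AtomicReader;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.SlowCompositeReaderWrapper;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class FieldDataReaderSketch {
        public static void main(String[] args) throws Exception {
            RAMDirectory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir,
                    new IndexWriterConfig(Version.LUCENE_40, new KeywordAnalyzer()));
            Document document = new Document();
            // Typed numeric fields replace Lucene 3's NumericField.
            document.add(new DoubleField("svalue", 4d, Field.Store.NO));
            writer.addDocument(document);

            // DirectoryReader.open gives the NRT reader; SlowCompositeReaderWrapper
            // flattens it for code that still expects a single AtomicReader.
            AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(writer, true));
            System.out.println("docs visible: " + reader.numDocs());
            reader.close();
            writer.close();
        }
    }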

View File

@@ -57,13 +57,13 @@ public class CustomBoostMappingTests {
                 .startObject("date_field").field("value", "20100101").field("boost", 9.0f).endObject()
                 .endObject().bytes());
 
-        assertThat(doc.rootDoc().getFieldable("s_field").getBoost(), equalTo(2.0f));
-        assertThat(doc.rootDoc().getFieldable("l_field").getBoost(), equalTo(3.0f));
-        assertThat(doc.rootDoc().getFieldable("i_field").getBoost(), equalTo(4.0f));
-        assertThat(doc.rootDoc().getFieldable("sh_field").getBoost(), equalTo(5.0f));
-        assertThat(doc.rootDoc().getFieldable("b_field").getBoost(), equalTo(6.0f));
-        assertThat(doc.rootDoc().getFieldable("d_field").getBoost(), equalTo(7.0f));
-        assertThat(doc.rootDoc().getFieldable("f_field").getBoost(), equalTo(8.0f));
-        assertThat(doc.rootDoc().getFieldable("date_field").getBoost(), equalTo(9.0f));
+        assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(2.0f));
+        assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(3.0f));
+        assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(4.0f));
+        assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(5.0f));
+        assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(6.0f));
+        assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(7.0f));
+        assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f));
+        assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f));
     }
 }
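
The mapping test tracks Lucene 4's removal of the Fieldable interface: documents now hand back IndexableField, and the boost accessor is boost() rather than getBoost(). A tiny sketch of the renamed API (field name and value are illustrative):

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.TextField;
    import org.apache.lucene.index.IndexableField;

    public class FieldBoostSketch {
        public static void main(String[] args) {
            Document doc = new Document();
            Field f = new TextField("s_field", "value", Field.Store.NO);
            f.setBoost(2.0f);
            doc.add(f);
            // getField() returns IndexableField; boost() replaces getBoost().
            IndexableField stored = doc.getField("s_field");
            System.out.println(stored.boost()); // 2.0
        }
    }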

View File

@@ -20,6 +20,7 @@
 package org.elasticsearch.test.unit.index.query;
 
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.BoostingQuery;
+import org.apache.lucene.sandbox.queries.FuzzyLikeThisQuery;
 import org.apache.lucene.search.*;
 import org.apache.lucene.search.spans.*;