mirror of https://github.com/apache/lucene.git
LUCENE-3264: crank up faceting module tests
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1141629 13f79535-47bb-0310-9956-ffa450edef68
parent 842d97edac
commit cec86dbc06
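
The same mechanical change repeats through every file below: tests stop hand-building IndexWriter, RAMDirectory, and WhitespaceAnalyzer/KeywordAnalyzer, and instead go through the randomized factories LuceneTestCase provides, so each run exercises different directories, analyzers, and index settings. A minimal before/after sketch of that shape (the class and test names here are hypothetical; the API calls are exactly the ones the hunks below use):

    // Sketch only: the shape every test below is converted to.
    public class SomeFacetTest extends LuceneTestCase { // hypothetical name
      public void testSomething() throws Exception {
        Directory dir = newDirectory();                 // was: new RAMDirectory()
        RandomIndexWriter writer = new RandomIndexWriter(random, dir,
            newIndexWriterConfig(TEST_VERSION_CURRENT,
                new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
        // ... add documents ...
        IndexReader reader = writer.getReader();        // was: IndexReader.open(dir)
        writer.close();
        IndexSearcher searcher = newSearcher(reader);   // was: new IndexSearcher(reader)
        // ... run the actual assertions ...
        searcher.close();
        reader.close();
        dir.close(); // newDirectory() tracks leaks, so everything must be closed
      }
    }
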
Index: .../FacetTestBase.java
===================================================================
@@ -11,7 +11,8 @@ import java.util.Map;
 
 import org.apache.lucene.DocumentBuilder.DocumentBuilderException;
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Index;
@@ -20,21 +21,19 @@ import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.MultiFields;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.RAMDirectory;
 
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.facet.index.CategoryDocumentBuilder;
 import org.apache.lucene.facet.index.params.CategoryListParams;
 import org.apache.lucene.facet.index.params.DefaultFacetIndexingParams;
@@ -131,15 +130,15 @@ public abstract class FacetTestBase extends LuceneTestCase {
     }
 
     if (onDisk) {
-      File indexFile = new File(TEMP_DIR,"index");
-      indexDir = FSDirectory.open(indexFile);
-      taxoDir = FSDirectory.open(new File(indexFile,"facets"));
+      File indexFile = _TestUtil.getTempDir("index");
+      indexDir = newFSDirectory(indexFile);
+      taxoDir = newFSDirectory(new File(indexFile,"facets"));
     } else {
-      indexDir = new RAMDirectory();
-      taxoDir = new RAMDirectory();
+      indexDir = newDirectory();
+      taxoDir = newDirectory();
     }
 
-    IndexWriter iw = new IndexWriter(indexDir, new IndexWriterConfig(TEST_VERSION_CURRENT, getAnalyzer()));
+    RandomIndexWriter iw = new RandomIndexWriter(random, indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, getAnalyzer()));
     TaxonomyWriter taxo = new LuceneTaxonomyWriter(taxoDir, OpenMode.CREATE);
 
     populateIndex(iw, taxo, getFacetIndexingParams(partitionSize));
@@ -153,7 +152,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
     // prepare for searching
     taxoReader = new LuceneTaxonomyReader(taxoDir);
     indexReader = IndexReader.open(indexDir);
-    searcher = new IndexSearcher(indexReader);
+    searcher = newSearcher(indexReader);
   }
 
   /** Returns a default facet indexing params */
@@ -187,7 +186,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
    * Populate the test index+taxonomy for this test.
    * <p>Subclasses can override this to test different scenarios
    */
-  protected void populateIndex(IndexWriter iw, TaxonomyWriter taxo, FacetIndexingParams iParams)
+  protected void populateIndex(RandomIndexWriter iw, TaxonomyWriter taxo, FacetIndexingParams iParams)
       throws IOException, DocumentBuilderException, CorruptIndexException {
     // add test documents
     int numDocsToIndex = numDocsToIndex();
@@ -211,7 +210,9 @@ public abstract class FacetTestBase extends LuceneTestCase {
     indexReader = null;
     searcher.close();
     searcher = null;
+    indexDir.close();
     indexDir = null;
+    taxoDir.close();
     taxoDir = null;
   }
 
@@ -220,7 +221,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
    * Sub classes should override in order to test with different analyzer.
    */
   protected Analyzer getAnalyzer() {
-    return new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
+    return new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
   }
 
   /** convenience method: convert sub results to an array */
@@ -233,7 +234,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
   }
 
   /** utility Create a dummy document with specified categories and content */
-  protected final void indexDoc(FacetIndexingParams iParams, IndexWriter iw,
+  protected final void indexDoc(FacetIndexingParams iParams, RandomIndexWriter iw,
       TaxonomyWriter tw, String content, List<CategoryPath> categories) throws IOException,
       CorruptIndexException {
     Document d = new Document();
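Worth noting in the onDisk branch above: the fixed TEMP_DIR path and direct FSDirectory.open are traded for the framework's tracked equivalents. A hedged sketch of just that branch, using the variable names from the hunk:

    // _TestUtil.getTempDir gives a per-run temp folder; newFSDirectory wraps it
    // in a leak-checked, randomly chosen FSDirectory implementation.
    File indexFile = _TestUtil.getTempDir("index");
    indexDir = newFSDirectory(indexFile);
    taxoDir = newFSDirectory(new File(indexFile, "facets"));
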
Index: .../FacetTestUtils.java
===================================================================
@@ -14,12 +14,12 @@ import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 
 import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.util.LuceneTestCase;
@@ -55,11 +55,11 @@ import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;
 
 public class FacetTestUtils {
 
-  public static Directory[][] createIndexTaxonomyDirs(int number) {
+  public static Directory[][] createIndexTaxonomyDirs(int number) throws IOException {
     Directory[][] dirs = new Directory[number][2];
     for (int i = 0; i < number; i++) {
-      dirs[i][0] = new RAMDirectory();
-      dirs[i][1] = new RAMDirectory();
+      dirs[i][0] = LuceneTestCase.newDirectory();
+      dirs[i][1] = LuceneTestCase.newDirectory();
     }
     return dirs;
   }
@@ -122,7 +122,7 @@ public class FacetTestUtils {
     return collectors;
   }
 
-  public static void add(FacetIndexingParams iParams, IndexWriter iw,
+  public static void add(FacetIndexingParams iParams, RandomIndexWriter iw,
      TaxonomyWriter tw, String... strings) throws IOException,
      CorruptIndexException {
    ArrayList<CategoryPath> cps = new ArrayList<CategoryPath>();
Index: .../EnhancementsPayloadIteratorTest.java
===================================================================
@@ -5,7 +5,6 @@ import java.io.IOException;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -46,8 +45,8 @@ public class EnhancementsPayloadIteratorTest extends LuceneTestCase {
   @BeforeClass
   public static void buildAssociationIndex() throws Exception {
     // create Directories for the search index and for the taxonomy index
-    indexDir = new RAMDirectory();
-    taxoDir = new RAMDirectory();
+    indexDir = newDirectory();
+    taxoDir = newDirectory();
 
     // index the sample documents
     if (VERBOSE) {
@@ -73,6 +72,7 @@ public class EnhancementsPayloadIteratorTest extends LuceneTestCase {
     assertTrue("Missing instance of tags/lucene in doc 1", iterator.setdoc(1));
     assoc = (Integer) iterator.getCategoryData(associationEnhancement);
     assertEquals("Unexpected association value for tags/lucene in doc 1", 1, assoc, 1E-5);
+    indexReader.close();
   }
 
   @Test
@@ -84,6 +84,7 @@ public class EnhancementsPayloadIteratorTest extends LuceneTestCase {
     assertTrue("Unexpected failure of init()", iterator.init());
     assertFalse("Unexpected payload for root/a/f2 in doc 0", iterator.setdoc(0));
     assertFalse("Unexpected instance of root/a/f2 in doc 1", iterator.setdoc(1));
+    indexReader.close();
   }
 
   @Test
@@ -98,11 +99,14 @@ public class EnhancementsPayloadIteratorTest extends LuceneTestCase {
     float assoc = Float.intBitsToFloat((Integer) iterator
         .getCategoryData(associationEnhancement));
     assertEquals("Unexpected association value for genre/computing in doc 1", 0.34f, assoc, 0.001);
+    indexReader.close();
   }
 
   @AfterClass
   public static void closeDirectories() throws IOException {
     indexDir.close();
+    indexDir = null;
     taxoDir.close();
+    taxoDir = null;
   }
 }
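The lifecycle pattern added above recurs in the other @BeforeClass/@AfterClass tests in this commit: each reader opened against the shared static index is closed inside the test that opened it, and the static directories are both closed and nulled at class teardown. A condensed sketch (field names from the hunks; assume static Directory fields):

    @AfterClass
    public static void closeDirectories() throws IOException {
      indexDir.close();
      indexDir = null; // null the statics so closed fixtures cannot be
      taxoDir.close(); // accidentally reused by a later test class
      taxoDir = null;
    }
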
Index: .../TwoEnhancementsTest.java
===================================================================
@@ -4,14 +4,13 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -45,8 +44,8 @@ public class TwoEnhancementsTest extends LuceneTestCase {
 
   @Test
   public void testTwoEmptyAndNonEmptyByteArrays() throws Exception {
-    Directory indexDir = new RAMDirectory();
-    Directory taxoDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
+    Directory taxoDir = newDirectory();
 
     EnhancementsIndexingParams indexingParams =
       new DefaultEnhancementsIndexingParams(
@@ -57,8 +56,8 @@ public class TwoEnhancementsTest extends LuceneTestCase {
     List<CategoryPath> categoryPaths = new ArrayList<CategoryPath>();
     categoryPaths.add(new CategoryPath("a", "b"));
 
-    IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter indexWriter = new RandomIndexWriter(random, indexDir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     TaxonomyWriter taxo = new LuceneTaxonomyWriter(taxoDir);
 
     // a category document builder will add the categories to a document
@@ -67,9 +66,9 @@ public class TwoEnhancementsTest extends LuceneTestCase {
     indexWriter.addDocument(new EnhancementsDocumentBuilder(taxo,
         indexingParams).setCategoryPaths(categoryPaths).build(doc));
 
+    IndexReader indexReader = indexWriter.getReader();
     indexWriter.close();
 
-    IndexReader indexReader = IndexReader.open(indexDir);
     Term term = DrillDown.term(indexingParams, new CategoryPath("a","b"));
     EnhancementsPayloadIterator iterator = new EnhancementsPayloadIterator(
         indexingParams.getCategoryEnhancements(), indexReader, term);
@@ -82,13 +81,17 @@ public class TwoEnhancementsTest extends LuceneTestCase {
         .getCategoryData(new CategoryEnhancementDummy3());
     assertTrue("Bad array returned for CategoryEnhancementDummy3", Arrays
         .equals(dummy3, CategoryEnhancementDummy3.CATEGORY_TOKEN_BYTES));
+    indexReader.close();
+    indexDir.close();
+    taxo.close();
+    taxoDir.close();
   }
 
   @Test
   public void testTwoNonEmptyByteArrays() throws Exception {
     // add document with a category containing data for both enhancements
-    Directory indexDir = new RAMDirectory();
-    Directory taxoDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
+    Directory taxoDir = newDirectory();
 
     EnhancementsIndexingParams indexingParams =
       new DefaultEnhancementsIndexingParams(
@@ -98,8 +101,8 @@ public class TwoEnhancementsTest extends LuceneTestCase {
     List<CategoryPath> categoryPaths = new ArrayList<CategoryPath>();
     categoryPaths.add(new CategoryPath("a", "b"));
 
-    IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter indexWriter = new RandomIndexWriter(random, indexDir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     TaxonomyWriter taxo = new LuceneTaxonomyWriter(taxoDir);
 
     // a category document builder will add the categories to a document
@@ -108,9 +111,9 @@ public class TwoEnhancementsTest extends LuceneTestCase {
     indexWriter.addDocument(new EnhancementsDocumentBuilder(taxo,
         indexingParams).setCategoryPaths(categoryPaths).build(doc));
 
+    IndexReader indexReader = indexWriter.getReader();
     indexWriter.close();
 
-    IndexReader indexReader = IndexReader.open(indexDir);
     Term term = DrillDown.term(indexingParams, new CategoryPath("a","b"));
     EnhancementsPayloadIterator iterator = new EnhancementsPayloadIterator(
         indexingParams.getCategoryEnhancements(), indexReader, term);
@@ -125,5 +128,9 @@ public class TwoEnhancementsTest extends LuceneTestCase {
         .getCategoryData(new CategoryEnhancementDummy3());
     assertTrue("Bad array returned for CategoryEnhancementDummy3", Arrays
         .equals(dummy3, CategoryEnhancementDummy3.CATEGORY_TOKEN_BYTES));
+    indexReader.close();
+    taxo.close();
+    indexDir.close();
+    taxoDir.close();
   }
 }
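Both tests above swap IndexReader.open(indexDir) for indexWriter.getReader(), taken before close(). With RandomIndexWriter that ordering matters: the writer randomizes flushing and segment structure, and getReader() returns a near-real-time reader that sees everything written so far, committed or not. A trimmed sketch of the sequence, names as in the hunks:

    IndexReader indexReader = indexWriter.getReader(); // reader from the writer itself
    indexWriter.close();            // was: close, then IndexReader.open(indexDir)
    // ... run assertions against indexReader ...
    indexReader.close();
    indexDir.close();
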
Index: .../CustomAssociationPropertyTest.java
===================================================================
@@ -1,12 +1,11 @@
 package org.apache.lucene.facet.enhancements.association;
 
-import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -55,10 +54,11 @@ public class CustomAssociationPropertyTest extends LuceneTestCase {
     EnhancementsIndexingParams iParams = new DefaultEnhancementsIndexingParams(
         new AssociationEnhancement());
 
-    Directory iDir = new RAMDirectory();
-    Directory tDir = new RAMDirectory();
+    Directory iDir = newDirectory();
+    Directory tDir = newDirectory();
 
-    IndexWriter w = new IndexWriter(iDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    RandomIndexWriter w = new RandomIndexWriter(random, iDir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
     LuceneTaxonomyWriter taxoW = new LuceneTaxonomyWriter(tDir);
 
     CategoryContainer cc = new CategoryContainer();
@@ -72,9 +72,9 @@ public class CustomAssociationPropertyTest extends LuceneTestCase {
     builder.setCategories(cc);
     w.addDocument(builder.build(new Document()));
     taxoW.close();
+    IndexReader reader = w.getReader();
     w.close();
 
-    IndexReader reader = IndexReader.open(iDir);
     LuceneTaxonomyReader taxo = new LuceneTaxonomyReader(tDir);
     String field = iParams.getCategoryListParams(new CategoryPath("0")).getTerm().field();
     AssociationsPayloadIterator api = new AssociationsPayloadIterator(reader, field);
@@ -93,5 +93,10 @@ public class CustomAssociationPropertyTest extends LuceneTestCase {
     }
 
     assertTrue("No categories found for doc #0", flag);
+
+    reader.close();
+    taxo.close();
+    iDir.close();
+    tDir.close();
   }
 }
Index: .../FacetsPayloadProcessorProviderTest.java
===================================================================
@@ -4,15 +4,15 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -49,23 +49,27 @@ public class FacetsPayloadProcessorProviderTest extends LuceneTestCase {
 
   @Test
   public void testTaxonomyMergeUtils() throws Exception {
-    Directory dir = new RAMDirectory();
-    Directory taxDir = new RAMDirectory();
+    Directory dir = newDirectory();
+    Directory taxDir = newDirectory();
     buildIndexWithFacets(dir, taxDir, true);
 
-    Directory dir1 = new RAMDirectory();
-    Directory taxDir1 = new RAMDirectory();
+    Directory dir1 = newDirectory();
+    Directory taxDir1 = newDirectory();
     buildIndexWithFacets(dir1, taxDir1, false);
 
     TaxonomyMergeUtils.merge(dir, taxDir, dir1, taxDir1);
 
     verifyResults(dir1, taxDir1);
+    dir1.close();
+    taxDir1.close();
+    dir.close();
+    taxDir.close();
   }
 
   private void verifyResults(Directory dir, Directory taxDir) throws IOException {
     IndexReader reader1 = IndexReader.open(dir);
     LuceneTaxonomyReader taxReader = new LuceneTaxonomyReader(taxDir);
-    IndexSearcher searcher = new IndexSearcher(reader1);
+    IndexSearcher searcher = newSearcher(reader1);
     FacetSearchParams fsp = new FacetSearchParams();
     fsp.addFacetRequest(new CountFacetRequest(new CategoryPath("tag"), NUM_DOCS));
     FacetsCollector collector = new FacetsCollector(fsp, reader1, taxReader);
@@ -81,11 +85,14 @@ public class FacetsPayloadProcessorProviderTest extends LuceneTestCase {
       }
       assertEquals(NUM_DOCS ,weight);
     }
+    reader1.close();
+    taxReader.close();
   }
 
   private void buildIndexWithFacets(Directory dir, Directory taxDir, boolean asc) throws IOException {
-    IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
-    IndexWriter writer = new IndexWriter(dir, config);
+    IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
+        new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
 
     LuceneTaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(taxDir);
     for (int i = 1; i <= NUM_DOCS; i++) {
Index: .../OrdinalPolicyTest.java
===================================================================
@@ -1,6 +1,6 @@
 package org.apache.lucene.facet.index.categorypolicy;
 
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -46,8 +46,9 @@ public class OrdinalPolicyTest extends LuceneTestCase {
 
   @Test
   public void testNonTopLevelOrdinalPolicy() throws Exception {
+    Directory dir = newDirectory();
     TaxonomyWriter taxonomy = null;
-    taxonomy = new LuceneTaxonomyWriter(new RAMDirectory());
+    taxonomy = new LuceneTaxonomyWriter(dir);
 
     int[] topLevelOrdinals = new int[10];
     String[] topLevelStrings = new String[10];
@@ -85,6 +86,8 @@ public class OrdinalPolicyTest extends LuceneTestCase {
 
     // check illegal ordinal
     assertFalse("Should not add illegal ordinal", ordinalPolicy.shouldAdd(100000));
+    taxonomy.close();
+    dir.close();
   }
 
 }
Index: .../PathPolicyTest.java
===================================================================
@@ -1,6 +1,6 @@
 package org.apache.lucene.facet.index.categorypolicy;
 
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -52,8 +52,9 @@ public class PathPolicyTest extends LuceneTestCase {
 
   @Test
   public void testNonTopLevelPathPolicy() throws Exception {
+    Directory dir = newDirectory();
     TaxonomyWriter taxonomy = null;
-    taxonomy = new LuceneTaxonomyWriter(new RAMDirectory());
+    taxonomy = new LuceneTaxonomyWriter(dir);
 
     CategoryPath[] topLevelPaths = new CategoryPath[10];
     String[] topLevelStrings = new String[10];
@@ -88,5 +89,7 @@ public class PathPolicyTest extends LuceneTestCase {
           + nonTopLevelPaths[i],
           pathPolicy.shouldAdd(nonTopLevelPaths[i]));
     }
+    taxonomy.close();
+    dir.close();
   }
 }
Index: .../CategoryParentsStreamTest.java
===================================================================
@@ -3,7 +3,7 @@ package org.apache.lucene.facet.index.streaming;
 import java.io.IOException;
 
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.facet.FacetException;
@@ -49,8 +49,9 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
    */
   @Test
   public void testStreamDefaultParams() throws IOException {
+    Directory directory = newDirectory();
     TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(
-        new RAMDirectory());
+        directory);
     CategoryParentsStream stream = new CategoryParentsStream(
         new CategoryAttributesStream(categoryContainer),
         taxonomyWriter, new DefaultFacetIndexingParams());
@@ -63,6 +64,7 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
     assertEquals("Wrong number of tokens", 6, nTokens);
 
     taxonomyWriter.close();
+    directory.close();
   }
 
   /**
@@ -74,8 +76,9 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
    */
   @Test
   public void testStreamNonTopLevelParams() throws IOException {
+    Directory directory = newDirectory();
     final TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(
-        new RAMDirectory());
+        directory);
     FacetIndexingParams indexingParams = new DefaultFacetIndexingParams() {
       @Override
       protected OrdinalPolicy fixedOrdinalPolicy() {
@@ -102,6 +105,7 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
     assertEquals("Wrong number of tokens", 4, nTokens);
 
     taxonomyWriter.close();
+    directory.close();
   }
 
   /**
@@ -113,7 +117,8 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
    */
   @Test
   public void testNoRetainableAttributes() throws IOException, FacetException {
-    TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(new RAMDirectory());
+    Directory directory = newDirectory();
+    TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(directory);
 
     new CategoryParentsStream(new CategoryAttributesStream(categoryContainer),
         taxonomyWriter, new DefaultFacetIndexingParams());
@@ -133,6 +138,8 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
     }
     assertEquals("Wrong number of tokens with attributes", 1, nAttributes);
 
+    taxonomyWriter.close();
+    directory.close();
   }
 
   /**
@@ -144,8 +151,9 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
    */
   @Test
   public void testRetainableAttributes() throws IOException, FacetException {
+    Directory directory = newDirectory();
     TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(
-        new RAMDirectory());
+        directory);
 
     FacetIndexingParams indexingParams = new DefaultFacetIndexingParams();
     new CategoryParentsStream(new CategoryAttributesStream(
@@ -176,6 +184,7 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
     assertEquals("Wrong number of tokens with attributes", 3, nAttributes);
 
     taxonomyWriter.close();
+    directory.close();
   }
 
   private final class MyCategoryListTokenizer extends CategoryListTokenizer {
Index: .../CategoryTokenizerTest.java
===================================================================
@@ -7,7 +7,7 @@ import java.util.List;
 import java.util.Set;
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.facet.index.CategoryContainerTestBase;
@@ -46,8 +46,9 @@ public class CategoryTokenizerTest extends CategoryContainerTestBase {
    */
   @Test
   public void testTokensDefaultParams() throws IOException {
+    Directory directory = newDirectory();
     TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(
-        new RAMDirectory());
+        directory);
     DefaultFacetIndexingParams indexingParams = new DefaultFacetIndexingParams();
     CategoryTokenizer tokenizer = new CategoryTokenizer(
         new CategoryAttributesStream(categoryContainer),
@@ -73,6 +74,7 @@ public class CategoryTokenizerTest extends CategoryContainerTestBase {
     assertEquals("Wrong number of tokens", 3, nTokens);
 
     taxonomyWriter.close();
+    directory.close();
   }
 
   /**
@@ -83,8 +85,9 @@ public class CategoryTokenizerTest extends CategoryContainerTestBase {
    */
   @Test
   public void testLongCategoryPath() throws IOException {
+    Directory directory = newDirectory();
     TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(
-        new RAMDirectory());
+        directory);
 
     List<CategoryPath> longCategory = new ArrayList<CategoryPath>();
     longCategory.add(new CategoryPath("one", "two", "three", "four",
@@ -107,5 +110,6 @@ public class CategoryTokenizerTest extends CategoryContainerTestBase {
     assertFalse("Unexpected token", tokenizer.incrementToken());
 
     taxonomyWriter.close();
+    directory.close();
   }
 }
Index: .../BaseTestTopK.java
===================================================================
@@ -6,7 +6,7 @@ import java.util.List;
 
 import org.apache.lucene.DocumentBuilder.DocumentBuilderException;
 import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.RandomIndexWriter;
 
 import org.apache.lucene.facet.FacetTestBase;
 import org.apache.lucene.facet.index.params.FacetIndexingParams;
@@ -48,7 +48,7 @@ public abstract class BaseTestTopK extends FacetTestBase {
   private int nextInt;
 
   @Override
-  protected void populateIndex(IndexWriter iw, TaxonomyWriter taxo,
+  protected void populateIndex(RandomIndexWriter iw, TaxonomyWriter taxo,
       FacetIndexingParams iParams) throws IOException,
      DocumentBuilderException, CorruptIndexException {
    currDoc = -1;
Index: .../CategoryListIteratorTest.java
===================================================================
@@ -1,22 +1,23 @@
 package org.apache.lucene.facet.search;
 
 import java.io.IOException;
+import java.io.Reader;
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Payload;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -95,20 +96,20 @@ public class CategoryListIteratorTest extends LuceneTestCase {
 
   @Test
   public void testPayloadIntDecodingIterator() throws Exception {
-    Directory dir = new RAMDirectory();
+    Directory dir = newDirectory();
     DataTokenStream dts = new DataTokenStream("1",new SortingIntEncoder(
         new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder()))));
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
+        new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
     for (int i = 0; i < data.length; i++) {
       dts.setIdx(i);
       Document doc = new Document();
       doc.add(new Field("f", dts));
       writer.addDocument(doc);
     }
-    writer.commit();
+    IndexReader reader = writer.getReader();
     writer.close();
 
-    IndexReader reader = IndexReader.open(dir, true);
     CategoryListIterator cli = new PayloadIntDecodingIterator(reader, new Term(
         "f","1"), dts.encoder.createMatchingDecoder());
     cli.init();
@@ -127,6 +128,7 @@ public class CategoryListIteratorTest extends LuceneTestCase {
     }
     assertEquals("Missing categories!",10,totalCategories);
     reader.close();
+    dir.close();
   }
 
   /**
@@ -139,12 +141,21 @@ public class CategoryListIteratorTest extends LuceneTestCase {
    */
   @Test
   public void testPayloadIteratorWithInvalidDoc() throws Exception {
-    Directory dir = new RAMDirectory();
+    Directory dir = newDirectory();
     DataTokenStream dts = new DataTokenStream("1",new SortingIntEncoder(
         new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder()))));
     DataTokenStream dts2 = new DataTokenStream("2",new SortingIntEncoder(
         new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder()))));
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    // this test requires that no payloads ever be randomly present!
+    final Analyzer noPayloadsAnalyzer = new Analyzer() {
+      @Override
+      public TokenStream tokenStream(String fieldName, Reader reader) {
+        return new MockTokenizer(reader, MockTokenizer.KEYWORD, false);
+      }
+    };
+    // NOTE: test is wired to LogMP... because test relies on certain docids having payloads
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, noPayloadsAnalyzer).setMergePolicy(newLogMergePolicy()));
     for (int i = 0; i < data.length; i++) {
       dts.setIdx(i);
       Document doc = new Document();
@@ -170,10 +181,9 @@ public class CategoryListIteratorTest extends LuceneTestCase {
 
     }
 
-    writer.commit();
+    IndexReader reader = writer.getReader();
     writer.close();
 
-    IndexReader reader = IndexReader.open(dir, true);
     CategoryListIterator cli = new PayloadIntDecodingIterator(reader, new Term(
         "f","1"), dts.encoder.createMatchingDecoder());
     cli.init();
@@ -202,6 +212,7 @@ public class CategoryListIteratorTest extends LuceneTestCase {
     // Ok.. went through the first 4 docs, now lets try the 6th doc (docid 5)
     assertFalse("Doc #6 (docid=5) should not have a payload!",cli.skipTo(5));
     reader.close();
+    dir.close();
  }
 
 }
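The inline comments in the hunk above flag the one place blind randomization would break a test: MockAnalyzer may randomly attach payloads to tokens, and this test asserts that specific docids carry no payload, so a payload-free keyword analyzer is pinned down explicitly; likewise a randomized merge policy could move docids around, hence the fixed LogMergePolicy. Condensed, the replacement writer setup is:

    // A bare MockTokenizer-backed Analyzer: tokens only, never any payloads.
    final Analyzer noPayloadsAnalyzer = new Analyzer() {
      @Override
      public TokenStream tokenStream(String fieldName, Reader reader) {
        return new MockTokenizer(reader, MockTokenizer.KEYWORD, false);
      }
    };
    RandomIndexWriter writer = new RandomIndexWriter(random, dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, noPayloadsAnalyzer)
            .setMergePolicy(newLogMergePolicy())); // keep docids deterministic
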
Index: .../DrillDownTest.java
===================================================================
@@ -3,15 +3,16 @@ package org.apache.lucene.facet.search;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Index;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -19,7 +20,6 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -58,6 +58,8 @@ public class DrillDownTest extends LuceneTestCase {
   private FacetSearchParams nonDefaultParams;
   private static IndexReader reader;
   private static LuceneTaxonomyReader taxo;
+  private static Directory dir;
+  private static Directory taxoDir;
 
   public DrillDownTest() throws IOException {
     PerDimensionIndexingParams iParams = new PerDimensionIndexingParams();
@@ -71,10 +73,11 @@ public class DrillDownTest extends LuceneTestCase {
   }
   @BeforeClass
   public static void createIndexes() throws CorruptIndexException, LockObtainFailedException, IOException {
-    Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
 
-    Directory taxoDir = new RAMDirectory();
+    taxoDir = newDirectory();
     TaxonomyWriter taxoWriter = new LuceneTaxonomyWriter(taxoDir);
 
     for (int i = 0; i < 100; i++) {
@@ -98,10 +101,9 @@ public class DrillDownTest extends LuceneTestCase {
     }
 
     taxoWriter.close();
-    writer.commit();
+    reader = writer.getReader();
     writer.close();
 
-    reader = IndexReader.open(dir, true);
     taxo = new LuceneTaxonomyReader(taxoDir);
   }
 
@@ -127,7 +129,7 @@ public class DrillDownTest extends LuceneTestCase {
 
   @Test
   public void testQuery() throws IOException {
-    IndexSearcher searcher = new IndexSearcher(reader);
+    IndexSearcher searcher = newSearcher(reader);
 
     // Making sure the query yields 25 documents with the facet "a"
     Query q = DrillDown.query(defaultParams, new CategoryPath("a"));
@@ -155,7 +157,7 @@ public class DrillDownTest extends LuceneTestCase {
 
   @Test
   public void testQueryImplicitDefaultParams() throws IOException {
-    IndexSearcher searcher = new IndexSearcher(reader);
+    IndexSearcher searcher = newSearcher(reader);
 
     // Create the base query to start with
     Query q = DrillDown.query(defaultParams, new CategoryPath("a"));
@@ -178,11 +180,16 @@ public class DrillDownTest extends LuceneTestCase {
   public static void closeIndexes() throws IOException {
     if (reader != null) {
       reader.close();
+      reader = null;
     }
 
     if (taxo != null) {
       taxo.close();
+      taxo = null;
     }
+
+    dir.close();
+    taxoDir.close();
   }
 
 }
Index: .../TestFacetsAccumulatorWithComplement.java
===================================================================
@@ -100,8 +100,6 @@ public class TestFacetsAccumulatorWithComplement extends FacetTestBase {
   }
 
   private void doTestComplements() throws Exception {
-    assertTrue("Would like to test this with deletions!",indexReader.hasDeletions());
-    assertTrue("Would like to test this with deletions!",indexReader.numDeletedDocs()>0);
     Query q = new MatchAllDocsQuery(); //new TermQuery(new Term(TEXT,"white"));
     if (VERBOSE) {
       System.out.println("Query: "+q);
@@ -4,14 +4,15 @@ import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
@@ -22,6 +23,7 @@ import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.facet.FacetTestUtils;
@@ -62,8 +64,8 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
   public void testDefault() throws Exception {
     Directory[][] dirs = getDirs();
     // create and open an index writer
-    IndexWriter iw = new IndexWriter(dirs[0][0], new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter iw = new RandomIndexWriter(random, dirs[0][0], newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // create and open a taxonomy writer
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[0][1], OpenMode.CREATE);
 
@@ -74,15 +76,14 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
 
     seedIndex(iw, tw, iParams);
 
-    iw.commit();
+    IndexReader ir = iw.getReader();
     tw.commit();
 
     // prepare index reader and taxonomy.
     TaxonomyReader tr = new LuceneTaxonomyReader(dirs[0][1]);
-    IndexReader ir = IndexReader.open(dirs[0][0]);
 
     // prepare searcher to search against
-    IndexSearcher searcher = new IndexSearcher(ir);
+    IndexSearcher searcher = newSearcher(ir);
 
     FacetsCollector facetsCollector = performSearch(iParams, tr, ir,
         searcher);
@@ -98,14 +99,15 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     searcher.close();
     iw.close();
     tw.close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   @Test
   public void testCustom() throws Exception {
     Directory[][] dirs = getDirs();
     // create and open an index writer
-    IndexWriter iw = new IndexWriter(dirs[0][0], new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter iw = new RandomIndexWriter(random, dirs[0][0], newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // create and open a taxonomy writer
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[0][1],
         OpenMode.CREATE);
@@ -115,15 +117,14 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
         new CategoryListParams(new Term("$author", "Authors")));
     seedIndex(iw, tw, iParams);
 
-    iw.commit();
+    IndexReader ir = iw.getReader();
     tw.commit();
 
     // prepare index reader and taxonomy.
     TaxonomyReader tr = new LuceneTaxonomyReader(dirs[0][1]);
-    IndexReader ir = IndexReader.open(dirs[0][0]);
 
     // prepare searcher to search against
-    IndexSearcher searcher = new IndexSearcher(ir);
+    IndexSearcher searcher = newSearcher(ir);
 
     FacetsCollector facetsCollector = performSearch(iParams, tr, ir,
         searcher);
@@ -139,14 +140,15 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     searcher.close();
     iw.close();
     tw.close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   @Test
   public void testTwoCustomsSameField() throws Exception {
     Directory[][] dirs = getDirs();
     // create and open an index writer
-    IndexWriter iw = new IndexWriter(dirs[0][0], new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter iw = new RandomIndexWriter(random, dirs[0][0], newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // create and open a taxonomy writer
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[0][1],
         OpenMode.CREATE);
@@ -158,15 +160,14 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
         new CategoryListParams(new Term("$music", "Composers")));
     seedIndex(iw, tw, iParams);
 
-    iw.commit();
+    IndexReader ir = iw.getReader();
     tw.commit();
 
     // prepare index reader and taxonomy.
     TaxonomyReader tr = new LuceneTaxonomyReader(dirs[0][1]);
-    IndexReader ir = IndexReader.open(dirs[0][0]);
 
     // prepare searcher to search against
-    IndexSearcher searcher = new IndexSearcher(ir);
+    IndexSearcher searcher = newSearcher(ir);
 
     FacetsCollector facetsCollector = performSearch(iParams, tr, ir,
         searcher);
@@ -183,6 +184,7 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     searcher.close();
     iw.close();
     tw.close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   private void assertPostingListExists(String field, String text, IndexReader ir) throws IOException {
@@ -194,8 +196,8 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
   public void testDifferentFieldsAndText() throws Exception {
     Directory[][] dirs = getDirs();
     // create and open an index writer
-    IndexWriter iw = new IndexWriter(dirs[0][0], new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter iw = new RandomIndexWriter(random, dirs[0][0], newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // create and open a taxonomy writer
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[0][1], OpenMode.CREATE);
 
@@ -206,15 +208,14 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
         new CategoryListParams(new Term("$composers", "Composers")));
     seedIndex(iw, tw, iParams);
 
-    iw.commit();
+    IndexReader ir = iw.getReader();
     tw.commit();
 
     // prepare index reader and taxonomy.
     TaxonomyReader tr = new LuceneTaxonomyReader(dirs[0][1]);
-    IndexReader ir = IndexReader.open(dirs[0][0]);
 
     // prepare searcher to search against
-    IndexSearcher searcher = new IndexSearcher(ir);
+    IndexSearcher searcher = newSearcher(ir);
 
     FacetsCollector facetsCollector = performSearch(iParams, tr, ir,
         searcher);
@@ -229,14 +230,15 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     searcher.close();
     iw.close();
     tw.close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   @Test
   public void testSomeSameSomeDifferent() throws Exception {
     Directory[][] dirs = getDirs();
     // create and open an index writer
-    IndexWriter iw = new IndexWriter(dirs[0][0], new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter iw = new RandomIndexWriter(random, dirs[0][0], newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // create and open a taxonomy writer
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[0][1],
         OpenMode.CREATE);
@@ -251,15 +253,14 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
 
     seedIndex(iw, tw, iParams);
 
-    iw.commit();
+    IndexReader ir = iw.getReader();
     tw.commit();
 
     // prepare index reader and taxonomy.
     TaxonomyReader tr = new LuceneTaxonomyReader(dirs[0][1]);
-    IndexReader ir = IndexReader.open(dirs[0][0]);
 
     // prepare searcher to search against
-    IndexSearcher searcher = new IndexSearcher(ir);
+    IndexSearcher searcher = newSearcher(ir);
 
     FacetsCollector facetsCollector = performSearch(iParams, tr, ir,
         searcher);
@@ -274,6 +275,7 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     searcher.close();
     iw.close();
     tw.close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   private Directory[][] getDirs() throws IOException {
@@ -358,7 +360,7 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     return facetsCollector;
   }
 
-  private void seedIndex(IndexWriter iw, TaxonomyWriter tw,
+  private void seedIndex(RandomIndexWriter iw, TaxonomyWriter tw,
       FacetIndexingParams iParams) throws IOException, CorruptIndexException {
     FacetTestUtils.add(iParams, iw, tw, "Author", "Mark Twain");
     FacetTestUtils.add(iParams, iw, tw, "Author", "Stephen King");
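Each test above now ends with IOUtils.closeSafely(false, dirs[0]) instead of leaving its directories open. The boolean argument controls whether exceptions are suppressed; with false, every argument is still closed, but the first failure is rethrown at the end. A rough sketch of the behavior that one-liner stands in for:

    // Roughly what IOUtils.closeSafely(false, dirs[0]) does: close everything,
    // remember the first failure, and rethrow it after all closes were attempted.
    IOException first = null;
    for (Directory d : dirs[0]) {
      try {
        d.close();
      } catch (IOException e) {
        if (first == null) first = e; // keep closing the rest
      }
    }
    if (first != null) throw first;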
@@ -59,8 +59,6 @@ public class TestScoredDocIdCollector extends FacetTestBase {
   @Test
   public void testConstantScore() throws Exception {
     // test that constant score works well
-    assertTrue("Would like to test this with deletions!",indexReader.hasDeletions());
-    assertTrue("Would like to test this with deletions!",indexReader.numDeletedDocs()>0);
 
     Query q = new TermQuery(new Term(CONTENT_FIELD, "white"));
     if (VERBOSE) {
@@ -4,22 +4,21 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Index;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -70,8 +69,8 @@ public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
         Integer.MAX_VALUE };
 
     for (int partitionSize : partitionSizes) {
-      Directory iDir = new RAMDirectory();
-      Directory tDir = new RAMDirectory();
+      Directory iDir = newDirectory();
+      Directory tDir = newDirectory();
 
       if (VERBOSE) {
        System.out.println("Partition Size: " + partitionSize);
@@ -85,9 +84,9 @@ public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
        }
      };
 
-      IndexWriter iw = new IndexWriter(iDir,
-          new IndexWriterConfig(TEST_VERSION_CURRENT,
-              new StandardAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE));
+      RandomIndexWriter iw = new RandomIndexWriter(random, iDir,
+          newIndexWriterConfig(TEST_VERSION_CURRENT,
+              new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
       TaxonomyWriter tw = new LuceneTaxonomyWriter(tDir);
       prvt_add(iParams, iw, tw, "a", "b");
       prvt_add(iParams, iw, tw, "a", "b", "1");
@@ -106,12 +105,12 @@ public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
       prvt_add(iParams, iw, tw, "a", "d");
       prvt_add(iParams, iw, tw, "a", "e");
 
-      iw.commit();
+      IndexReader ir = iw.getReader();
       iw.close();
       tw.commit();
       tw.close();
 
-      IndexSearcher is = new IndexSearcher(iDir);
+      IndexSearcher is = newSearcher(ir);
       LuceneTaxonomyReader tr = new LuceneTaxonomyReader(tDir);
 
       // Get all of the documents and run the query, then do different
@@ -320,11 +319,15 @@ public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
       assertFalse("Shouldn't have found anything for a FacetRequest " +
          "of a facet that doesn't exist in the index.", hasDoctor);
       assertEquals("Shouldn't have found more than seven request.", 7, facetResults.size());
+      ir.close();
+      tr.close();
+      iDir.close();
+      tDir.close();
     }
 
   }
 
-  private void prvt_add(DefaultFacetIndexingParams iParams, IndexWriter iw,
+  private void prvt_add(DefaultFacetIndexingParams iParams, RandomIndexWriter iw,
       TaxonomyWriter tw, String... strings) throws IOException,
       CorruptIndexException {
     ArrayList<CategoryPath> cps = new ArrayList<CategoryPath>();
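The analyzer substitutions in this file and the others follow one fixed mapping onto MockAnalyzer, whose constructor used here takes the source of randomness, a tokenizer automaton, and a lowercasing flag:

    // The mapping applied throughout the commit (third argument = lowercase):
    Analyzer whitespace = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false); // for WhitespaceAnalyzer
    Analyzer keyword    = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);    // for KeywordAnalyzer
    Analyzer simple     = new MockAnalyzer(random); // default variant, used where StandardAnalyzer was

Using MockAnalyzer keeps these tests independent of the real analysis chains, which carry behavior (stopwords, lowercasing rules) that faceting tests should not depend on.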
@@ -140,6 +140,7 @@ public class TestTopKResultsHandler extends BaseTestTopK {
       assertEquals(6.0, parentRes.getValue(), Double.MIN_VALUE);
       frn = resultNodesAsArray(parentRes);
       assertEquals(1.0, frn[0].getValue(), Double.MIN_VALUE);
+      closeAll();
     }
   }
 
@@ -234,6 +235,7 @@ public class TestTopKResultsHandler extends BaseTestTopK {
       assertEquals("Shouldn't have found anything for a FacetRequest "
          + "of a facet that doesn't exist in the index.", 0, facetResults.size());
 
+      closeAll();
     }
   }
 }
@@ -5,6 +5,7 @@ import java.io.IOException;
 import java.util.Arrays;
 
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util._TestUtil;
 import org.junit.Test;
 
@@ -108,6 +109,7 @@ public class TestTotalFacetCounts extends LuceneTestCase {
       ++partition;
     }
     readers[0].close();
+    IOUtils.closeSafely(false, dirs[0]);
     tmpFile.delete();
   }
 
@@ -5,13 +5,15 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockDirectoryWrapper;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -34,6 +36,7 @@ import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.SlowRAMDirectory;
 import org.apache.lucene.util._TestUtil;
 
@@ -106,13 +109,23 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
     initCache();
   }
 
+  /** runs a few instances of {@link MultiCLSearcher} in parallel */
+  public void testGeneralSynchronization() throws Exception {
+    int numIters = atLeast(2);
+    for (int i = 0; i < numIters; i++) {
+      doTestGeneralSynchronization(_TestUtil.nextInt(random, 2, 4),
+          random.nextBoolean() ? -1 : _TestUtil.nextInt(random, 1, 10),
+          _TestUtil.nextInt(random, 0, 3));
+    }
+  }
+
   /**
    * Run many instances of {@link MultiCLSearcher} in parallel, results should
    * be sane. Each instance has a random delay for reading bytes, to ensure
    * that threads finish in different order than started.
    */
-  @Test
-  public void testGeneralSynchronization() throws Exception {
+  @Test @Nightly
+  public void testGeneralSynchronizationBig() throws Exception {
     int[] numThreads = new int[] { 2, 3, 5, 8 };
     int[] sleepMillis = new int[] { -1, 1, 20, 33 };
     int[] cacheSize = new int[] { 0,1,2,3,5 };
@@ -130,17 +143,20 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
       InterruptedException {
     TFC.setCacheSize(cacheSize);
     SlowRAMDirectory slowIndexDir = new SlowRAMDirectory(-1, random);
+    MockDirectoryWrapper indexDir = new MockDirectoryWrapper(random, slowIndexDir);
     SlowRAMDirectory slowTaxoDir = new SlowRAMDirectory(-1, random);
+    MockDirectoryWrapper taxoDir = new MockDirectoryWrapper(random, slowTaxoDir);
 
 
     // Index documents without the "slowness"
-    MultiCLIndexer.index(slowIndexDir, slowTaxoDir);
+    MultiCLIndexer.index(indexDir, taxoDir);
 
     slowIndexDir.setSleepMillis(sleepMillis);
     slowTaxoDir.setSleepMillis(sleepMillis);
 
     // Open the slow readers
-    IndexReader slowIndexReader = IndexReader.open(slowIndexDir);
-    TaxonomyReader slowTaxoReader = new LuceneTaxonomyReader(slowTaxoDir);
+    IndexReader slowIndexReader = IndexReader.open(indexDir);
+    TaxonomyReader slowTaxoReader = new LuceneTaxonomyReader(taxoDir);
 
     // Class to perform search and return results as threads
     class Multi extends Thread {
@@ -221,6 +237,8 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
     // we're done, close the index reader and the taxonomy.
     slowIndexReader.close();
     slowTaxoReader.close();
+    indexDir.close();
+    taxoDir.close();
   }
 
   /**
@@ -321,6 +339,7 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
     readers[0].close();
     r2.close();
     outputFile.delete();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   private int assertReadFromDisc(TotalFacetCounts totalCounts, int prevGen, String errMsg) {
@@ -384,6 +403,9 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
         readers[0].indexReader, readers[0].taxReader, iParams, null);
     assertReadFromDisc(totalCounts, 0, "after reading from disk.");
     outputFile.delete();
+    writers[0].close();
+    readers[0].close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   /**
@@ -397,7 +419,7 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
 
     // Write index using 'normal' directories
     IndexWriter w = new IndexWriter(indexDir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     LuceneTaxonomyWriter tw = new LuceneTaxonomyWriter(taxoDir);
     DefaultFacetIndexingParams iParams = new DefaultFacetIndexingParams();
     // Add documents and facets
@@ -508,8 +530,13 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
     assertTrue("with cache of size 2 res no. 1 should come from cache",
         totalCounts1 == TFC.getTotalCounts(readers[1].indexReader, readers[1].taxReader, iParams, null));
 
+    writers[0].close();
+    writers[1].close();
     readers[0].close();
     readers[1].close();
+    for (Directory[] dirset : dirs) {
+      IOUtils.closeSafely(false, dirset);
+    }
   }
 
 }
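Two things happen in TestTotalFacetCountsCache beyond the usual analyzer and cleanup changes: the exhaustive synchronization sweep is demoted to a @Nightly test while a cheap randomized variant runs by default, and each SlowRAMDirectory is wrapped in a MockDirectoryWrapper. The wrapper is what turns a leaked file handle into a test failure; a sketch, with SlowRAMDirectory's delay injection unchanged underneath:

    // MockDirectoryWrapper tracks open inputs/outputs on the delegate and
    // fails on close() if anything was leaked; SlowRAMDirectory still
    // injects its read delays underneath.
    SlowRAMDirectory slow = new SlowRAMDirectory(-1, random);
    MockDirectoryWrapper dir = new MockDirectoryWrapper(random, slow);
    // ... use dir wherever a Directory is expected ...
    dir.close(); // throws if an IndexInput/IndexOutput was left open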
@@ -3,19 +3,18 @@ package org.apache.lucene.facet.search.association;
 import java.util.List;
 
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.facet.enhancements.EnhancementsDocumentBuilder;
 import org.apache.lucene.facet.enhancements.association.AssociationEnhancement;
@@ -53,8 +52,9 @@ import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;
 /** Test for associations */
 public class AssociationsFacetRequestTest extends LuceneTestCase {
 
-  private static Directory dir = new RAMDirectory();
-  private static Directory taxoDir = new RAMDirectory();
+  private static Directory dir;
+  private static IndexReader reader;
+  private static Directory taxoDir;
 
   private static final CategoryPath aint = new CategoryPath("int", "a");
   private static final CategoryPath bint = new CategoryPath("int", "b");
@@ -63,8 +63,11 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
 
   @BeforeClass
   public static void beforeClassAssociationsFacetRequestTest() throws Exception {
+    dir = newDirectory();
+    taxoDir = newDirectory();
     // preparations - index, taxonomy, content
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
+        new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
 
     TaxonomyWriter taxoWriter = new LuceneTaxonomyWriter(taxoDir);
 
@@ -87,18 +90,22 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
     }
 
     taxoWriter.close();
+    reader = writer.getReader();
     writer.close();
   }
 
   @AfterClass
   public static void afterClassAssociationsFacetRequestTest() throws Exception {
+    reader.close();
+    reader = null;
     dir.close();
+    dir = null;
     taxoDir.close();
+    taxoDir = null;
   }
 
   @Test
   public void testIntSumAssociation() throws Exception {
-    IndexReader reader = IndexReader.open(dir, true);
     LuceneTaxonomyReader taxo = new LuceneTaxonomyReader(taxoDir);
 
     // facet requests for two facets
@@ -110,7 +117,8 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
 
     FacetsCollector fc = new FacetsCollector(fsp, reader, taxo);
 
-    new IndexSearcher(reader).search(q, fc);
+    IndexSearcher searcher = newSearcher(reader);
+    searcher.search(q, fc);
     List<FacetResult> res = fc.getFacetResults();
 
     assertNotNull("No results!",res);
@@ -118,14 +126,12 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
     assertEquals("Wrong count for category 'a'!",200, (int) res.get(0).getFacetResultNode().getValue());
     assertEquals("Wrong count for category 'b'!",150, (int) res.get(1).getFacetResultNode().getValue());
 
+    searcher.close();
     taxo.close();
-    reader.close();
   }
 
   @Test
   public void testFloatSumAssociation() throws Exception {
 
-    IndexReader reader = IndexReader.open(dir, true);
     LuceneTaxonomyReader taxo = new LuceneTaxonomyReader(taxoDir);
 
     // facet requests for two facets
@@ -137,7 +143,8 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
 
     FacetsCollector fc = new FacetsCollector(fsp, reader, taxo);
 
-    new IndexSearcher(reader).search(q, fc);
+    IndexSearcher searcher = newSearcher(reader);
+    searcher.search(q, fc);
     List<FacetResult> res = fc.getFacetResults();
 
     assertNotNull("No results!",res);
@@ -145,8 +152,8 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
     assertEquals("Wrong count for category 'a'!",50f, (float) res.get(0).getFacetResultNode().getValue(), 0.00001);
     assertEquals("Wrong count for category 'b'!",10f, (float) res.get(1).getFacetResultNode().getValue(), 0.00001);
 
+    searcher.close();
     taxo.close();
-    reader.close();
   }
 
   @Test
@@ -154,7 +161,6 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
     // Same category list cannot be aggregated by two different aggregators. If
     // you want to do that, you need to separate the categories into two
     // category list (you'll still have one association list).
-    IndexReader reader = IndexReader.open(dir, true);
     LuceneTaxonomyReader taxo = new LuceneTaxonomyReader(taxoDir);
 
     // facet requests for two facets
@@ -168,13 +174,16 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
 
     FacetsCollector fc = new FacetsCollector(fsp, reader, taxo);
 
-    new IndexSearcher(reader).search(q, fc);
+    IndexSearcher searcher = newSearcher(reader);
+    searcher.search(q, fc);
     try {
       fc.getFacetResults();
       fail("different aggregators for same category list should not be supported");
     } catch (RuntimeException e) {
      // ok - expected
    }
+    searcher.close();
+    taxo.close();
  }
 
 }
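AssociationsFacetRequestTest switches from opening a fresh IndexReader in every test to a single static reader built once in @BeforeClass and released in @AfterClass, with each test borrowing it through newSearcher(reader). Nulling the static fields afterwards lets the fixtures be garbage collected once the class finishes. A compact sketch of the skeleton adopted here:

    // Skeleton of the static-fixture pattern, inside a LuceneTestCase subclass.
    private static Directory dir;
    private static IndexReader reader;

    @BeforeClass
    public static void setUpClass() throws Exception {
      dir = newDirectory();
      // ... build the index with a RandomIndexWriter, then:
      // reader = writer.getReader(); writer.close();
    }

    @AfterClass
    public static void tearDownClass() throws Exception {
      reader.close(); reader = null; // null the statics so they can be GC'd
      dir.close();    dir = null;
    }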
@@ -2,7 +2,7 @@ package org.apache.lucene.facet.search.params;
 
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -53,8 +53,8 @@ public class FacetRequestTest extends LuceneTestCase {
   @Test
   public void testGetFacetResultHandlerDifferentTaxonomy() throws Exception {
     FacetRequest fr = new CountFacetRequest(new CategoryPath("a"), 10);
-    RAMDirectory dir1 = new RAMDirectory();
-    RAMDirectory dir2 = new RAMDirectory();
+    Directory dir1 = newDirectory();
+    Directory dir2 = newDirectory();
     // create empty indexes, so that LTR ctor won't complain about a missing index.
     new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();
     new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();
@@ -63,6 +63,10 @@ public class FacetRequestTest extends LuceneTestCase {
     FacetResultsHandler frh1 = fr.createFacetResultsHandler(tr1);
     FacetResultsHandler frh2 = fr.createFacetResultsHandler(tr2);
     assertTrue("should not return the same FacetResultHandler instance for different TaxonomyReader instances", frh1 != frh2);
+    tr1.close();
+    tr2.close();
+    dir1.close();
+    dir2.close();
   }
 
   @Test
@@ -70,13 +74,15 @@ public class FacetRequestTest extends LuceneTestCase {
     // Tests that after a FRH is created by FR, changes to FR are not reflected
     // in the FRH.
     FacetRequest fr = new CountFacetRequest(new CategoryPath("a"), 10);
-    RAMDirectory dir = new RAMDirectory();
+    Directory dir = newDirectory();
     // create empty indexes, so that LTR ctor won't complain about a missing index.
     new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();
     TaxonomyReader tr = new LuceneTaxonomyReader(dir);
     FacetResultsHandler frh = fr.createFacetResultsHandler(tr);
     fr.setDepth(10);
     assertEquals(FacetRequest.DEFAULT_DEPTH, frh.getFacetRequest().getDepth());
+    tr.close();
+    dir.close();
   }
 
   @Test
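FacetRequestTest and the tests that follow trade hard-coded RAMDirectory instances for newDirectory(), which picks a random, leak-checked Directory implementation per run; that is why every test now closes its readers and directories explicitly. The lifecycle, sketched for a LuceneTestCase subclass:

    Directory dir = newDirectory(); // random impl, wrapped for leak checking
    new LuceneTaxonomyWriter(dir).close(); // create an empty taxonomy index
    TaxonomyReader tr = new LuceneTaxonomyReader(dir);
    // ... assertions against tr ...
    tr.close();
    dir.close(); // fails the test if anything above leaked an open file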
@@ -1,6 +1,6 @@
 package org.apache.lucene.facet.search.params;
 
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -36,11 +36,13 @@ public class FacetSearchParamsTest extends LuceneTestCase {
     FacetSearchParams fsp = new FacetSearchParams();
     assertEquals("unexpected default facet indexing params class", DefaultFacetIndexingParams.class.getName(), fsp.getFacetIndexingParams().getClass().getName());
     assertEquals("no facet requests should be added by default", 0, fsp.getFacetRequests().size());
-    RAMDirectory dir = new RAMDirectory();
+    Directory dir = newDirectory();
     new LuceneTaxonomyWriter(dir).close();
     TaxonomyReader tr = new LuceneTaxonomyReader(dir);
     assertEquals("unexpected partition offset for 0 categories", 1, PartitionsUtils.partitionOffset(fsp, 1, tr));
     assertEquals("unexpected partition size for 0 categories", 1, PartitionsUtils.partitionSize(fsp,tr));
+    tr.close();
+    dir.close();
   }
 
   @Test
@@ -53,7 +55,7 @@ public class FacetSearchParamsTest extends LuceneTestCase {
   @Test
   public void testPartitionSizeWithCategories() throws Exception {
     FacetSearchParams fsp = new FacetSearchParams();
-    RAMDirectory dir = new RAMDirectory();
+    Directory dir = newDirectory();
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dir);
     tw.addCategory(new CategoryPath("a"));
     tw.commit();
@@ -61,6 +63,8 @@ public class FacetSearchParamsTest extends LuceneTestCase {
     TaxonomyReader tr = new LuceneTaxonomyReader(dir);
     assertEquals("unexpected partition offset for 1 categories", 2, PartitionsUtils.partitionOffset(fsp, 1, tr));
     assertEquals("unexpected partition size for 1 categories", 2, PartitionsUtils.partitionSize(fsp,tr));
+    tr.close();
+    dir.close();
   }
 
   @Test
@@ -4,14 +4,13 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -90,8 +89,8 @@ public class MultiIteratorsPerCLParamsTest extends LuceneTestCase {
     // FacetRequest's dimension
     CategoryListParams clp = new CategoryListParams();
     FacetIndexingParams iParams = new DefaultFacetIndexingParams(clp);
-    Directory indexDir = new RAMDirectory();
-    Directory taxoDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
+    Directory taxoDir = newDirectory();
     populateIndex(iParams, indexDir, taxoDir);
 
     TaxonomyReader taxo = new LuceneTaxonomyReader(taxoDir);
@@ -122,6 +121,10 @@ public class MultiIteratorsPerCLParamsTest extends LuceneTestCase {
     countForbiddenDimension = null;
     validateFacetedSearch(iParams, taxo, reader, clCache, allDocs, new String[] {
         "author", "date" }, new int[] { 5, 5 }, new int[] { 5, 2 });
+    taxo.close();
+    reader.close();
+    indexDir.close();
+    taxoDir.close();
   }
 
   private void validateFacetedSearch(FacetIndexingParams iParams,
@@ -163,7 +166,8 @@ public class MultiIteratorsPerCLParamsTest extends LuceneTestCase {
 
   private void populateIndex(FacetIndexingParams iParams, Directory indexDir,
       Directory taxoDir) throws Exception {
-    IndexWriter writer = new IndexWriter(indexDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, indexDir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
     TaxonomyWriter taxoWriter = new LuceneTaxonomyWriter(taxoDir);
 
     for (CategoryPath[] categories : perDocCategories) {
@@ -101,6 +101,7 @@ public abstract class BaseSampleTestTopK extends BaseTestTopK {
         }
       }
     }
+    closeAll();
   }
 }
 
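BaseSampleTestTopK and TestTopKResultsHandler now finish each test with closeAll(), presumably a helper in the shared test base class that releases whatever resources the base class opened for the test; its body is not part of this commit view. A purely hypothetical sketch of what such a helper would do:

    // Hypothetical sketch only -- the real closeAll() is not shown in this
    // commit view; the field names here are illustrative.
    protected void closeAll() throws IOException {
      searcher.close();
      indexReader.close();
      taxoReader.close();
      indexDir.close();
      taxoDir.close();
    }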
@@ -158,13 +158,14 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    */
   @Test
   public void testWriter() throws Exception {
-    Directory indexDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
     TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     // Also check TaxonomyWriter.getSize() - see that the taxonomy's size
     // is what we expect it to be.
     assertEquals(expectedCategories.length, tw.getSize());
     tw.close();
+    indexDir.close();
   }
 
   /** testWriterTwice is exactly like testWriter, except that after adding
@@ -173,7 +174,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    */
   @Test
   public void testWriterTwice() throws Exception {
-    Directory indexDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
     TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     // run fillTaxonomy again - this will try to add the same categories
@@ -184,6 +185,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
     // extraneous categories were created:
     assertEquals(expectedCategories.length, tw.getSize());
     tw.close();
+    indexDir.close();
   }
 
   /** testWriterTwice2 is similar to testWriterTwice, except that the index
@@ -194,7 +196,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    */
   @Test
   public void testWriterTwice2() throws Exception {
-    Directory indexDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
     TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
@@ -206,6 +208,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
     fillTaxonomy(tw);
     assertEquals(expectedCategories.length, tw.getSize());
     tw.close();
+    indexDir.close();
   }
 
   /**
@@ -217,7 +220,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    */
   @Test
   public void testWriterTwice3() throws Exception {
-    Directory indexDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
     // First, create and fill the taxonomy
     TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
@@ -239,6 +242,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
     tw.commit();
     assertEquals(expectedCategories.length+1, tw.getSize());
     tw.close();
+    indexDir.close();
   }
 
   /** Another set of tests for the writer, which don't use an array and
@@ -248,7 +252,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    */
   @Test
   public void testWriterSimpler() throws Exception {
-    Directory indexDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
     TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
     assertEquals(1, tw.getSize()); // the root only
     // Test that adding a new top-level category works
@@ -283,6 +287,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
     assertEquals(9, tw.getSize());
 
     tw.close();
+    indexDir.close();
   }
 
   /** Test writing an empty index, and seeing that a reader finds in it
@@ -291,7 +296,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    */
   @Test
   public void testRootOnly() throws Exception {
-    Directory indexDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
     TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
     // right after opening the index, it should already contain the
     // root, so have size 1:
@@ -303,6 +308,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
     assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getParent(0));
     assertEquals(0, tr.getOrdinal(new CategoryPath()));
     tr.close();
+    indexDir.close();
   }
 
   /** The following test is exactly the same as testRootOnly, except we
@@ -312,7 +318,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    */
   @Test
   public void testRootOnly2() throws Exception {
-    Directory indexDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
     TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
     tw.commit();
     TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
@@ -322,6 +328,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
     assertEquals(0, tr.getOrdinal(new CategoryPath()));
     tw.close();
     tr.close();
+    indexDir.close();
   }
 
   /** Basic tests for TaxonomyReader's category <=> ordinal transformations
@@ -331,7 +338,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    */
   @Test
   public void testReaderBasic() throws Exception {
-    Directory indexDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
     TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
@@ -373,6 +380,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
     assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getOrdinal(new CategoryPath("Author", "Jules Verne")));
 
     tr.close();
+    indexDir.close();
   }
 
   /** Tests for TaxonomyReader's getParent() method.
@@ -389,7 +397,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
 
   @Test
   public void testReaderParent() throws Exception {
-    Directory indexDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
     TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
@@ -436,6 +444,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
     }
 
     tr.close();
+    indexDir.close();
   }
 
   /**
@@ -453,7 +462,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    */
   @Test
   public void testWriterParent1() throws Exception {
-    Directory indexDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
     TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
     fillTaxonomy(tw);
     tw.close();
@@ -464,11 +473,12 @@ public class TestTaxonomyCombined extends LuceneTestCase {
 
     tw.close();
     tr.close();
+    indexDir.close();
   }
 
   @Test
   public void testWriterParent2() throws Exception {
|
||||||
Directory indexDir = new RAMDirectory();
|
Directory indexDir = newDirectory();
|
||||||
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
||||||
fillTaxonomy(tw);
|
fillTaxonomy(tw);
|
||||||
tw.commit();
|
tw.commit();
|
||||||
|
@ -478,6 +488,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
|
|
||||||
tw.close();
|
tw.close();
|
||||||
tr.close();
|
tr.close();
|
||||||
|
indexDir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
private void checkWriterParent(TaxonomyReader tr, TaxonomyWriter tw) throws Exception {
|
private void checkWriterParent(TaxonomyReader tr, TaxonomyWriter tw) throws Exception {
|
||||||
|
@ -530,7 +541,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test
|
||||||
public void testReaderParentArray() throws Exception {
|
public void testReaderParentArray() throws Exception {
|
||||||
Directory indexDir = new RAMDirectory();
|
Directory indexDir = newDirectory();
|
||||||
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
||||||
fillTaxonomy(tw);
|
fillTaxonomy(tw);
|
||||||
tw.close();
|
tw.close();
|
||||||
|
@ -541,6 +552,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
assertEquals(tr.getParent(i), parents[i]);
|
assertEquals(tr.getParent(i), parents[i]);
|
||||||
}
|
}
|
||||||
tr.close();
|
tr.close();
|
||||||
|
indexDir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -550,7 +562,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test
|
||||||
public void testChildrenArrays() throws Exception {
|
public void testChildrenArrays() throws Exception {
|
||||||
Directory indexDir = new RAMDirectory();
|
Directory indexDir = newDirectory();
|
||||||
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
||||||
fillTaxonomy(tw);
|
fillTaxonomy(tw);
|
||||||
tw.close();
|
tw.close();
|
||||||
|
@ -601,6 +613,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
tr.close();
|
tr.close();
|
||||||
|
indexDir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -613,7 +626,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test
|
||||||
public void testChildrenArraysInvariants() throws Exception {
|
public void testChildrenArraysInvariants() throws Exception {
|
||||||
Directory indexDir = new RAMDirectory();
|
Directory indexDir = newDirectory();
|
||||||
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
||||||
fillTaxonomy(tw);
|
fillTaxonomy(tw);
|
||||||
tw.close();
|
tw.close();
|
||||||
|
@ -685,6 +698,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
tr.close();
|
tr.close();
|
||||||
|
indexDir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -692,7 +706,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test
|
||||||
public void testChildrenArraysGrowth() throws Exception {
|
public void testChildrenArraysGrowth() throws Exception {
|
||||||
Directory indexDir = new RAMDirectory();
|
Directory indexDir = newDirectory();
|
||||||
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
||||||
tw.addCategory(new CategoryPath("hi", "there"));
|
tw.addCategory(new CategoryPath("hi", "there"));
|
||||||
tw.commit();
|
tw.commit();
|
||||||
|
@ -722,6 +736,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
assertTrue(Arrays.equals(new int[] { -1, -1, -1, 2, 1 }, ca.getOlderSiblingArray()));
|
assertTrue(Arrays.equals(new int[] { -1, -1, -1, 2, 1 }, ca.getOlderSiblingArray()));
|
||||||
tw.close();
|
tw.close();
|
||||||
tr.close();
|
tr.close();
|
||||||
|
indexDir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -731,7 +746,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
@Ignore
|
@Ignore
|
||||||
public void testTaxonomyReaderRefreshRaces() throws Exception {
|
public void testTaxonomyReaderRefreshRaces() throws Exception {
|
||||||
// compute base child arrays - after first chunk, and after the other
|
// compute base child arrays - after first chunk, and after the other
|
||||||
Directory indexDirBase = new RAMDirectory();
|
Directory indexDirBase = newDirectory();
|
||||||
TaxonomyWriter twBase = new LuceneTaxonomyWriter(indexDirBase);
|
TaxonomyWriter twBase = new LuceneTaxonomyWriter(indexDirBase);
|
||||||
twBase.addCategory(new CategoryPath("a", "0"));
|
twBase.addCategory(new CategoryPath("a", "0"));
|
||||||
final CategoryPath abPath = new CategoryPath("a", "b");
|
final CategoryPath abPath = new CategoryPath("a", "b");
|
||||||
|
@ -757,6 +772,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
for (int retry=0; retry<100; retry++) {
|
for (int retry=0; retry<100; retry++) {
|
||||||
assertConsistentYoungestChild(abPath, abOrd, abYoungChildBase1, abYoungChildBase2, retry);
|
assertConsistentYoungestChild(abPath, abOrd, abYoungChildBase1, abYoungChildBase2, retry);
|
||||||
}
|
}
|
||||||
|
indexDirBase.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
private void assertConsistentYoungestChild(final CategoryPath abPath,
|
private void assertConsistentYoungestChild(final CategoryPath abPath,
|
||||||
|
@ -848,7 +864,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test
|
||||||
public void testSeparateReaderAndWriter() throws Exception {
|
public void testSeparateReaderAndWriter() throws Exception {
|
||||||
Directory indexDir = new RAMDirectory();
|
Directory indexDir = newDirectory();
|
||||||
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
||||||
tw.commit();
|
tw.commit();
|
||||||
TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
|
TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
|
||||||
|
@ -910,11 +926,12 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
assertEquals(3, tr.getSize());
|
assertEquals(3, tr.getSize());
|
||||||
tw.close();
|
tw.close();
|
||||||
tr.close();
|
tr.close();
|
||||||
|
indexDir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testSeparateReaderAndWriter2() throws Exception {
|
public void testSeparateReaderAndWriter2() throws Exception {
|
||||||
Directory indexDir = new RAMDirectory();
|
Directory indexDir = newDirectory();
|
||||||
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
||||||
tw.commit();
|
tw.commit();
|
||||||
TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
|
TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);
|
||||||
|
@ -940,6 +957,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
assertEquals(2, tr.getSize()); // still root only...
|
assertEquals(2, tr.getSize()); // still root only...
|
||||||
tw.close();
|
tw.close();
|
||||||
tr.close();
|
tr.close();
|
||||||
|
indexDir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -948,6 +966,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test
|
||||||
public void testWriterLock() throws Exception {
|
public void testWriterLock() throws Exception {
|
||||||
|
// native fslock impl gets angry if we use it, so use RAMDirectory explicitly.
|
||||||
Directory indexDir = new RAMDirectory();
|
Directory indexDir = new RAMDirectory();
|
||||||
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
||||||
tw.addCategory(new CategoryPath("hi", "there"));
|
tw.addCategory(new CategoryPath("hi", "there"));
|
||||||
|
@ -975,6 +994,8 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
tr.refresh();
|
tr.refresh();
|
||||||
assertEquals(3, tr.getOrdinal(new CategoryPath("hey")));
|
assertEquals(3, tr.getOrdinal(new CategoryPath("hey")));
|
||||||
tr.close();
|
tr.close();
|
||||||
|
tw.close();
|
||||||
|
indexDir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -1032,13 +1053,14 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test
|
||||||
public void testWriterCheckPaths() throws Exception {
|
public void testWriterCheckPaths() throws Exception {
|
||||||
Directory indexDir = new RAMDirectory();
|
Directory indexDir = newDirectory();
|
||||||
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
||||||
fillTaxonomyCheckPaths(tw);
|
fillTaxonomyCheckPaths(tw);
|
||||||
// Also check TaxonomyWriter.getSize() - see that the taxonomy's size
|
// Also check TaxonomyWriter.getSize() - see that the taxonomy's size
|
||||||
// is what we expect it to be.
|
// is what we expect it to be.
|
||||||
assertEquals(expectedCategories.length, tw.getSize());
|
assertEquals(expectedCategories.length, tw.getSize());
|
||||||
tw.close();
|
tw.close();
|
||||||
|
indexDir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -1050,7 +1072,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
*/
|
*/
|
||||||
@Test
|
@Test
|
||||||
public void testWriterCheckPaths2() throws Exception {
|
public void testWriterCheckPaths2() throws Exception {
|
||||||
Directory indexDir = new RAMDirectory();
|
Directory indexDir = newDirectory();
|
||||||
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
|
||||||
fillTaxonomy(tw);
|
fillTaxonomy(tw);
|
||||||
checkPaths(tw);
|
checkPaths(tw);
|
||||||
|
@ -1063,6 +1085,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
|
||||||
fillTaxonomy(tw);
|
fillTaxonomy(tw);
|
||||||
checkPaths(tw);
|
checkPaths(tw);
|
||||||
tw.close();
|
tw.close();
|
||||||
|
indexDir.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO (Facet): test multiple readers, one writer. Have the multiple readers
|
// TODO (Facet): test multiple readers, one writer. Have the multiple readers
|
||||||
|
|
|
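
Note on the pattern: every TestTaxonomyCombined hunk above applies the same two-part conversion. new RAMDirectory() becomes LuceneTestCase.newDirectory(), which picks a random Directory implementation per run and tracks unclosed handles, and each test now closes its Directory before returning. A minimal sketch of the resulting test shape, assuming only APIs that appear in this diff (the class name and category path are illustrative, not from the commit):

    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;
    import org.apache.lucene.facet.taxonomy.CategoryPath;
    import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
    import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;

    public class DirectoryPatternSketch extends LuceneTestCase {
      public void testShape() throws Exception {
        Directory indexDir = newDirectory();  // random Directory impl, leak-checked by the framework
        TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
        tw.addCategory(new CategoryPath("a", "b"));
        tw.close();
        indexDir.close();  // omitting this now fails the test instead of hiding a leak
      }
    }

The one deliberate exception above is testWriterLock, which keeps an explicit RAMDirectory because, per the new comment, the native fslock implementation does not tolerate being exercised this way.
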
@@ -3,10 +3,11 @@ package org.apache.lucene.facet.taxonomy.lucene;
 import java.io.File;
 
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyReader;
@@ -36,16 +37,16 @@ public class TestAddTaxonomies extends LuceneTestCase {
 
   @Test
   public void test1() throws Exception {
-    Directory dir1 = new RAMDirectory();
+    Directory dir1 = newDirectory();
     LuceneTaxonomyWriter tw1 = new LuceneTaxonomyWriter(dir1);
     tw1.addCategory(new CategoryPath("Author", "Mark Twain"));
     tw1.addCategory(new CategoryPath("Animals", "Dog"));
-    Directory dir2 = new RAMDirectory();
+    Directory dir2 = newDirectory();
     LuceneTaxonomyWriter tw2 = new LuceneTaxonomyWriter(dir2);
     tw2.addCategory(new CategoryPath("Author", "Rob Pike"));
     tw2.addCategory(new CategoryPath("Aardvarks", "Bob"));
     tw2.close();
-    Directory dir3 = new RAMDirectory();
+    Directory dir3 = newDirectory();
     LuceneTaxonomyWriter tw3 = new LuceneTaxonomyWriter(dir3);
     tw3.addCategory(new CategoryPath("Author", "Zebra Smith"));
     tw3.addCategory(new CategoryPath("Aardvarks", "Bob"));
@@ -93,10 +94,26 @@ public class TestAddTaxonomies extends LuceneTestCase {
     assertEquals(5, map1[3]);
     assertEquals(7, map1[4]);
     assertEquals(6, map1[5]);
+
+    tr.close();
+    dir1.close();
+    dir2.close();
+    dir3.close();
+  }
+
+  // a reasonable random test
+  public void testmedium() throws Exception {
+    int numTests = atLeast(3);
+    for (int i = 0; i < numTests; i++) {
+      dotest(_TestUtil.nextInt(random, 1, 10),
+          _TestUtil.nextInt(random, 1, 100),
+          _TestUtil.nextInt(random, 100, 1000),
+          random.nextBoolean());
+    }
   }
 
   // A more comprehensive and big random test.
-  @Test
+  @Test @Nightly
   public void testbig() throws Exception {
     dotest(2, 1000, 5000, false);
     dotest(10, 10000, 100, false);
@@ -113,8 +130,8 @@ public class TestAddTaxonomies extends LuceneTestCase {
     Directory copydirs[] = new Directory[ntaxonomies];
 
     for (int i=0; i<ntaxonomies; i++) {
-      dirs[i] = new RAMDirectory();
-      copydirs[i] = new RAMDirectory();
+      dirs[i] = newDirectory();
+      copydirs[i] = newDirectory();
       LuceneTaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[i]);
       LuceneTaxonomyWriter copytw = new LuceneTaxonomyWriter(copydirs[i]);
       for (int j=0; j<ncats; j++) {
@@ -135,6 +152,7 @@ public class TestAddTaxonomies extends LuceneTestCase {
     if (ntaxonomies>1) {
       for (int i=0; i<ntaxonomies-1; i++) {
         if (disk) {
+          // TODO: use a LTC tempfile
          maps[i] = new DiskOrdinalMap(new File(System.getProperty("java.io.tmpdir"),
               "tmpmap"+i));
         } else {
@@ -193,7 +211,7 @@ public class TestAddTaxonomies extends LuceneTestCase {
         int otherord = main.getOrdinal(other.getPath(j));
         assertTrue(otherord != TaxonomyReader.INVALID_ORDINAL);
       }
-      tr.close();
+      other.close();
     }
 
     // Check that all the new categories in the merged taxonomy exist in
@@ -229,6 +247,8 @@ public class TestAddTaxonomies extends LuceneTestCase {
     }
 
     main.close();
+    IOUtils.closeSafely(false, dirs);
+    IOUtils.closeSafely(false, copydirs);
   }
 
 }
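
Note on the pattern: the new testmedium shows the sizing idiom this commit leans on. atLeast(n) scales the iteration count with the test multiplier, _TestUtil.nextInt(random, lo, hi) draws bounded random sizes, and the expensive testbig is pushed to @Nightly so default runs stay cheap. A sketch under those assumptions, where doWork is a hypothetical stand-in for the test's dotest helper:

    import org.apache.lucene.util.LuceneTestCase;
    import org.apache.lucene.util._TestUtil;

    public class RandomSizingSketch extends LuceneTestCase {
      public void testScaledIterations() throws Exception {
        int numTests = atLeast(3);  // at least 3 rounds; more when the multiplier is raised
        for (int i = 0; i < numTests; i++) {
          doWork(_TestUtil.nextInt(random, 1, 10),     // a few taxonomies
                 _TestUtil.nextInt(random, 1, 100),    // a few categories each
                 _TestUtil.nextInt(random, 100, 1000), // a modest record count
                 random.nextBoolean());                // on disk or in memory
        }
      }

      private void doWork(int ntaxonomies, int ncats, int nrecords, boolean disk) {
        // hypothetical workload; the real test runs its dotest(...) body here
      }
    }
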
@@ -4,7 +4,6 @@ import java.io.IOException;
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.FilterIndexReader;
 import org.apache.lucene.index.IndexReader;
@@ -13,10 +12,11 @@ import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;
@@ -50,7 +50,7 @@ public class TestIndexClose extends LuceneTestCase {
   @Test
   public void testLeaks() throws Exception {
     LeakChecker checker = new LeakChecker();
-    Directory dir = new RAMDirectory();
+    Directory dir = newDirectory();
     LuceneTaxonomyWriter tw = checker.openWriter(dir);
     tw.close();
     assertEquals(0, checker.nopen());
@@ -88,6 +88,7 @@ public class TestIndexClose extends LuceneTestCase {
     }
     tw.close();
     assertEquals(0, checker.nopen());
+    dir.close();
   }
 
   private static class LeakChecker {
@@ -132,7 +133,7 @@ public class TestIndexClose extends LuceneTestCase {
     protected void openLuceneIndex (Directory directory, OpenMode openMode)
       throws CorruptIndexException, LockObtainFailedException, IOException {
       indexWriter = new InstrumentedIndexWriter(directory,
-          new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer())
+          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false))
             .setOpenMode(openMode));
     }
 
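
Note on the pattern: in TestIndexClose the fixed KeywordAnalyzer is replaced with a MockAnalyzer in KEYWORD mode (the trailing false disables lowercasing), built through newIndexWriterConfig so the test framework can randomize writer settings and check analysis-consumer behavior. A self-contained sketch of the same construction, with a hypothetical test body that is not from this commit:

    import org.apache.lucene.analysis.MockAnalyzer;
    import org.apache.lucene.analysis.MockTokenizer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig.OpenMode;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;

    public class MockAnalyzerSketch extends LuceneTestCase {
      public void testOpenAndClose() throws Exception {
        Directory dir = newDirectory();
        IndexWriter writer = new IndexWriter(dir,
            newIndexWriterConfig(TEST_VERSION_CURRENT,
                new MockAnalyzer(random, MockTokenizer.KEYWORD, false)) // whole-value tokens, no lowercasing
                .setOpenMode(OpenMode.CREATE_OR_APPEND));
        writer.addDocument(new Document());
        writer.close();
        dir.close();
      }
    }
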
@@ -6,7 +6,6 @@ import java.util.Map;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -52,7 +51,7 @@ public class TestLuceneTaxonomyWriter extends LuceneTestCase {
   public void testCommit() throws Exception {
     // Verifies that nothing is committed to the underlying Directory, if
     // commit() wasn't called.
-    Directory dir = new RAMDirectory();
+    Directory dir = newDirectory();
     LuceneTaxonomyWriter ltw = new LuceneTaxonomyWriter(dir, OpenMode.CREATE_OR_APPEND, new NoOpCache());
     assertFalse(IndexReader.indexExists(dir));
     ltw.commit(); // first commit, so that an index will be created
@@ -68,7 +67,7 @@ public class TestLuceneTaxonomyWriter extends LuceneTestCase {
   @Test
   public void testCommitUserData() throws Exception {
     // Verifies that committed data is retrievable
-    Directory dir = new RAMDirectory();
+    Directory dir = newDirectory();
     LuceneTaxonomyWriter ltw = new LuceneTaxonomyWriter(dir, OpenMode.CREATE_OR_APPEND, new NoOpCache());
     assertFalse(IndexReader.indexExists(dir));
     ltw.commit(); // first commit, so that an index will be created
@@ -1,16 +1,17 @@
 package org.apache.lucene.facet.util;
 
 import java.io.IOException;
+import java.util.Random;
 
-import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Index;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MultiFields;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -18,7 +19,6 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.OpenBitSet;
 import org.apache.lucene.util.OpenBitSetDISI;
@@ -50,7 +50,7 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
 
   @Test
   public void testComplementIterator() throws Exception {
-    final int n = 100000;
+    final int n = atLeast(10000);
     final OpenBitSet bits = new OpenBitSet(n);
     for (int i = 0; i < 5 * n; i++) {
       bits.flip(random.nextInt(n));
@@ -61,19 +61,22 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
 
     ScoredDocIDs scoredDocIDs = ScoredDocIdsUtils.createScoredDocIds(bits, n);
 
-    IndexReader reader = createReaderWithNDocs(n);
+    Directory dir = newDirectory();
+    IndexReader reader = createReaderWithNDocs(random, n, dir);
     try {
       assertEquals(n - verify.cardinality(), ScoredDocIdsUtils.getComplementSet(scoredDocIDs,
         reader).size());
     } finally {
       reader.close();
+      dir.close();
     }
   }
 
   @Test
   public void testAllDocs() throws Exception {
     int maxDoc = 3;
-    IndexReader reader = createReaderWithNDocs(maxDoc);
+    Directory dir = newDirectory();
+    IndexReader reader = createReaderWithNDocs(random, maxDoc, dir);
     try {
       ScoredDocIDs all = ScoredDocIdsUtils.createAllDocsScoredDocIDs(reader);
       assertEquals("invalid size", maxDoc, all.size());
@@ -95,6 +98,7 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
       assertEquals(2, docIDsIter.advance(0));
     } finally {
       reader.close();
+      dir.close();
     }
   }
 
@@ -119,7 +123,8 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
       }
     };
 
-    IndexReader reader = createReaderWithNDocs(N_DOCS, docFactory);
+    Directory dir = newDirectory();
+    IndexReader reader = createReaderWithNDocs(random, N_DOCS, docFactory, dir);
     try {
       int numErasedDocs = reader.numDeletedDocs();
 
@@ -142,7 +147,9 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
       // Get all 'alpha' documents
       ScoredDocIdCollector collector = ScoredDocIdCollector.create(reader.maxDoc(), false);
       Query q = new TermQuery(new Term(DocumentFactory.field, DocumentFactory.alphaTxt));
-      new IndexSearcher(reader).search(q, collector);
+      IndexSearcher searcher = newSearcher(reader);
+      searcher.search(q, collector);
+      searcher.close();
 
       ScoredDocIDs scoredDocIds = collector.getScoredDocIDs();
       OpenBitSet resultSet = new OpenBitSetDISI(scoredDocIds.getDocIDs().iterator(), reader.maxDoc());
@@ -171,15 +178,15 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
       }
     } finally {
       reader.close();
+      dir.close();
     }
   }
 
   /**
    * Creates an index with n documents, this method is meant for testing purposes ONLY
-   * Node that this reader is NOT read-only and document can be deleted.
    */
-  static IndexReader createReaderWithNDocs(int nDocs) throws IOException {
-    return createReaderWithNDocs(nDocs, new DocumentFactory(nDocs));
+  static IndexReader createReaderWithNDocs(Random random, int nDocs, Directory directory) throws IOException {
+    return createReaderWithNDocs(random, nDocs, new DocumentFactory(nDocs), directory);
   }
 
   private static class DocumentFactory {
@@ -217,23 +224,21 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
     }
   }
 
-  static IndexReader createReaderWithNDocs(int nDocs, DocumentFactory docFactory) throws IOException {
-    Directory ramDir = new RAMDirectory();
-
+  static IndexReader createReaderWithNDocs(Random random, int nDocs, DocumentFactory docFactory, Directory dir) throws IOException {
     // Create the index
-    IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
+        new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
     for (int docNum = 0; docNum < nDocs; docNum++) {
       writer.addDocument(docFactory.getDoc(docNum));
     }
-    writer.commit();
     writer.close();
 
     // Delete documents marked for deletion
-    IndexReader reader = IndexReader.open(ramDir, false);
+    IndexReader reader = IndexReader.open(dir, false);
     reader.deleteDocuments(new Term(DocumentFactory.field, DocumentFactory.delTxt));
     reader.close();
 
     // Open a fresh read-only reader with the deletions in place
-    return IndexReader.open(ramDir, true);
+    return IndexReader.open(dir, true);
   }
 }
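
Note on the pattern: the TestScoredDocIDsUtils conversion changes createReaderWithNDocs to accept the Random and the caller's Directory, and swaps IndexWriter for RandomIndexWriter, which randomizes writer settings. A compressed sketch of the helper's new shape, assuming only the test-framework APIs the diff itself uses; the class name and empty Document are illustrative:

    import java.util.Random;
    import org.apache.lucene.analysis.MockAnalyzer;
    import org.apache.lucene.analysis.MockTokenizer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.RandomIndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;

    public class RandomWriterSketch extends LuceneTestCase {
      static IndexReader openReader(Random random, int nDocs, Directory dir) throws Exception {
        RandomIndexWriter writer = new RandomIndexWriter(random, dir,
            newIndexWriterConfig(random, TEST_VERSION_CURRENT,
                new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
        for (int i = 0; i < nDocs; i++) {
          writer.addDocument(new Document());  // content omitted; the real test builds docs via its DocumentFactory
        }
        writer.close();  // commits on close, so the explicit commit() call was dropped
        return IndexReader.open(dir, true);  // read-only reader; dir stays open for the caller
      }
    }

Passing the Directory in from the caller is what lets each test close it in its finally block, matching the leak discipline applied across the rest of the commit.
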
@@ -28,6 +28,7 @@ import org.apache.lucene.util.ThreadInterruptedException;
 /**
  * Test utility - slow directory
  */
+// TODO: move to test-framework and sometimes use in tests?
 public class SlowRAMDirectory extends RAMDirectory {
 
   private static final int IO_SLEEP_THRESHOLD = 50;