mirror of https://github.com/apache/lucene.git
LUCENE-3264: crank up faceting module tests
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1141629 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 842d97edac
commit cec86dbc06
FacetTestBase.java

@@ -11,7 +11,8 @@ import java.util.Map;
 import org.apache.lucene.DocumentBuilder.DocumentBuilderException;
 import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Index;
@@ -20,21 +21,19 @@ import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
 import org.apache.lucene.index.MultiFields;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.RAMDirectory;
 
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util._TestUtil;
 import org.apache.lucene.facet.index.CategoryDocumentBuilder;
 import org.apache.lucene.facet.index.params.CategoryListParams;
 import org.apache.lucene.facet.index.params.DefaultFacetIndexingParams;
@@ -131,15 +130,15 @@ public abstract class FacetTestBase extends LuceneTestCase {
     }
 
     if (onDisk) {
-      File indexFile = new File(TEMP_DIR,"index");
-      indexDir = FSDirectory.open(indexFile);
-      taxoDir = FSDirectory.open(new File(indexFile,"facets"));
+      File indexFile = _TestUtil.getTempDir("index");
+      indexDir = newFSDirectory(indexFile);
+      taxoDir = newFSDirectory(new File(indexFile,"facets"));
     } else {
-      indexDir = new RAMDirectory();
-      taxoDir = new RAMDirectory();
+      indexDir = newDirectory();
+      taxoDir = newDirectory();
     }
 
-    IndexWriter iw = new IndexWriter(indexDir, new IndexWriterConfig(TEST_VERSION_CURRENT, getAnalyzer()));
+    RandomIndexWriter iw = new RandomIndexWriter(random, indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, getAnalyzer()));
     TaxonomyWriter taxo = new LuceneTaxonomyWriter(taxoDir, OpenMode.CREATE);
 
     populateIndex(iw, taxo, getFacetIndexingParams(partitionSize));
@@ -153,7 +152,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
     // prepare for searching
     taxoReader = new LuceneTaxonomyReader(taxoDir);
     indexReader = IndexReader.open(indexDir);
-    searcher = new IndexSearcher(indexReader);
+    searcher = newSearcher(indexReader);
   }
 
   /** Returns a default facet indexing params */
@@ -187,7 +186,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
    * Populate the test index+taxonomy for this test.
    * <p>Subclasses can override this to test different scenarios
    */
-  protected void populateIndex(IndexWriter iw, TaxonomyWriter taxo, FacetIndexingParams iParams)
+  protected void populateIndex(RandomIndexWriter iw, TaxonomyWriter taxo, FacetIndexingParams iParams)
       throws IOException, DocumentBuilderException, CorruptIndexException {
     // add test documents
     int numDocsToIndex = numDocsToIndex();
@@ -211,7 +210,9 @@ public abstract class FacetTestBase extends LuceneTestCase {
     indexReader = null;
+    searcher.close();
+    searcher = null;
     indexDir.close();
     indexDir = null;
     taxoDir.close();
     taxoDir = null;
   }
 
@@ -220,7 +221,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
    * Sub classes should override in order to test with different analyzer.
    */
   protected Analyzer getAnalyzer() {
-    return new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
+    return new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);
   }
 
   /** convenience method: convert sub results to an array */
@@ -233,7 +234,7 @@ public abstract class FacetTestBase extends LuceneTestCase {
   }
 
   /** utility Create a dummy document with specified categories and content */
-  protected final void indexDoc(FacetIndexingParams iParams, IndexWriter iw,
+  protected final void indexDoc(FacetIndexingParams iParams, RandomIndexWriter iw,
       TaxonomyWriter tw, String content, List<CategoryPath> categories) throws IOException,
       CorruptIndexException {
     Document d = new Document();
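Taken together, the FacetTestBase hunks show the whole migration in miniature: tracked directories from newDirectory()/newFSDirectory(), a RandomIndexWriter in place of IndexWriter, and MockAnalyzer in place of a concrete analyzer. A minimal self-contained sketch of that pattern, assuming the Lucene test framework of this era (newDirectory(), newIndexWriterConfig(), and the random field all come from LuceneTestCase; the class and method names below are illustrative, not part of the patch):

    import org.apache.lucene.analysis.MockAnalyzer;
    import org.apache.lucene.analysis.MockTokenizer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.RandomIndexWriter;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;

    public class RandomizedPatternTest extends LuceneTestCase {
      public void testPattern() throws Exception {
        // newDirectory() picks a random Directory impl and tracks it for leaks
        Directory dir = newDirectory();
        // RandomIndexWriter randomizes flushing/merging behind a plain
        // addDocument() facade
        RandomIndexWriter iw = new RandomIndexWriter(random, dir,
            newIndexWriterConfig(TEST_VERSION_CURRENT,
                new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
        iw.addDocument(new Document());
        IndexReader r = iw.getReader(); // reader straight from the writer
        iw.close();
        r.close();
        dir.close(); // the framework fails the test if this is forgotten
      }
    }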
FacetTestUtils.java

@@ -14,12 +14,12 @@ import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 
 import org.apache.lucene.search.MultiCollector;
+import org.apache.lucene.util.LuceneTestCase;
@@ -55,11 +55,11 @@ import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;
 
 public class FacetTestUtils {
 
-  public static Directory[][] createIndexTaxonomyDirs(int number) {
+  public static Directory[][] createIndexTaxonomyDirs(int number) throws IOException {
     Directory[][] dirs = new Directory[number][2];
     for (int i = 0; i < number; i++) {
-      dirs[i][0] = new RAMDirectory();
-      dirs[i][1] = new RAMDirectory();
+      dirs[i][0] = LuceneTestCase.newDirectory();
+      dirs[i][1] = LuceneTestCase.newDirectory();
     }
     return dirs;
   }
@@ -122,7 +122,7 @@ public class FacetTestUtils {
     return collectors;
   }
 
-  public static void add(FacetIndexingParams iParams, IndexWriter iw,
+  public static void add(FacetIndexingParams iParams, RandomIndexWriter iw,
      TaxonomyWriter tw, String... strings) throws IOException,
      CorruptIndexException {
    ArrayList<CategoryPath> cps = new ArrayList<CategoryPath>();
EnhancementsPayloadIteratorTest.java

@@ -5,7 +5,6 @@ import java.io.IOException;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -46,8 +45,8 @@ public class EnhancementsPayloadIteratorTest extends LuceneTestCase {
   @BeforeClass
   public static void buildAssociationIndex() throws Exception {
     // create Directories for the search index and for the taxonomy index
-    indexDir = new RAMDirectory();
-    taxoDir = new RAMDirectory();
+    indexDir = newDirectory();
+    taxoDir = newDirectory();
 
     // index the sample documents
     if (VERBOSE) {
@@ -73,6 +72,7 @@ public class EnhancementsPayloadIteratorTest extends LuceneTestCase {
     assertTrue("Missing instance of tags/lucene in doc 1", iterator.setdoc(1));
     assoc = (Integer) iterator.getCategoryData(associationEnhancement);
     assertEquals("Unexpected association value for tags/lucene in doc 1", 1, assoc, 1E-5);
+    indexReader.close();
   }
 
   @Test
@@ -84,6 +84,7 @@ public class EnhancementsPayloadIteratorTest extends LuceneTestCase {
     assertTrue("Unexpected failure of init()", iterator.init());
     assertFalse("Unexpected payload for root/a/f2 in doc 0", iterator.setdoc(0));
     assertFalse("Unexpected instance of root/a/f2 in doc 1", iterator.setdoc(1));
+    indexReader.close();
   }
 
   @Test
@@ -98,11 +99,14 @@ public class EnhancementsPayloadIteratorTest extends LuceneTestCase {
     float assoc = Float.intBitsToFloat((Integer) iterator
         .getCategoryData(associationEnhancement));
     assertEquals("Unexpected association value for genre/computing in doc 1", 0.34f, assoc, 0.001);
+    indexReader.close();
   }
 
   @AfterClass
   public static void closeDirectories() throws IOException {
     indexDir.close();
+    indexDir = null;
     taxoDir.close();
+    taxoDir = null;
   }
 }
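This file shows the class-level fixture lifecycle the commit enforces: the shared directories are created once in @BeforeClass, every reader taken per-test is closed in the test, and the @AfterClass hook closes and nulls the statics. A sketch of that skeleton, assuming the same framework (names illustrative):

    import java.io.IOException;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.LuceneTestCase;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    public class StaticLifecycleTest extends LuceneTestCase {
      private static Directory indexDir;
      private static Directory taxoDir;

      @BeforeClass
      public static void buildIndex() throws Exception {
        indexDir = newDirectory();
        taxoDir = newDirectory();
        // ... index the fixture documents once for all test methods ...
      }

      @AfterClass
      public static void closeDirectories() throws IOException {
        indexDir.close();
        indexDir = null; // null the statics so the JVM can reclaim them
        taxoDir.close();
        taxoDir = null;
      }
    }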
TwoEnhancementsTest.java

@@ -4,14 +4,13 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -45,8 +44,8 @@ public class TwoEnhancementsTest extends LuceneTestCase {
 
   @Test
   public void testTwoEmptyAndNonEmptyByteArrays() throws Exception {
-    Directory indexDir = new RAMDirectory();
-    Directory taxoDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
+    Directory taxoDir = newDirectory();
 
     EnhancementsIndexingParams indexingParams =
         new DefaultEnhancementsIndexingParams(
@@ -57,8 +56,8 @@ public class TwoEnhancementsTest extends LuceneTestCase {
     List<CategoryPath> categoryPaths = new ArrayList<CategoryPath>();
     categoryPaths.add(new CategoryPath("a", "b"));
 
-    IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter indexWriter = new RandomIndexWriter(random, indexDir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     TaxonomyWriter taxo = new LuceneTaxonomyWriter(taxoDir);
 
     // a category document builder will add the categories to a document
@@ -67,9 +66,9 @@ public class TwoEnhancementsTest extends LuceneTestCase {
     indexWriter.addDocument(new EnhancementsDocumentBuilder(taxo,
         indexingParams).setCategoryPaths(categoryPaths).build(doc));
 
+    IndexReader indexReader = indexWriter.getReader();
     indexWriter.close();
 
-    IndexReader indexReader = IndexReader.open(indexDir);
     Term term = DrillDown.term(indexingParams, new CategoryPath("a","b"));
     EnhancementsPayloadIterator iterator = new EnhancementsPayloadIterator(
         indexingParams.getCategoryEnhancements(), indexReader, term);
@@ -82,13 +81,17 @@ public class TwoEnhancementsTest extends LuceneTestCase {
         .getCategoryData(new CategoryEnhancementDummy3());
     assertTrue("Bad array returned for CategoryEnhancementDummy3", Arrays
         .equals(dummy3, CategoryEnhancementDummy3.CATEGORY_TOKEN_BYTES));
+    indexReader.close();
+    indexDir.close();
+    taxo.close();
+    taxoDir.close();
   }
 
   @Test
   public void testTwoNonEmptyByteArrays() throws Exception {
     // add document with a category containing data for both enhancements
-    Directory indexDir = new RAMDirectory();
-    Directory taxoDir = new RAMDirectory();
+    Directory indexDir = newDirectory();
+    Directory taxoDir = newDirectory();
 
     EnhancementsIndexingParams indexingParams =
         new DefaultEnhancementsIndexingParams(
@@ -98,8 +101,8 @@ public class TwoEnhancementsTest extends LuceneTestCase {
     List<CategoryPath> categoryPaths = new ArrayList<CategoryPath>();
     categoryPaths.add(new CategoryPath("a", "b"));
 
-    IndexWriter indexWriter = new IndexWriter(indexDir, new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter indexWriter = new RandomIndexWriter(random, indexDir, newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     TaxonomyWriter taxo = new LuceneTaxonomyWriter(taxoDir);
 
     // a category document builder will add the categories to a document
@@ -108,9 +111,9 @@ public class TwoEnhancementsTest extends LuceneTestCase {
     indexWriter.addDocument(new EnhancementsDocumentBuilder(taxo,
         indexingParams).setCategoryPaths(categoryPaths).build(doc));
 
+    IndexReader indexReader = indexWriter.getReader();
     indexWriter.close();
 
-    IndexReader indexReader = IndexReader.open(indexDir);
     Term term = DrillDown.term(indexingParams, new CategoryPath("a","b"));
     EnhancementsPayloadIterator iterator = new EnhancementsPayloadIterator(
         indexingParams.getCategoryEnhancements(), indexReader, term);
@@ -125,5 +128,9 @@ public class TwoEnhancementsTest extends LuceneTestCase {
         .getCategoryData(new CategoryEnhancementDummy3());
     assertTrue("Bad array returned for CategoryEnhancementDummy3", Arrays
         .equals(dummy3, CategoryEnhancementDummy3.CATEGORY_TOKEN_BYTES));
+    indexReader.close();
+    taxo.close();
+    indexDir.close();
+    taxoDir.close();
   }
 }
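The reader-acquisition change above recurs throughout the commit, so it is worth isolating. A schematic comparison (not compilable on its own; variable names follow the hunks above):

    // Old shape: close the writer, then open a fresh reader from the Directory.
    //   indexWriter.close();
    //   IndexReader indexReader = IndexReader.open(indexDir);
    // New shape: take the reader from the writer, then close the writer.
    IndexReader indexReader = indexWriter.getReader();
    indexWriter.close();

As I understand RandomIndexWriter of this era, getReader() may return either a near-real-time reader or a freshly opened one at random, so both code paths get exercised across runs without an explicit commit().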
CustomAssociationPropertyTest.java

@@ -1,12 +1,11 @@
 package org.apache.lucene.facet.enhancements.association;
 
-import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -55,10 +54,11 @@ public class CustomAssociationPropertyTest extends LuceneTestCase {
     EnhancementsIndexingParams iParams = new DefaultEnhancementsIndexingParams(
         new AssociationEnhancement());
 
-    Directory iDir = new RAMDirectory();
-    Directory tDir = new RAMDirectory();
+    Directory iDir = newDirectory();
+    Directory tDir = newDirectory();
 
-    IndexWriter w = new IndexWriter(iDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    RandomIndexWriter w = new RandomIndexWriter(random, iDir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
     LuceneTaxonomyWriter taxoW = new LuceneTaxonomyWriter(tDir);
 
     CategoryContainer cc = new CategoryContainer();
@@ -72,9 +72,9 @@ public class CustomAssociationPropertyTest extends LuceneTestCase {
     builder.setCategories(cc);
     w.addDocument(builder.build(new Document()));
     taxoW.close();
+    IndexReader reader = w.getReader();
     w.close();
 
-    IndexReader reader = IndexReader.open(iDir);
     LuceneTaxonomyReader taxo = new LuceneTaxonomyReader(tDir);
     String field = iParams.getCategoryListParams(new CategoryPath("0")).getTerm().field();
     AssociationsPayloadIterator api = new AssociationsPayloadIterator(reader, field);
@@ -93,5 +93,10 @@ public class CustomAssociationPropertyTest extends LuceneTestCase {
     }
 
     assertTrue("No categories found for doc #0", flag);
+
+    reader.close();
+    taxo.close();
+    iDir.close();
+    tDir.close();
   }
 }
FacetsPayloadProcessorProviderTest.java

@@ -4,15 +4,15 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -49,23 +49,27 @@ public class FacetsPayloadProcessorProviderTest extends LuceneTestCase {
 
   @Test
   public void testTaxonomyMergeUtils() throws Exception {
-    Directory dir = new RAMDirectory();
-    Directory taxDir = new RAMDirectory();
+    Directory dir = newDirectory();
+    Directory taxDir = newDirectory();
     buildIndexWithFacets(dir, taxDir, true);
 
-    Directory dir1 = new RAMDirectory();
-    Directory taxDir1 = new RAMDirectory();
+    Directory dir1 = newDirectory();
+    Directory taxDir1 = newDirectory();
     buildIndexWithFacets(dir1, taxDir1, false);
 
     TaxonomyMergeUtils.merge(dir, taxDir, dir1, taxDir1);
 
     verifyResults(dir1, taxDir1);
+    dir1.close();
+    taxDir1.close();
+    dir.close();
+    taxDir.close();
   }
 
   private void verifyResults(Directory dir, Directory taxDir) throws IOException {
     IndexReader reader1 = IndexReader.open(dir);
     LuceneTaxonomyReader taxReader = new LuceneTaxonomyReader(taxDir);
-    IndexSearcher searcher = new IndexSearcher(reader1);
+    IndexSearcher searcher = newSearcher(reader1);
     FacetSearchParams fsp = new FacetSearchParams();
     fsp.addFacetRequest(new CountFacetRequest(new CategoryPath("tag"), NUM_DOCS));
     FacetsCollector collector = new FacetsCollector(fsp, reader1, taxReader);
@@ -81,11 +85,14 @@ public class FacetsPayloadProcessorProviderTest extends LuceneTestCase {
       }
       assertEquals(NUM_DOCS ,weight);
     }
+    reader1.close();
+    taxReader.close();
   }
 
   private void buildIndexWithFacets(Directory dir, Directory taxDir, boolean asc) throws IOException {
-    IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
-    IndexWriter writer = new IndexWriter(dir, config);
+    IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
+        new MockAnalyzer(random, MockTokenizer.WHITESPACE, false));
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir, config);
 
     LuceneTaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(taxDir);
     for (int i = 1; i <= NUM_DOCS; i++) {
OrdinalPolicyTest.java

@@ -1,6 +1,6 @@
 package org.apache.lucene.facet.index.categorypolicy;
 
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -46,8 +46,9 @@ public class OrdinalPolicyTest extends LuceneTestCase {
 
   @Test
   public void testNonTopLevelOrdinalPolicy() throws Exception {
+    Directory dir = newDirectory();
     TaxonomyWriter taxonomy = null;
-    taxonomy = new LuceneTaxonomyWriter(new RAMDirectory());
+    taxonomy = new LuceneTaxonomyWriter(dir);
 
     int[] topLevelOrdinals = new int[10];
     String[] topLevelStrings = new String[10];
@@ -85,6 +86,8 @@ public class OrdinalPolicyTest extends LuceneTestCase {
 
     // check illegal ordinal
     assertFalse("Should not add illegal ordinal", ordinalPolicy.shouldAdd(100000));
+    taxonomy.close();
+    dir.close();
   }
 
 }
PathPolicyTest.java

@@ -1,6 +1,6 @@
 package org.apache.lucene.facet.index.categorypolicy;
 
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -52,8 +52,9 @@ public class PathPolicyTest extends LuceneTestCase {
 
   @Test
   public void testNonTopLevelPathPolicy() throws Exception {
+    Directory dir = newDirectory();
    TaxonomyWriter taxonomy = null;
-    taxonomy = new LuceneTaxonomyWriter(new RAMDirectory());
+    taxonomy = new LuceneTaxonomyWriter(dir);
 
     CategoryPath[] topLevelPaths = new CategoryPath[10];
     String[] topLevelStrings = new String[10];
@@ -88,5 +89,7 @@ public class PathPolicyTest extends LuceneTestCase {
         + nonTopLevelPaths[i],
         pathPolicy.shouldAdd(nonTopLevelPaths[i]));
     }
+    taxonomy.close();
+    dir.close();
   }
 }
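The policy tests, like every other file in this commit, trade an anonymous new RAMDirectory() for a named, tracked directory precisely so it can be closed. A sketch of the reasoning, assuming the framework's leak tracking (the try/finally shape is illustrative; the patched tests close unconditionally at the end):

    // Every Directory from newDirectory() must be closed or the test fails;
    // closing it while a writer still holds the lock also fails, hence the order.
    Directory dir = newDirectory();
    TaxonomyWriter taxonomy = new LuceneTaxonomyWriter(dir);
    try {
      // ... exercise the OrdinalPolicy / PathPolicy under test ...
    } finally {
      taxonomy.close(); // release the taxonomy index write lock first
      dir.close();      // then the directory itself
    }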
CategoryParentsStreamTest.java

@@ -3,7 +3,7 @@ package org.apache.lucene.facet.index.streaming;
 import java.io.IOException;
 
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.facet.FacetException;
@@ -49,8 +49,9 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
    */
   @Test
   public void testStreamDefaultParams() throws IOException {
+    Directory directory = newDirectory();
     TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(
-        new RAMDirectory());
+        directory);
     CategoryParentsStream stream = new CategoryParentsStream(
         new CategoryAttributesStream(categoryContainer),
         taxonomyWriter, new DefaultFacetIndexingParams());
@@ -63,6 +64,7 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
     assertEquals("Wrong number of tokens", 6, nTokens);
 
     taxonomyWriter.close();
+    directory.close();
   }
 
   /**
@@ -74,8 +76,9 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
    */
   @Test
   public void testStreamNonTopLevelParams() throws IOException {
+    Directory directory = newDirectory();
     final TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(
-        new RAMDirectory());
+        directory);
     FacetIndexingParams indexingParams = new DefaultFacetIndexingParams() {
       @Override
       protected OrdinalPolicy fixedOrdinalPolicy() {
@@ -102,6 +105,7 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
     assertEquals("Wrong number of tokens", 4, nTokens);
 
     taxonomyWriter.close();
+    directory.close();
   }
 
   /**
@@ -113,7 +117,8 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
    */
   @Test
   public void testNoRetainableAttributes() throws IOException, FacetException {
-    TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(new RAMDirectory());
+    Directory directory = newDirectory();
+    TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(directory);
 
     new CategoryParentsStream(new CategoryAttributesStream(categoryContainer),
         taxonomyWriter, new DefaultFacetIndexingParams());
@@ -133,6 +138,8 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
     }
     assertEquals("Wrong number of tokens with attributes", 1, nAttributes);
 
+    taxonomyWriter.close();
+    directory.close();
   }
 
   /**
@@ -144,8 +151,9 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
    */
   @Test
   public void testRetainableAttributes() throws IOException, FacetException {
+    Directory directory = newDirectory();
     TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(
-        new RAMDirectory());
+        directory);
 
     FacetIndexingParams indexingParams = new DefaultFacetIndexingParams();
     new CategoryParentsStream(new CategoryAttributesStream(
@@ -176,6 +184,7 @@ public class CategoryParentsStreamTest extends CategoryContainerTestBase {
     assertEquals("Wrong number of tokens with attributes", 3, nAttributes);
 
     taxonomyWriter.close();
+    directory.close();
   }
 
   private final class MyCategoryListTokenizer extends CategoryListTokenizer {
CategoryTokenizerTest.java

@@ -7,7 +7,7 @@ import java.util.Set;
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
-import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.facet.index.CategoryContainerTestBase;
@@ -46,8 +46,9 @@ public class CategoryTokenizerTest extends CategoryContainerTestBase {
    */
   @Test
   public void testTokensDefaultParams() throws IOException {
+    Directory directory = newDirectory();
     TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(
-        new RAMDirectory());
+        directory);
     DefaultFacetIndexingParams indexingParams = new DefaultFacetIndexingParams();
     CategoryTokenizer tokenizer = new CategoryTokenizer(
         new CategoryAttributesStream(categoryContainer),
@@ -73,6 +74,7 @@ public class CategoryTokenizerTest extends CategoryContainerTestBase {
     assertEquals("Wrong number of tokens", 3, nTokens);
 
     taxonomyWriter.close();
+    directory.close();
   }
 
   /**
@@ -83,8 +85,9 @@ public class CategoryTokenizerTest extends CategoryContainerTestBase {
    */
   @Test
   public void testLongCategoryPath() throws IOException {
+    Directory directory = newDirectory();
     TaxonomyWriter taxonomyWriter = new LuceneTaxonomyWriter(
-        new RAMDirectory());
+        directory);
 
     List<CategoryPath> longCategory = new ArrayList<CategoryPath>();
     longCategory.add(new CategoryPath("one", "two", "three", "four",
@@ -107,5 +110,6 @@ public class CategoryTokenizerTest extends CategoryContainerTestBase {
     assertFalse("Unexpected token", tokenizer.incrementToken());
 
     taxonomyWriter.close();
+    directory.close();
   }
 }
BaseTestTopK.java

@@ -6,7 +6,7 @@ import java.util.List;
 
 import org.apache.lucene.DocumentBuilder.DocumentBuilderException;
 import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.RandomIndexWriter;
 
 import org.apache.lucene.facet.FacetTestBase;
 import org.apache.lucene.facet.index.params.FacetIndexingParams;
@@ -48,7 +48,7 @@ public abstract class BaseTestTopK extends FacetTestBase {
   private int nextInt;
 
   @Override
-  protected void populateIndex(IndexWriter iw, TaxonomyWriter taxo,
+  protected void populateIndex(RandomIndexWriter iw, TaxonomyWriter taxo,
       FacetIndexingParams iParams) throws IOException,
       DocumentBuilderException, CorruptIndexException {
     currDoc = -1;
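Changing populateIndex() in FacetTestBase ripples through every subclass: each override must change its signature in lock-step, while the bodies stay untouched because RandomIndexWriter mirrors the addDocument() surface. A sketch of the new shape an override takes:

    @Override
    protected void populateIndex(RandomIndexWriter iw, TaxonomyWriter taxo,
        FacetIndexingParams iParams) throws IOException,
        DocumentBuilderException, CorruptIndexException {
      // same document-adding code as before, via iw.addDocument(...);
      // only the writer type changes
    }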
CategoryListIteratorTest.java

@@ -1,22 +1,23 @@
 package org.apache.lucene.facet.search;
 
 import java.io.IOException;
+import java.io.Reader;
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.Payload;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -95,20 +96,20 @@ public class CategoryListIteratorTest extends LuceneTestCase {
 
   @Test
   public void testPayloadIntDecodingIterator() throws Exception {
-    Directory dir = new RAMDirectory();
+    Directory dir = newDirectory();
     DataTokenStream dts = new DataTokenStream("1",new SortingIntEncoder(
         new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder()))));
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
+        new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
     for (int i = 0; i < data.length; i++) {
       dts.setIdx(i);
       Document doc = new Document();
       doc.add(new Field("f", dts));
       writer.addDocument(doc);
     }
-    writer.commit();
+    IndexReader reader = writer.getReader();
     writer.close();
 
-    IndexReader reader = IndexReader.open(dir, true);
     CategoryListIterator cli = new PayloadIntDecodingIterator(reader, new Term(
         "f","1"), dts.encoder.createMatchingDecoder());
     cli.init();
@@ -127,6 +128,7 @@ public class CategoryListIteratorTest extends LuceneTestCase {
     }
     assertEquals("Missing categories!",10,totalCategories);
     reader.close();
+    dir.close();
   }
 
   /**
@@ -139,12 +141,21 @@ public class CategoryListIteratorTest extends LuceneTestCase {
    */
   @Test
   public void testPayloadIteratorWithInvalidDoc() throws Exception {
-    Directory dir = new RAMDirectory();
+    Directory dir = newDirectory();
     DataTokenStream dts = new DataTokenStream("1",new SortingIntEncoder(
         new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder()))));
     DataTokenStream dts2 = new DataTokenStream("2",new SortingIntEncoder(
         new UniqueValuesIntEncoder(new DGapIntEncoder(new VInt8IntEncoder()))));
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    // this test requires that no payloads ever be randomly present!
+    final Analyzer noPayloadsAnalyzer = new Analyzer() {
+      @Override
+      public TokenStream tokenStream(String fieldName, Reader reader) {
+        return new MockTokenizer(reader, MockTokenizer.KEYWORD, false);
+      }
+    };
+    // NOTE: test is wired to LogMP... because test relies on certain docids having payloads
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, noPayloadsAnalyzer).setMergePolicy(newLogMergePolicy()));
     for (int i = 0; i < data.length; i++) {
       dts.setIdx(i);
       Document doc = new Document();
@@ -170,10 +181,9 @@ public class CategoryListIteratorTest extends LuceneTestCase {
 
     }
 
-    writer.commit();
+    IndexReader reader = writer.getReader();
     writer.close();
 
-    IndexReader reader = IndexReader.open(dir, true);
     CategoryListIterator cli = new PayloadIntDecodingIterator(reader, new Term(
         "f","1"), dts.encoder.createMatchingDecoder());
     cli.init();
@@ -202,6 +212,7 @@ public class CategoryListIteratorTest extends LuceneTestCase {
     // Ok.. went through the first 4 docs, now lets try the 6th doc (docid 5)
     assertFalse("Doc #6 (docid=5) should not have a payload!",cli.skipTo(5));
     reader.close();
+    dir.close();
   }
 
 }
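Two details in this file deserve a note. The hand-rolled noPayloadsAnalyzer exists because MockAnalyzer can randomly attach payloads to tokens, which would break a test that asserts the absence of payloads. And the setMergePolicy(newLogMergePolicy()) call matters because the test hard-codes docids. In schematic form (makeDoc is a hypothetical helper, not from the patch):

    // Docids are per-segment and get renumbered when segments merge, so under
    // a random merge policy the document inserted 6th need not end up as
    // docid 5. A log merge policy merges adjacent segments in order, which
    // preserves insertion order:
    for (int i = 0; i < data.length; i++) {
      writer.addDocument(makeDoc(i)); // with LogMP: document i keeps docid i
    }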
DrillDownTest.java

@@ -3,15 +3,16 @@ package org.apache.lucene.facet.search;
 import java.io.IOException;
 import java.util.ArrayList;
 
-import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Index;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
@@ -19,7 +20,6 @@ import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -58,6 +58,8 @@ public class DrillDownTest extends LuceneTestCase {
   private FacetSearchParams nonDefaultParams;
   private static IndexReader reader;
   private static LuceneTaxonomyReader taxo;
+  private static Directory dir;
+  private static Directory taxoDir;
 
   public DrillDownTest() throws IOException {
     PerDimensionIndexingParams iParams = new PerDimensionIndexingParams();
@@ -71,10 +73,11 @@ public class DrillDownTest extends LuceneTestCase {
   }
   @BeforeClass
   public static void createIndexes() throws CorruptIndexException, LockObtainFailedException, IOException {
-    Directory dir = new RAMDirectory();
-    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
+    dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random, dir,
+        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
 
-    Directory taxoDir = new RAMDirectory();
+    taxoDir = newDirectory();
     TaxonomyWriter taxoWriter = new LuceneTaxonomyWriter(taxoDir);
 
     for (int i = 0; i < 100; i++) {
@@ -98,10 +101,9 @@ public class DrillDownTest extends LuceneTestCase {
     }
 
     taxoWriter.close();
-    writer.commit();
+    reader = writer.getReader();
     writer.close();
 
-    reader = IndexReader.open(dir, true);
     taxo = new LuceneTaxonomyReader(taxoDir);
   }
 
@@ -127,7 +129,7 @@ public class DrillDownTest extends LuceneTestCase {
 
   @Test
   public void testQuery() throws IOException {
-    IndexSearcher searcher = new IndexSearcher(reader);
+    IndexSearcher searcher = newSearcher(reader);
 
     // Making sure the query yields 25 documents with the facet "a"
     Query q = DrillDown.query(defaultParams, new CategoryPath("a"));
@@ -155,7 +157,7 @@ public class DrillDownTest extends LuceneTestCase {
 
   @Test
   public void testQueryImplicitDefaultParams() throws IOException {
-    IndexSearcher searcher = new IndexSearcher(reader);
+    IndexSearcher searcher = newSearcher(reader);
 
     // Create the base query to start with
     Query q = DrillDown.query(defaultParams, new CategoryPath("a"));
@@ -178,11 +180,16 @@ public class DrillDownTest extends LuceneTestCase {
   public static void closeIndexes() throws IOException {
     if (reader != null) {
       reader.close();
+      reader = null;
     }
 
     if (taxo != null) {
       taxo.close();
+      taxo = null;
     }
 
+    dir.close();
+    taxoDir.close();
   }
 
 }
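newSearcher(reader) is the test-framework replacement for new IndexSearcher(reader); as I read the framework of this era, it can randomly wrap the reader or attach an executor so searches also run through those paths. The returned searcher is still the caller's to close. A small usage sketch under that assumption:

    IndexSearcher searcher = newSearcher(reader);
    try {
      TopDocs docs = searcher.search(new MatchAllDocsQuery(), 100);
      // ... assertions on docs ...
    } finally {
      searcher.close(); // required in this Lucene version
    }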
TestFacetsAccumulatorWithComplement.java

@@ -100,8 +100,6 @@ public class TestFacetsAccumulatorWithComplement extends FacetTestBase {
   }
 
   private void doTestComplements() throws Exception {
-    assertTrue("Would like to test this with deletions!",indexReader.hasDeletions());
-    assertTrue("Would like to test this with deletions!",indexReader.numDeletedDocs()>0);
     Query q = new MatchAllDocsQuery(); //new TermQuery(new Term(TEXT,"white"));
     if (VERBOSE) {
       System.out.println("Query: "+q);
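A note on the dropped assertions, which disappear here and in TestScoredDocIdCollector below. Presumably they had to go because the base class now indexes through a RandomIndexWriter, whose random merging can expunge deleted documents before the test body runs:

    // The removed lines insisted the fixture reader still sees deletions:
    //   assertTrue(indexReader.hasDeletions());
    //   assertTrue(indexReader.numDeletedDocs() > 0);
    // After a randomly triggered merge those can fail spuriously, since a
    // merge rewrites segments without their deleted docs.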
TestMultipleCategoryLists.java

@@ -4,14 +4,15 @@ import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
+import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DocsEnum;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
@@ -22,6 +23,7 @@ import org.apache.lucene.store.Directory;
 import org.junit.Test;
 
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.facet.FacetTestUtils;
@@ -62,8 +64,8 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
   public void testDefault() throws Exception {
     Directory[][] dirs = getDirs();
     // create and open an index writer
-    IndexWriter iw = new IndexWriter(dirs[0][0], new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter iw = new RandomIndexWriter(random, dirs[0][0], newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // create and open a taxonomy writer
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[0][1], OpenMode.CREATE);
 
@@ -74,15 +76,14 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
 
     seedIndex(iw, tw, iParams);
 
-    iw.commit();
+    IndexReader ir = iw.getReader();
     tw.commit();
 
     // prepare index reader and taxonomy.
     TaxonomyReader tr = new LuceneTaxonomyReader(dirs[0][1]);
-    IndexReader ir = IndexReader.open(dirs[0][0]);
 
     // prepare searcher to search against
-    IndexSearcher searcher = new IndexSearcher(ir);
+    IndexSearcher searcher = newSearcher(ir);
 
     FacetsCollector facetsCollector = performSearch(iParams, tr, ir,
         searcher);
@@ -98,14 +99,15 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     searcher.close();
     iw.close();
     tw.close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   @Test
   public void testCustom() throws Exception {
     Directory[][] dirs = getDirs();
     // create and open an index writer
-    IndexWriter iw = new IndexWriter(dirs[0][0], new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter iw = new RandomIndexWriter(random, dirs[0][0], newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // create and open a taxonomy writer
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[0][1],
         OpenMode.CREATE);
@@ -115,15 +117,14 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
         new CategoryListParams(new Term("$author", "Authors")));
     seedIndex(iw, tw, iParams);
 
-    iw.commit();
+    IndexReader ir = iw.getReader();
     tw.commit();
 
     // prepare index reader and taxonomy.
     TaxonomyReader tr = new LuceneTaxonomyReader(dirs[0][1]);
-    IndexReader ir = IndexReader.open(dirs[0][0]);
 
     // prepare searcher to search against
-    IndexSearcher searcher = new IndexSearcher(ir);
+    IndexSearcher searcher = newSearcher(ir);
 
     FacetsCollector facetsCollector = performSearch(iParams, tr, ir,
         searcher);
@@ -139,14 +140,15 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     searcher.close();
     iw.close();
     tw.close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   @Test
   public void testTwoCustomsSameField() throws Exception {
     Directory[][] dirs = getDirs();
     // create and open an index writer
-    IndexWriter iw = new IndexWriter(dirs[0][0], new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter iw = new RandomIndexWriter(random, dirs[0][0], newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // create and open a taxonomy writer
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[0][1],
         OpenMode.CREATE);
@@ -158,15 +160,14 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
         new CategoryListParams(new Term("$music", "Composers")));
     seedIndex(iw, tw, iParams);
 
-    iw.commit();
+    IndexReader ir = iw.getReader();
     tw.commit();
 
     // prepare index reader and taxonomy.
     TaxonomyReader tr = new LuceneTaxonomyReader(dirs[0][1]);
-    IndexReader ir = IndexReader.open(dirs[0][0]);
 
     // prepare searcher to search against
-    IndexSearcher searcher = new IndexSearcher(ir);
+    IndexSearcher searcher = newSearcher(ir);
 
     FacetsCollector facetsCollector = performSearch(iParams, tr, ir,
         searcher);
@@ -183,6 +184,7 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     searcher.close();
     iw.close();
     tw.close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   private void assertPostingListExists(String field, String text, IndexReader ir) throws IOException {
@@ -194,8 +196,8 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
   public void testDifferentFieldsAndText() throws Exception {
     Directory[][] dirs = getDirs();
     // create and open an index writer
-    IndexWriter iw = new IndexWriter(dirs[0][0], new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter iw = new RandomIndexWriter(random, dirs[0][0], newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // create and open a taxonomy writer
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[0][1], OpenMode.CREATE);
 
@@ -206,15 +208,14 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
         new CategoryListParams(new Term("$composers", "Composers")));
     seedIndex(iw, tw, iParams);
 
-    iw.commit();
+    IndexReader ir = iw.getReader();
     tw.commit();
 
     // prepare index reader and taxonomy.
     TaxonomyReader tr = new LuceneTaxonomyReader(dirs[0][1]);
-    IndexReader ir = IndexReader.open(dirs[0][0]);
 
     // prepare searcher to search against
-    IndexSearcher searcher = new IndexSearcher(ir);
+    IndexSearcher searcher = newSearcher(ir);
 
     FacetsCollector facetsCollector = performSearch(iParams, tr, ir,
         searcher);
@@ -229,14 +230,15 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     searcher.close();
     iw.close();
     tw.close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   @Test
   public void testSomeSameSomeDifferent() throws Exception {
     Directory[][] dirs = getDirs();
     // create and open an index writer
-    IndexWriter iw = new IndexWriter(dirs[0][0], new IndexWriterConfig(
-        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+    RandomIndexWriter iw = new RandomIndexWriter(random, dirs[0][0], newIndexWriterConfig(
+        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
     // create and open a taxonomy writer
     TaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[0][1],
         OpenMode.CREATE);
@@ -251,15 +253,14 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
 
     seedIndex(iw, tw, iParams);
 
-    iw.commit();
+    IndexReader ir = iw.getReader();
     tw.commit();
 
     // prepare index reader and taxonomy.
     TaxonomyReader tr = new LuceneTaxonomyReader(dirs[0][1]);
-    IndexReader ir = IndexReader.open(dirs[0][0]);
 
     // prepare searcher to search against
-    IndexSearcher searcher = new IndexSearcher(ir);
+    IndexSearcher searcher = newSearcher(ir);
 
     FacetsCollector facetsCollector = performSearch(iParams, tr, ir,
         searcher);
@@ -274,6 +275,7 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     searcher.close();
     iw.close();
     tw.close();
+    IOUtils.closeSafely(false, dirs[0]);
   }
 
   private Directory[][] getDirs() throws IOException {
@@ -358,7 +360,7 @@ public class TestMultipleCategoryLists extends LuceneTestCase {
     return facetsCollector;
   }
 
-  private void seedIndex(IndexWriter iw, TaxonomyWriter tw,
+  private void seedIndex(RandomIndexWriter iw, TaxonomyWriter tw,
       FacetIndexingParams iParams) throws IOException, CorruptIndexException {
     FacetTestUtils.add(iParams, iw, tw, "Author", "Mark Twain");
     FacetTestUtils.add(iParams, iw, tw, "Author", "Stephen King");
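The repeated IOUtils.closeSafely(false, dirs[0]) calls close every Directory in the pair at once; with the first argument false, my reading of the utility's contract is that it rethrows the first exception instead of suppressing it. Roughly equivalent manual code, as a sketch:

    IOException first = null;
    for (Directory d : dirs[0]) {
      try {
        d.close();
      } catch (IOException e) {
        if (first == null) first = e; // keep closing the rest regardless
      }
    }
    if (first != null) throw first;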
TestScoredDocIdCollector.java

@@ -59,8 +59,6 @@ public class TestScoredDocIdCollector extends FacetTestBase {
   @Test
   public void testConstantScore() throws Exception {
     // test that constant score works well
-    assertTrue("Would like to test this with deletions!",indexReader.hasDeletions());
-    assertTrue("Would like to test this with deletions!",indexReader.numDeletedDocs()>0);
 
     Query q = new TermQuery(new Term(CONTENT_FIELD, "white"));
     if (VERBOSE) {
TestTopKInEachNodeResultHandler.java

@@ -4,22 +4,21 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.Field.Index;
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.Field.TermVector;
 import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.RAMDirectory;
 import org.junit.Test;
 
 import org.apache.lucene.util.LuceneTestCase;
@@ -70,8 +69,8 @@ public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
       Integer.MAX_VALUE };
 
     for (int partitionSize : partitionSizes) {
-      Directory iDir = new RAMDirectory();
-      Directory tDir = new RAMDirectory();
+      Directory iDir = newDirectory();
+      Directory tDir = newDirectory();
 
       if (VERBOSE) {
         System.out.println("Partition Size: " + partitionSize);
@@ -85,9 +84,9 @@ public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
       }
       };
 
-      IndexWriter iw = new IndexWriter(iDir,
-          new IndexWriterConfig(TEST_VERSION_CURRENT,
-              new StandardAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE));
+      RandomIndexWriter iw = new RandomIndexWriter(random, iDir,
+          newIndexWriterConfig(TEST_VERSION_CURRENT,
+              new MockAnalyzer(random)).setOpenMode(OpenMode.CREATE));
       TaxonomyWriter tw = new LuceneTaxonomyWriter(tDir);
       prvt_add(iParams, iw, tw, "a", "b");
       prvt_add(iParams, iw, tw, "a", "b", "1");
@@ -106,12 +105,12 @@ public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
       prvt_add(iParams, iw, tw, "a", "d");
       prvt_add(iParams, iw, tw, "a", "e");
 
-      iw.commit();
+      IndexReader ir = iw.getReader();
       iw.close();
       tw.commit();
       tw.close();
 
-      IndexSearcher is = new IndexSearcher(iDir);
+      IndexSearcher is = newSearcher(ir);
       LuceneTaxonomyReader tr = new LuceneTaxonomyReader(tDir);
 
       // Get all of the documents and run the query, then do different
@@ -320,11 +319,15 @@ public class TestTopKInEachNodeResultHandler extends LuceneTestCase {
       assertFalse("Shouldn't have found anything for a FacetRequest " +
           "of a facet that doesn't exist in the index.", hasDoctor);
       assertEquals("Shouldn't have found more than seven request.", 7, facetResults.size());
+      ir.close();
+      tr.close();
+      iDir.close();
+      tDir.close();
     }
 
   }
 
-  private void prvt_add(DefaultFacetIndexingParams iParams, IndexWriter iw,
+  private void prvt_add(DefaultFacetIndexingParams iParams, RandomIndexWriter iw,
      TaxonomyWriter tw, String... strings) throws IOException,
      CorruptIndexException {
    ArrayList<CategoryPath> cps = new ArrayList<CategoryPath>();
TestTopKResultsHandler.java

@@ -140,6 +140,7 @@ public class TestTopKResultsHandler extends BaseTestTopK {
       assertEquals(6.0, parentRes.getValue(), Double.MIN_VALUE);
       frn = resultNodesAsArray(parentRes);
       assertEquals(1.0, frn[0].getValue(), Double.MIN_VALUE);
+      closeAll();
     }
   }
 
@@ -234,6 +235,7 @@ public class TestTopKResultsHandler extends BaseTestTopK {
       assertEquals("Shouldn't have found anything for a FacetRequest "
           + "of a facet that doesn't exist in the index.", 0, facetResults.size());
 
+      closeAll();
     }
   }
 }
TestTotalFacetCounts.java

@@ -5,6 +5,7 @@ import java.io.IOException;
 import java.util.Arrays;
 
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util._TestUtil;
 import org.junit.Test;
 
@@ -108,6 +109,7 @@ public class TestTotalFacetCounts extends LuceneTestCase {
       ++partition;
     }
     readers[0].close();
+    IOUtils.closeSafely(false, dirs[0]);
     tmpFile.delete();
   }
 
@ -5,13 +5,15 @@ import java.io.IOException;
|
|||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
|
||||
import org.apache.lucene.analysis.MockAnalyzer;
|
||||
import org.apache.lucene.analysis.MockTokenizer;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.index.CorruptIndexException;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.MockDirectoryWrapper;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;

@@ -34,6 +36,7 @@ import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyReader;
import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.SlowRAMDirectory;
import org.apache.lucene.util._TestUtil;

@@ -106,13 +109,23 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
    initCache();
  }

  /** runs a few instances of {@link MultiCLSearcher} in parallel */
  public void testGeneralSynchronization() throws Exception {
    int numIters = atLeast(2);
    for (int i = 0; i < numIters; i++) {
      doTestGeneralSynchronization(_TestUtil.nextInt(random, 2, 4),
          random.nextBoolean() ? -1 : _TestUtil.nextInt(random, 1, 10),
          _TestUtil.nextInt(random, 0, 3));
    }
  }

  /**
   * Run many instances of {@link MultiCLSearcher} in parallel, results should
   * be sane. Each instance has a random delay for reading bytes, to ensure
   * that threads finish in different order than started.
   */
  @Test
  public void testGeneralSynchronization() throws Exception {
  @Test @Nightly
  public void testGeneralSynchronizationBig() throws Exception {
    int[] numThreads = new int[] { 2, 3, 5, 8 };
    int[] sleepMillis = new int[] { -1, 1, 20, 33 };
    int[] cacheSize = new int[] { 0,1,2,3,5 };

@@ -130,17 +143,20 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
      InterruptedException {
    TFC.setCacheSize(cacheSize);
    SlowRAMDirectory slowIndexDir = new SlowRAMDirectory(-1, random);
    MockDirectoryWrapper indexDir = new MockDirectoryWrapper(random, slowIndexDir);
    SlowRAMDirectory slowTaxoDir = new SlowRAMDirectory(-1, random);
    MockDirectoryWrapper taxoDir = new MockDirectoryWrapper(random, slowTaxoDir);

    // Index documents without the "slowness"
    MultiCLIndexer.index(slowIndexDir, slowTaxoDir);
    MultiCLIndexer.index(indexDir, taxoDir);

    slowIndexDir.setSleepMillis(sleepMillis);
    slowTaxoDir.setSleepMillis(sleepMillis);

    // Open the slow readers
    IndexReader slowIndexReader = IndexReader.open(slowIndexDir);
    TaxonomyReader slowTaxoReader = new LuceneTaxonomyReader(slowTaxoDir);
    IndexReader slowIndexReader = IndexReader.open(indexDir);
    TaxonomyReader slowTaxoReader = new LuceneTaxonomyReader(taxoDir);

    // Class to perform search and return results as threads
    class Multi extends Thread {

@@ -221,6 +237,8 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
    // we're done, close the index reader and the taxonomy.
    slowIndexReader.close();
    slowTaxoReader.close();
    indexDir.close();
    taxoDir.close();
  }

  /**

@@ -321,6 +339,7 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
    readers[0].close();
    r2.close();
    outputFile.delete();
    IOUtils.closeSafely(false, dirs[0]);
  }

  private int assertReadFromDisc(TotalFacetCounts totalCounts, int prevGen, String errMsg) {

@@ -384,6 +403,9 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
        readers[0].indexReader, readers[0].taxReader, iParams, null);
    assertReadFromDisc(totalCounts, 0, "after reading from disk.");
    outputFile.delete();
    writers[0].close();
    readers[0].close();
    IOUtils.closeSafely(false, dirs[0]);
  }

  /**

@@ -397,7 +419,7 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {

    // Write index using 'normal' directories
    IndexWriter w = new IndexWriter(indexDir, new IndexWriterConfig(
        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
        TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.WHITESPACE, false)));
    LuceneTaxonomyWriter tw = new LuceneTaxonomyWriter(taxoDir);
    DefaultFacetIndexingParams iParams = new DefaultFacetIndexingParams();
    // Add documents and facets

@@ -508,8 +530,13 @@ public class TestTotalFacetCountsCache extends LuceneTestCase {
    assertTrue("with cache of size 2 res no. 1 should come from cache",
        totalCounts1 == TFC.getTotalCounts(readers[1].indexReader, readers[1].taxReader, iParams, null));

    writers[0].close();
    writers[1].close();
    readers[0].close();
    readers[1].close();
    for (Directory[] dirset : dirs) {
      IOUtils.closeSafely(false, dirset);
    }
  }

}
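The synchronization test above layers the test framework's MockDirectoryWrapper over a SlowRAMDirectory, so indexing and searching run through the framework's extra checks while the artificial read latency stays controllable. A minimal sketch of that layering, using only the constructors and setters that appear in the hunks above:

    // Slow in-memory directory (sleepMillis = -1 means no delay yet),
    // wrapped in the framework's checking directory.
    SlowRAMDirectory slowIndexDir = new SlowRAMDirectory(-1, random);
    MockDirectoryWrapper indexDir = new MockDirectoryWrapper(random, slowIndexDir);

    // Index at full speed through the wrapper...
    MultiCLIndexer.index(indexDir, taxoDir);

    // ...then enable per-read delays so concurrent searchers finish in a
    // different order than they started.
    slowIndexDir.setSleepMillis(sleepMillis);
    IndexReader slowIndexReader = IndexReader.open(indexDir);

Note the reader is opened on the wrapper rather than on the inner SlowRAMDirectory, which is exactly the switch the hunk makes.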
@@ -3,19 +3,18 @@ package org.apache.lucene.facet.search.association;
import java.util.List;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.enhancements.EnhancementsDocumentBuilder;
import org.apache.lucene.facet.enhancements.association.AssociationEnhancement;

@@ -53,8 +52,9 @@ import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;
/** Test for associations */
public class AssociationsFacetRequestTest extends LuceneTestCase {

  private static Directory dir = new RAMDirectory();
  private static Directory taxoDir = new RAMDirectory();
  private static Directory dir;
  private static IndexReader reader;
  private static Directory taxoDir;

  private static final CategoryPath aint = new CategoryPath("int", "a");
  private static final CategoryPath bint = new CategoryPath("int", "b");

@@ -63,8 +63,11 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {

  @BeforeClass
  public static void beforeClassAssociationsFacetRequestTest() throws Exception {
    dir = newDirectory();
    taxoDir = newDirectory();
    // preparations - index, taxonomy, content
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
    RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(TEST_VERSION_CURRENT,
        new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));

    TaxonomyWriter taxoWriter = new LuceneTaxonomyWriter(taxoDir);

@@ -87,18 +90,22 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
    }

    taxoWriter.close();
    reader = writer.getReader();
    writer.close();
  }

  @AfterClass
  public static void afterClassAssociationsFacetRequestTest() throws Exception {
    reader.close();
    reader = null;
    dir.close();
    dir = null;
    taxoDir.close();
    taxoDir = null;
  }

  @Test
  public void testIntSumAssociation() throws Exception {
    IndexReader reader = IndexReader.open(dir, true);
    LuceneTaxonomyReader taxo = new LuceneTaxonomyReader(taxoDir);

    // facet requests for two facets

@@ -110,7 +117,8 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {

    FacetsCollector fc = new FacetsCollector(fsp, reader, taxo);

    new IndexSearcher(reader).search(q, fc);
    IndexSearcher searcher = newSearcher(reader);
    searcher.search(q, fc);
    List<FacetResult> res = fc.getFacetResults();

    assertNotNull("No results!",res);

@@ -118,14 +126,12 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
    assertEquals("Wrong count for category 'a'!",200, (int) res.get(0).getFacetResultNode().getValue());
    assertEquals("Wrong count for category 'b'!",150, (int) res.get(1).getFacetResultNode().getValue());

    searcher.close();
    taxo.close();
    reader.close();
  }

  @Test
  public void testFloatSumAssociation() throws Exception {

    IndexReader reader = IndexReader.open(dir, true);
    LuceneTaxonomyReader taxo = new LuceneTaxonomyReader(taxoDir);

    // facet requests for two facets

@@ -137,7 +143,8 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {

    FacetsCollector fc = new FacetsCollector(fsp, reader, taxo);

    new IndexSearcher(reader).search(q, fc);
    IndexSearcher searcher = newSearcher(reader);
    searcher.search(q, fc);
    List<FacetResult> res = fc.getFacetResults();

    assertNotNull("No results!",res);

@@ -145,8 +152,8 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
    assertEquals("Wrong count for category 'a'!",50f, (float) res.get(0).getFacetResultNode().getValue(), 0.00001);
    assertEquals("Wrong count for category 'b'!",10f, (float) res.get(1).getFacetResultNode().getValue(), 0.00001);

    searcher.close();
    taxo.close();
    reader.close();
  }

  @Test

@@ -154,7 +161,6 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {
    // Same category list cannot be aggregated by two different aggregators. If
    // you want to do that, you need to separate the categories into two
    // category list (you'll still have one association list).
    IndexReader reader = IndexReader.open(dir, true);
    LuceneTaxonomyReader taxo = new LuceneTaxonomyReader(taxoDir);

    // facet requests for two facets

@@ -168,13 +174,16 @@ public class AssociationsFacetRequestTest extends LuceneTestCase {

    FacetsCollector fc = new FacetsCollector(fsp, reader, taxo);

    new IndexSearcher(reader).search(q, fc);
    IndexSearcher searcher = newSearcher(reader);
    searcher.search(q, fc);
    try {
      fc.getFacetResults();
      fail("different aggregators for same category list should not be supported");
    } catch (RuntimeException e) {
      // ok - expected
    }
    searcher.close();
    taxo.close();
  }

}
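The reworked AssociationsFacetRequestTest builds its index once in @BeforeClass and shares a single IndexReader, taken from RandomIndexWriter.getReader(), across all tests instead of reopening one per test. Condensed from the hunks above (a sketch, not the full class):

    private static Directory dir;
    private static IndexReader reader;

    @BeforeClass
    public static void beforeClass() throws Exception {
      dir = newDirectory();
      RandomIndexWriter writer = new RandomIndexWriter(random, dir,
          newIndexWriterConfig(TEST_VERSION_CURRENT,
              new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
      // ... add documents and category associations ...
      reader = writer.getReader(); // near-real-time reader, usable after close()
      writer.close();
    }

    @AfterClass
    public static void afterClass() throws Exception {
      reader.close();
      reader = null; // null the statics so they don't outlive the test class
      dir.close();
      dir = null;
    }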
@@ -2,7 +2,7 @@ package org.apache.lucene.facet.search.params;

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.Directory;
import org.junit.Test;

import org.apache.lucene.util.LuceneTestCase;

@@ -53,8 +53,8 @@ public class FacetRequestTest extends LuceneTestCase {
  @Test
  public void testGetFacetResultHandlerDifferentTaxonomy() throws Exception {
    FacetRequest fr = new CountFacetRequest(new CategoryPath("a"), 10);
    RAMDirectory dir1 = new RAMDirectory();
    RAMDirectory dir2 = new RAMDirectory();
    Directory dir1 = newDirectory();
    Directory dir2 = newDirectory();
    // create empty indexes, so that LTR ctor won't complain about a missing index.
    new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();
    new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();

@@ -63,6 +63,10 @@ public class FacetRequestTest extends LuceneTestCase {
    FacetResultsHandler frh1 = fr.createFacetResultsHandler(tr1);
    FacetResultsHandler frh2 = fr.createFacetResultsHandler(tr2);
    assertTrue("should not return the same FacetResultHandler instance for different TaxonomyReader instances", frh1 != frh2);
    tr1.close();
    tr2.close();
    dir1.close();
    dir2.close();
  }

  @Test

@@ -70,13 +74,15 @@ public class FacetRequestTest extends LuceneTestCase {
    // Tests that after a FRH is created by FR, changes to FR are not reflected
    // in the FRH.
    FacetRequest fr = new CountFacetRequest(new CategoryPath("a"), 10);
    RAMDirectory dir = new RAMDirectory();
    Directory dir = newDirectory();
    // create empty indexes, so that LTR ctor won't complain about a missing index.
    new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null)).close();
    TaxonomyReader tr = new LuceneTaxonomyReader(dir);
    FacetResultsHandler frh = fr.createFacetResultsHandler(tr);
    fr.setDepth(10);
    assertEquals(FacetRequest.DEFAULT_DEPTH, frh.getFacetRequest().getDepth());
    tr.close();
    dir.close();
  }

  @Test
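The RAMDirectory-to-newDirectory() conversions in this file, and throughout the commit, are not cosmetic: newDirectory() is expected to return a directory tracked by LuceneTestCase, which can fail the test if it is left unclosed, hence the matching close() calls added everywhere. Each converted test ends up with this shape:

    Directory dir = newDirectory();   // tracked by the test framework
    try {
      // ... exercise taxonomy writers and readers against dir ...
    } finally {
      dir.close();                    // omitting this should trip the leak check
    }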
@@ -1,6 +1,6 @@
package org.apache.lucene.facet.search.params;

import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.Directory;
import org.junit.Test;

import org.apache.lucene.util.LuceneTestCase;

@@ -36,11 +36,13 @@ public class FacetSearchParamsTest extends LuceneTestCase {
    FacetSearchParams fsp = new FacetSearchParams();
    assertEquals("unexpected default facet indexing params class", DefaultFacetIndexingParams.class.getName(), fsp.getFacetIndexingParams().getClass().getName());
    assertEquals("no facet requests should be added by default", 0, fsp.getFacetRequests().size());
    RAMDirectory dir = new RAMDirectory();
    Directory dir = newDirectory();
    new LuceneTaxonomyWriter(dir).close();
    TaxonomyReader tr = new LuceneTaxonomyReader(dir);
    assertEquals("unexpected partition offset for 0 categories", 1, PartitionsUtils.partitionOffset(fsp, 1, tr));
    assertEquals("unexpected partition size for 0 categories", 1, PartitionsUtils.partitionSize(fsp,tr));
    tr.close();
    dir.close();
  }

  @Test

@@ -53,7 +55,7 @@ public class FacetSearchParamsTest extends LuceneTestCase {
  @Test
  public void testPartitionSizeWithCategories() throws Exception {
    FacetSearchParams fsp = new FacetSearchParams();
    RAMDirectory dir = new RAMDirectory();
    Directory dir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(dir);
    tw.addCategory(new CategoryPath("a"));
    tw.commit();

@@ -61,6 +63,8 @@ public class FacetSearchParamsTest extends LuceneTestCase {
    TaxonomyReader tr = new LuceneTaxonomyReader(dir);
    assertEquals("unexpected partition offset for 1 categories", 2, PartitionsUtils.partitionOffset(fsp, 1, tr));
    assertEquals("unexpected partition size for 1 categories", 2, PartitionsUtils.partitionSize(fsp,tr));
    tr.close();
    dir.close();
  }

  @Test
@@ -4,14 +4,13 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.junit.Test;

import org.apache.lucene.util.LuceneTestCase;

@@ -90,8 +89,8 @@ public class MultiIteratorsPerCLParamsTest extends LuceneTestCase {
    // FacetRequest's dimension
    CategoryListParams clp = new CategoryListParams();
    FacetIndexingParams iParams = new DefaultFacetIndexingParams(clp);
    Directory indexDir = new RAMDirectory();
    Directory taxoDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    Directory taxoDir = newDirectory();
    populateIndex(iParams, indexDir, taxoDir);

    TaxonomyReader taxo = new LuceneTaxonomyReader(taxoDir);

@@ -122,6 +121,10 @@ public class MultiIteratorsPerCLParamsTest extends LuceneTestCase {
    countForbiddenDimension = null;
    validateFacetedSearch(iParams, taxo, reader, clCache, allDocs, new String[] {
        "author", "date" }, new int[] { 5, 5 }, new int[] { 5, 2 });
    taxo.close();
    reader.close();
    indexDir.close();
    taxoDir.close();
  }

  private void validateFacetedSearch(FacetIndexingParams iParams,

@@ -163,7 +166,8 @@ public class MultiIteratorsPerCLParamsTest extends LuceneTestCase {

  private void populateIndex(FacetIndexingParams iParams, Directory indexDir,
      Directory taxoDir) throws Exception {
    IndexWriter writer = new IndexWriter(indexDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
    RandomIndexWriter writer = new RandomIndexWriter(random, indexDir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
    TaxonomyWriter taxoWriter = new LuceneTaxonomyWriter(taxoDir);

    for (CategoryPath[] categories : perDocCategories) {
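KeywordAnalyzer and WhitespaceAnalyzer are consistently replaced by MockAnalyzer across the commit. MockTokenizer.KEYWORD keeps the whole field value as a single token and MockTokenizer.WHITESPACE splits on whitespace only, so tokenization is preserved while the mock adds the framework's checks; the third constructor argument (false here) disables lowercasing. A sketch of the two substitutions, assuming the three-argument constructor used in the hunks:

    // was: new KeywordAnalyzer()
    Analyzer keywordLike = new MockAnalyzer(random, MockTokenizer.KEYWORD, false);

    // was: new WhitespaceAnalyzer(TEST_VERSION_CURRENT)
    Analyzer whitespaceLike = new MockAnalyzer(random, MockTokenizer.WHITESPACE, false);

    RandomIndexWriter writer = new RandomIndexWriter(random, indexDir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, keywordLike));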
@@ -101,6 +101,7 @@ public abstract class BaseSampleTestTopK extends BaseTestTopK {
        }
      }
    }
    closeAll();
  }
}
@@ -158,13 +158,14 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testWriter() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    // Also check TaxonomyWriter.getSize() - see that the taxonomy's size
    // is what we expect it to be.
    assertEquals(expectedCategories.length, tw.getSize());
    tw.close();
    indexDir.close();
  }

  /** testWriterTwice is exactly like testWriter, except that after adding

@@ -173,7 +174,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testWriterTwice() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    // run fillTaxonomy again - this will try to add the same categories

@@ -184,6 +185,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    // extraneous categories were created:
    assertEquals(expectedCategories.length, tw.getSize());
    tw.close();
    indexDir.close();
  }

  /** testWriterTwice2 is similar to testWriterTwice, except that the index

@@ -194,7 +196,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testWriterTwice2() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();

@@ -206,6 +208,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    fillTaxonomy(tw);
    assertEquals(expectedCategories.length, tw.getSize());
    tw.close();
    indexDir.close();
  }

  /**

@@ -217,7 +220,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testWriterTwice3() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    // First, create and fill the taxonomy
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);

@@ -239,6 +242,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    tw.commit();
    assertEquals(expectedCategories.length+1, tw.getSize());
    tw.close();
    indexDir.close();
  }

  /** Another set of tests for the writer, which don't use an array and

@@ -248,7 +252,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testWriterSimpler() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    assertEquals(1, tw.getSize()); // the root only
    // Test that adding a new top-level category works

@@ -283,6 +287,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    assertEquals(9, tw.getSize());

    tw.close();
    indexDir.close();
  }

  /** Test writing an empty index, and seeing that a reader finds in it

@@ -291,7 +296,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testRootOnly() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    // right after opening the index, it should already contain the
    // root, so have size 1:

@@ -303,6 +308,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getParent(0));
    assertEquals(0, tr.getOrdinal(new CategoryPath()));
    tr.close();
    indexDir.close();
  }

  /** The following test is exactly the same as testRootOnly, except we

@@ -312,7 +318,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testRootOnly2() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    tw.commit();
    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);

@@ -322,6 +328,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    assertEquals(0, tr.getOrdinal(new CategoryPath()));
    tw.close();
    tr.close();
    indexDir.close();
  }

  /** Basic tests for TaxonomyReader's category <=> ordinal transformations

@@ -331,7 +338,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testReaderBasic() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();

@@ -373,6 +380,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    assertEquals(TaxonomyReader.INVALID_ORDINAL, tr.getOrdinal(new CategoryPath("Author", "Jules Verne")));

    tr.close();
    indexDir.close();
  }

  /** Tests for TaxonomyReader's getParent() method.

@@ -389,7 +397,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {

  @Test
  public void testReaderParent() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();

@@ -436,6 +444,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    }

    tr.close();
    indexDir.close();
  }

  /**

@@ -453,7 +462,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testWriterParent1() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();

@@ -464,11 +473,12 @@ public class TestTaxonomyCombined extends LuceneTestCase {

    tw.close();
    tr.close();
    indexDir.close();
  }

  @Test
  public void testWriterParent2() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.commit();

@@ -478,6 +488,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {

    tw.close();
    tr.close();
    indexDir.close();
  }

  private void checkWriterParent(TaxonomyReader tr, TaxonomyWriter tw) throws Exception {

@@ -530,7 +541,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testReaderParentArray() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();

@@ -541,6 +552,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
      assertEquals(tr.getParent(i), parents[i]);
    }
    tr.close();
    indexDir.close();
  }

  /**

@@ -550,7 +562,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testChildrenArrays() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();

@@ -601,6 +613,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
      }
    }
    tr.close();
    indexDir.close();
  }

  /**

@@ -613,7 +626,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testChildrenArraysInvariants() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    tw.close();

@@ -685,6 +698,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    }

    tr.close();
    indexDir.close();
  }

  /**

@@ -692,7 +706,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testChildrenArraysGrowth() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    tw.addCategory(new CategoryPath("hi", "there"));
    tw.commit();

@@ -722,6 +736,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    assertTrue(Arrays.equals(new int[] { -1, -1, -1, 2, 1 }, ca.getOlderSiblingArray()));
    tw.close();
    tr.close();
    indexDir.close();
  }

  /**

@@ -731,7 +746,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
  @Ignore
  public void testTaxonomyReaderRefreshRaces() throws Exception {
    // compute base child arrays - after first chunk, and after the other
    Directory indexDirBase = new RAMDirectory();
    Directory indexDirBase = newDirectory();
    TaxonomyWriter twBase = new LuceneTaxonomyWriter(indexDirBase);
    twBase.addCategory(new CategoryPath("a", "0"));
    final CategoryPath abPath = new CategoryPath("a", "b");

@@ -757,6 +772,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    for (int retry=0; retry<100; retry++) {
      assertConsistentYoungestChild(abPath, abOrd, abYoungChildBase1, abYoungChildBase2, retry);
    }
    indexDirBase.close();
  }

  private void assertConsistentYoungestChild(final CategoryPath abPath,

@@ -848,7 +864,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testSeparateReaderAndWriter() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    tw.commit();
    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);

@@ -910,11 +926,12 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    assertEquals(3, tr.getSize());
    tw.close();
    tr.close();
    indexDir.close();
  }

  @Test
  public void testSeparateReaderAndWriter2() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    tw.commit();
    TaxonomyReader tr = new LuceneTaxonomyReader(indexDir);

@@ -940,6 +957,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    assertEquals(2, tr.getSize()); // still root only...
    tw.close();
    tr.close();
    indexDir.close();
  }

  /**

@@ -948,6 +966,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testWriterLock() throws Exception {
    // native fslock impl gets angry if we use it, so use RAMDirectory explicitly.
    Directory indexDir = new RAMDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    tw.addCategory(new CategoryPath("hi", "there"));

@@ -975,6 +994,8 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    tr.refresh();
    assertEquals(3, tr.getOrdinal(new CategoryPath("hey")));
    tr.close();
    tw.close();
    indexDir.close();
  }

  /**

@@ -1032,13 +1053,14 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testWriterCheckPaths() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomyCheckPaths(tw);
    // Also check TaxonomyWriter.getSize() - see that the taxonomy's size
    // is what we expect it to be.
    assertEquals(expectedCategories.length, tw.getSize());
    tw.close();
    indexDir.close();
  }

  /**

@@ -1050,7 +1072,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
   */
  @Test
  public void testWriterCheckPaths2() throws Exception {
    Directory indexDir = new RAMDirectory();
    Directory indexDir = newDirectory();
    TaxonomyWriter tw = new LuceneTaxonomyWriter(indexDir);
    fillTaxonomy(tw);
    checkPaths(tw);

@@ -1063,6 +1085,7 @@ public class TestTaxonomyCombined extends LuceneTestCase {
    fillTaxonomy(tw);
    checkPaths(tw);
    tw.close();
    indexDir.close();
  }

  // TODO (Facet): test multiple readers, one writer. Have the multiple readers
@@ -3,10 +3,11 @@ package org.apache.lucene.facet.taxonomy.lucene;
import java.io.File;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.junit.Test;

import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyReader;

@@ -36,16 +37,16 @@ public class TestAddTaxonomies extends LuceneTestCase {

  @Test
  public void test1() throws Exception {
    Directory dir1 = new RAMDirectory();
    Directory dir1 = newDirectory();
    LuceneTaxonomyWriter tw1 = new LuceneTaxonomyWriter(dir1);
    tw1.addCategory(new CategoryPath("Author", "Mark Twain"));
    tw1.addCategory(new CategoryPath("Animals", "Dog"));
    Directory dir2 = new RAMDirectory();
    Directory dir2 = newDirectory();
    LuceneTaxonomyWriter tw2 = new LuceneTaxonomyWriter(dir2);
    tw2.addCategory(new CategoryPath("Author", "Rob Pike"));
    tw2.addCategory(new CategoryPath("Aardvarks", "Bob"));
    tw2.close();
    Directory dir3 = new RAMDirectory();
    Directory dir3 = newDirectory();
    LuceneTaxonomyWriter tw3 = new LuceneTaxonomyWriter(dir3);
    tw3.addCategory(new CategoryPath("Author", "Zebra Smith"));
    tw3.addCategory(new CategoryPath("Aardvarks", "Bob"));

@@ -93,10 +94,26 @@ public class TestAddTaxonomies extends LuceneTestCase {
    assertEquals(5, map1[3]);
    assertEquals(7, map1[4]);
    assertEquals(6, map1[5]);

    tr.close();
    dir1.close();
    dir2.close();
    dir3.close();
  }

  // a reasonable random test
  public void testmedium() throws Exception {
    int numTests = atLeast(3);
    for (int i = 0; i < numTests; i++) {
      dotest(_TestUtil.nextInt(random, 1, 10),
          _TestUtil.nextInt(random, 1, 100),
          _TestUtil.nextInt(random, 100, 1000),
          random.nextBoolean());
    }
  }

  // A more comprehensive and big random test.
  @Test
  @Test @Nightly
  public void testbig() throws Exception {
    dotest(2, 1000, 5000, false);
    dotest(10, 10000, 100, false);

@@ -113,8 +130,8 @@ public class TestAddTaxonomies extends LuceneTestCase {
    Directory copydirs[] = new Directory[ntaxonomies];

    for (int i=0; i<ntaxonomies; i++) {
      dirs[i] = new RAMDirectory();
      copydirs[i] = new RAMDirectory();
      dirs[i] = newDirectory();
      copydirs[i] = newDirectory();
      LuceneTaxonomyWriter tw = new LuceneTaxonomyWriter(dirs[i]);
      LuceneTaxonomyWriter copytw = new LuceneTaxonomyWriter(copydirs[i]);
      for (int j=0; j<ncats; j++) {

@@ -135,6 +152,7 @@ public class TestAddTaxonomies extends LuceneTestCase {
    if (ntaxonomies>1) {
      for (int i=0; i<ntaxonomies-1; i++) {
        if (disk) {
          // TODO: use a LTC tempfile
          maps[i] = new DiskOrdinalMap(new File(System.getProperty("java.io.tmpdir"),
              "tmpmap"+i));
        } else {

@@ -193,7 +211,7 @@ public class TestAddTaxonomies extends LuceneTestCase {
        int otherord = main.getOrdinal(other.getPath(j));
        assertTrue(otherord != TaxonomyReader.INVALID_ORDINAL);
      }
      tr.close();
      other.close();
    }

    // Check that all the new categories in the merged taxonomy exist in

@@ -229,6 +247,8 @@ public class TestAddTaxonomies extends LuceneTestCase {
    }

    main.close();
    IOUtils.closeSafely(false, dirs);
    IOUtils.closeSafely(false, copydirs);
  }

}
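TestAddTaxonomies now closes whole Directory arrays through IOUtils.closeSafely. Assuming the closeSafely(boolean, Closeable...) semantics implied above, a first argument of false means close() is attempted on every object and the first exception encountered is rethrown afterwards, so one failing close cannot leak the remaining directories:

    Directory[] dirs = { newDirectory(), newDirectory() };
    try {
      // ... build and merge taxonomies ...
    } finally {
      // false = do not swallow errors: rethrow the first failure after
      // attempting to close every directory in the array.
      IOUtils.closeSafely(false, dirs);
    }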
@@ -4,7 +4,6 @@ import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.FilterIndexReader;
import org.apache.lucene.index.IndexReader;

@@ -13,10 +12,11 @@ import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.RAMDirectory;
import org.junit.Test;

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.facet.taxonomy.CategoryPath;
import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyReader;
import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyWriter;

@@ -50,7 +50,7 @@ public class TestIndexClose extends LuceneTestCase {
  @Test
  public void testLeaks() throws Exception {
    LeakChecker checker = new LeakChecker();
    Directory dir = new RAMDirectory();
    Directory dir = newDirectory();
    LuceneTaxonomyWriter tw = checker.openWriter(dir);
    tw.close();
    assertEquals(0, checker.nopen());

@@ -88,6 +88,7 @@ public class TestIndexClose extends LuceneTestCase {
    }
    tw.close();
    assertEquals(0, checker.nopen());
    dir.close();
  }

  private static class LeakChecker {

@@ -132,7 +133,7 @@ public class TestIndexClose extends LuceneTestCase {
    protected void openLuceneIndex (Directory directory, OpenMode openMode)
        throws CorruptIndexException, LockObtainFailedException, IOException {
      indexWriter = new InstrumentedIndexWriter(directory,
          new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer())
          newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random, MockTokenizer.KEYWORD, false))
              .setOpenMode(openMode));
    }
@@ -6,7 +6,6 @@ import java.util.Map;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.junit.Test;

import org.apache.lucene.util.LuceneTestCase;

@@ -52,7 +51,7 @@ public class TestLuceneTaxonomyWriter extends LuceneTestCase {
  public void testCommit() throws Exception {
    // Verifies that nothing is committed to the underlying Directory, if
    // commit() wasn't called.
    Directory dir = new RAMDirectory();
    Directory dir = newDirectory();
    LuceneTaxonomyWriter ltw = new LuceneTaxonomyWriter(dir, OpenMode.CREATE_OR_APPEND, new NoOpCache());
    assertFalse(IndexReader.indexExists(dir));
    ltw.commit(); // first commit, so that an index will be created

@@ -68,7 +67,7 @@ public class TestLuceneTaxonomyWriter extends LuceneTestCase {
  @Test
  public void testCommitUserData() throws Exception {
    // Verifies that committed data is retrievable
    Directory dir = new RAMDirectory();
    Directory dir = newDirectory();
    LuceneTaxonomyWriter ltw = new LuceneTaxonomyWriter(dir, OpenMode.CREATE_OR_APPEND, new NoOpCache());
    assertFalse(IndexReader.indexExists(dir));
    ltw.commit(); // first commit, so that an index will be created
@@ -1,16 +1,17 @@
package org.apache.lucene.facet.util;

import java.io.IOException;
import java.util.Random;

import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;

@@ -18,7 +19,6 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.OpenBitSet;
import org.apache.lucene.util.OpenBitSetDISI;

@@ -50,7 +50,7 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {

  @Test
  public void testComplementIterator() throws Exception {
    final int n = 100000;
    final int n = atLeast(10000);
    final OpenBitSet bits = new OpenBitSet(n);
    for (int i = 0; i < 5 * n; i++) {
      bits.flip(random.nextInt(n));

@@ -61,19 +61,22 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {

    ScoredDocIDs scoredDocIDs = ScoredDocIdsUtils.createScoredDocIds(bits, n);

    IndexReader reader = createReaderWithNDocs(n);
    Directory dir = newDirectory();
    IndexReader reader = createReaderWithNDocs(random, n, dir);
    try {
      assertEquals(n - verify.cardinality(), ScoredDocIdsUtils.getComplementSet(scoredDocIDs,
          reader).size());
    } finally {
      reader.close();
      dir.close();
    }
  }

  @Test
  public void testAllDocs() throws Exception {
    int maxDoc = 3;
    IndexReader reader = createReaderWithNDocs(maxDoc);
    Directory dir = newDirectory();
    IndexReader reader = createReaderWithNDocs(random, maxDoc, dir);
    try {
      ScoredDocIDs all = ScoredDocIdsUtils.createAllDocsScoredDocIDs(reader);
      assertEquals("invalid size", maxDoc, all.size());

@@ -95,6 +98,7 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
      assertEquals(2, docIDsIter.advance(0));
    } finally {
      reader.close();
      dir.close();
    }
  }

@@ -119,7 +123,8 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
      }
    };

    IndexReader reader = createReaderWithNDocs(N_DOCS, docFactory);
    Directory dir = newDirectory();
    IndexReader reader = createReaderWithNDocs(random, N_DOCS, docFactory, dir);
    try {
      int numErasedDocs = reader.numDeletedDocs();

@@ -142,7 +147,9 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
      // Get all 'alpha' documents
      ScoredDocIdCollector collector = ScoredDocIdCollector.create(reader.maxDoc(), false);
      Query q = new TermQuery(new Term(DocumentFactory.field, DocumentFactory.alphaTxt));
      new IndexSearcher(reader).search(q, collector);
      IndexSearcher searcher = newSearcher(reader);
      searcher.search(q, collector);
      searcher.close();

      ScoredDocIDs scoredDocIds = collector.getScoredDocIDs();
      OpenBitSet resultSet = new OpenBitSetDISI(scoredDocIds.getDocIDs().iterator(), reader.maxDoc());

@@ -171,15 +178,15 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
      }
    } finally {
      reader.close();
      dir.close();
    }
  }

  /**
   * Creates an index with n documents, this method is meant for testing purposes ONLY
   * Note that this reader is NOT read-only and documents can be deleted.
   */
  static IndexReader createReaderWithNDocs(int nDocs) throws IOException {
    return createReaderWithNDocs(nDocs, new DocumentFactory(nDocs));
  static IndexReader createReaderWithNDocs(Random random, int nDocs, Directory directory) throws IOException {
    return createReaderWithNDocs(random, nDocs, new DocumentFactory(nDocs), directory);
  }

  private static class DocumentFactory {

@@ -217,23 +224,21 @@ public class TestScoredDocIDsUtils extends LuceneTestCase {
    }
  }

  static IndexReader createReaderWithNDocs(int nDocs, DocumentFactory docFactory) throws IOException {
    Directory ramDir = new RAMDirectory();

  static IndexReader createReaderWithNDocs(Random random, int nDocs, DocumentFactory docFactory, Directory dir) throws IOException {
    // Create the index
    IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
    RandomIndexWriter writer = new RandomIndexWriter(random, dir, newIndexWriterConfig(random, TEST_VERSION_CURRENT,
        new MockAnalyzer(random, MockTokenizer.KEYWORD, false)));
    for (int docNum = 0; docNum < nDocs; docNum++) {
      writer.addDocument(docFactory.getDoc(docNum));
    }
    writer.commit();
    writer.close();

    // Delete documents marked for deletion
    IndexReader reader = IndexReader.open(ramDir, false);
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocuments(new Term(DocumentFactory.field, DocumentFactory.delTxt));
    reader.close();

    // Open a fresh read-only reader with the deletions in place
    return IndexReader.open(ramDir, true);
    return IndexReader.open(dir, true);
  }
}
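The createReaderWithNDocs helper used to create its own private RAMDirectory that nothing outside could close; its new signature takes the test's Random plus a caller-supplied Directory, moving ownership of the directory to the caller. The call-site pattern this produces, condensed from the testAllDocs hunk above:

    Directory dir = newDirectory();
    IndexReader reader = createReaderWithNDocs(random, maxDoc, dir);
    try {
      // ... assertions against the reader ...
    } finally {
      reader.close();
      dir.close(); // the caller, not the helper, now owns the directory
    }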
@@ -28,6 +28,7 @@ import org.apache.lucene.util.ThreadInterruptedException;
/**
 * Test utility - slow directory
 */
// TODO: move to test-framework and sometimes use in tests?
public class SlowRAMDirectory extends RAMDirectory {

  private static final int IO_SLEEP_THRESHOLD = 50;