LUCENE-5801: rename test vars, class and add missing ctor

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1607781 13f79535-47bb-0310-9956-ffa450edef68
Shai Erera 2014-07-04 03:34:20 +00:00
parent f878737c44
commit 2cc8facde2
3 changed files with 42 additions and 51 deletions

File: OrdinalMappingAtomicReader.java

@@ -141,4 +141,3 @@ public class OrdinalMappingAtomicReader extends FilterAtomicReader {
   }
 }

File: TaxonomyMergeUtils.java

@@ -33,30 +33,32 @@ import org.apache.lucene.store.Directory;
  * Utility methods for merging index and taxonomy directories.
  * @lucene.experimental
  */
-public class TaxonomyMergeUtils {
+public abstract class TaxonomyMergeUtils {
 
+  private TaxonomyMergeUtils() {}
+
   /**
    * Merges the given taxonomy and index directories and commits the changes to
    * the given writers.
    */
-  public static void merge(Directory srcIndexDir, Directory srcTaxDir, OrdinalMap map, IndexWriter destIndexWriter,
-      DirectoryTaxonomyWriter destTaxWriter) throws IOException {
+  public static void merge(Directory srcIndexDir, Directory srcTaxoDir, OrdinalMap map, IndexWriter destIndexWriter,
+      DirectoryTaxonomyWriter destTaxoWriter) throws IOException {
     // merge the taxonomies
-    destTaxWriter.addTaxonomy(srcTaxDir, map);
+    destTaxoWriter.addTaxonomy(srcTaxoDir, map);
     int ordinalMap[] = map.getMap();
     DirectoryReader reader = DirectoryReader.open(srcIndexDir);
-    List<AtomicReaderContext> leaves = reader.leaves();
-    int numReaders = leaves.size();
-    AtomicReader wrappedLeaves[] = new AtomicReader[numReaders];
-    for (int i = 0; i < numReaders; i++) {
-      wrappedLeaves[i] = new OrdinalMappingAtomicReader(leaves.get(i).reader(), ordinalMap);
-    }
     try {
+      List<AtomicReaderContext> leaves = reader.leaves();
+      int numReaders = leaves.size();
+      AtomicReader wrappedLeaves[] = new AtomicReader[numReaders];
+      for (int i = 0; i < numReaders; i++) {
+        wrappedLeaves[i] = new OrdinalMappingAtomicReader(leaves.get(i).reader(), ordinalMap);
+      }
       destIndexWriter.addIndexes(new MultiReader(wrappedLeaves));
       // commit changes to taxonomy and index respectively.
-      destTaxWriter.commit();
+      destTaxoWriter.commit();
       destIndexWriter.commit();
     } finally {
       reader.close();
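
For context, a minimal caller sketch of the merge API after this change. It is not part of the commit: the wrapper class name (MergeFacetedIndexes), the mergeInto helper, and the Version constant are illustrative assumptions, and the source directories are assumed to already hold a faceted index plus its taxonomy.

import java.io.IOException;

import org.apache.lucene.facet.taxonomy.TaxonomyMergeUtils;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.Version;

/** Hypothetical caller of TaxonomyMergeUtils; not part of this commit. */
public class MergeFacetedIndexes {

  /** Merges the source index/taxonomy pair into the destination pair. */
  public static void mergeInto(Directory srcIndexDir, Directory srcTaxoDir,
      Directory destIndexDir, Directory destTaxoDir) throws IOException {
    // A null analyzer is enough here: the destination writer is only used for
    // addIndexes(), which does not analyze text (the test below does the same).
    // Substitute the Version constant that matches your Lucene release.
    IndexWriter destIndexWriter = new IndexWriter(destIndexDir,
        new IndexWriterConfig(Version.LUCENE_CURRENT, null));
    DirectoryTaxonomyWriter destTaxoWriter = new DirectoryTaxonomyWriter(destTaxoDir);
    try {
      // MemoryOrdinalMap records how source taxonomy ordinals map onto the destination taxonomy.
      TaxonomyMergeUtils.merge(srcIndexDir, srcTaxoDir, new MemoryOrdinalMap(),
          destIndexWriter, destTaxoWriter);
    } finally {
      IOUtils.close(destIndexWriter, destTaxoWriter);
    }
  }
}

With the constructor now private and the class abstract, TaxonomyMergeUtils can neither be instantiated nor usefully subclassed; callers go through the static merge method only.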

File: OrdinalMappingReaderTest.java → TestOrdinalMappingAtomicReader.java

@@ -2,8 +2,6 @@ package org.apache.lucene.facet.taxonomy;
 import java.io.IOException;
-import org.apache.lucene.analysis.MockAnalyzer;
-import org.apache.lucene.analysis.MockTokenizer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.facet.FacetField;
 import org.apache.lucene.facet.FacetResult;
@@ -12,8 +10,6 @@ import org.apache.lucene.facet.Facets;
 import org.apache.lucene.facet.FacetsCollector;
 import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.facet.LabelAndValue;
-import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
-import org.apache.lucene.facet.taxonomy.TaxonomyMergeUtils;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
@@ -45,10 +41,10 @@ import org.junit.Test;
  * limitations under the License.
  */
-public class OrdinalMappingReaderTest extends FacetTestCase {
+public class TestOrdinalMappingAtomicReader extends FacetTestCase {
   private static final int NUM_DOCS = 100;
-  private FacetsConfig facetConfig = new FacetsConfig();
+  private final FacetsConfig facetConfig = new FacetsConfig();
 
   @Before
   @Override
@@ -59,61 +55,55 @@ public class OrdinalMappingReaderTest extends FacetTestCase {
   @Test
   public void testTaxonomyMergeUtils() throws Exception {
-    Directory dir = newDirectory();
-    Directory taxDir = newDirectory();
-    buildIndexWithFacets(dir, taxDir, true);
-    Directory dir1 = newDirectory();
-    Directory taxDir1 = newDirectory();
-    buildIndexWithFacets(dir1, taxDir1, false);
-    IndexWriter destIndexWriter = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
-    DirectoryTaxonomyWriter destTaxWriter = new DirectoryTaxonomyWriter(taxDir1);
+    Directory srcIndexDir = newDirectory();
+    Directory srcTaxoDir = newDirectory();
+    buildIndexWithFacets(srcIndexDir, srcTaxoDir, true);
+    Directory targetIndexDir = newDirectory();
+    Directory targetTaxoDir = newDirectory();
+    buildIndexWithFacets(targetIndexDir, targetTaxoDir, false);
+    IndexWriter destIndexWriter = new IndexWriter(targetIndexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
+    DirectoryTaxonomyWriter destTaxoWriter = new DirectoryTaxonomyWriter(targetTaxoDir);
     try {
-      TaxonomyMergeUtils.merge(dir, taxDir, new MemoryOrdinalMap(), destIndexWriter, destTaxWriter);
+      TaxonomyMergeUtils.merge(srcIndexDir, srcTaxoDir, new MemoryOrdinalMap(), destIndexWriter, destTaxoWriter);
     } finally {
-      IOUtils.close(destIndexWriter, destTaxWriter);
+      IOUtils.close(destIndexWriter, destTaxoWriter);
     }
-    verifyResults(dir1, taxDir1);
-    dir1.close();
-    taxDir1.close();
-    dir.close();
-    taxDir.close();
+    verifyResults(targetIndexDir, targetTaxoDir);
+    IOUtils.close(targetIndexDir, targetTaxoDir, srcIndexDir, srcTaxoDir);
   }
 
-  private void verifyResults(Directory dir, Directory taxDir) throws IOException {
-    DirectoryReader reader1 = DirectoryReader.open(dir);
-    DirectoryTaxonomyReader taxReader = new DirectoryTaxonomyReader(taxDir);
-    IndexSearcher searcher = newSearcher(reader1);
+  private void verifyResults(Directory indexDir, Directory taxoDir) throws IOException {
+    DirectoryReader indexReader = DirectoryReader.open(indexDir);
+    DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
+    IndexSearcher searcher = newSearcher(indexReader);
     FacetsCollector collector = new FacetsCollector();
     FacetsCollector.search(searcher, new MatchAllDocsQuery(), 10, collector);
-    Facets facets = new FastTaxonomyFacetCounts(taxReader, facetConfig, collector);
+    Facets facets = new FastTaxonomyFacetCounts(taxoReader, facetConfig, collector);
     FacetResult result = facets.getTopChildren(10, "tag");
     for (LabelAndValue lv: result.labelValues) {
-      int weight = lv.value.intValue();
-      String label = lv.label;
       if (VERBOSE) {
-        System.out.println(label + ": " + weight);
+        System.out.println(lv);
       }
-      assertEquals(NUM_DOCS ,weight);
+      assertEquals(NUM_DOCS, lv.value.intValue());
     }
-    reader1.close();
-    taxReader.close();
+    IOUtils.close(indexReader, taxoReader);
   }
 
-  private void buildIndexWithFacets(Directory dir, Directory taxDir, boolean asc) throws IOException {
-    IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
-        new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false));
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
-    DirectoryTaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxDir);
+  private void buildIndexWithFacets(Directory indexDir, Directory taxoDir, boolean asc) throws IOException {
+    IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT, null);
+    RandomIndexWriter writer = new RandomIndexWriter(random(), indexDir, config);
+    DirectoryTaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
     for (int i = 1; i <= NUM_DOCS; i++) {
       Document doc = new Document();
       for (int j = i; j <= NUM_DOCS; j++) {
-        int facetValue = asc? j: NUM_DOCS - j;
+        int facetValue = asc ? j: NUM_DOCS - j;
         doc.add(new FacetField("tag", Integer.toString(facetValue)));
       }
       writer.addDocument(facetConfig.build(taxonomyWriter, doc));