mirror of https://github.com/apache/lucene.git

LUCENE-5801: OrdinalMappingAtomicReader should not wrap non-facet BinaryDocValues fields

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1612079 13f79535-47bb-0310-9956-ffa450edef68

commit 1f37d6b448
parent dcb6f15e7f
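For context, here is a rough sketch of the indexing pattern this change is about, loosely modeled on the test added below and assuming the Lucene 4.x facet API shown in the diff (FacetsConfig, FacetField, BinaryDocValuesField). A document can carry both a facet field, whose taxonomy ordinals end up encoded in a BinaryDocValues field, and a plain BinaryDocValues field that has nothing to do with facets; only the former should be rewritten when readers are wrapped for a taxonomy merge. The method and field names here are illustrative, not part of the commit.

import java.io.IOException;

import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.TaxonomyWriter;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.util.BytesRef;

static void addMixedDoc(IndexWriter indexWriter, TaxonomyWriter taxoWriter,
                        FacetsConfig facetConfig) throws IOException {
  Document doc = new Document();
  // Facet field: FacetsConfig.build() encodes its taxonomy ordinal into a
  // BinaryDocValues field (the default "$facets" index field).
  doc.add(new FacetField("tag", "foo"));
  // Plain BinaryDocValues field: arbitrary application bytes that must not be
  // reinterpreted as facet ordinals when this index is merged into another one.
  doc.add(new BinaryDocValuesField("bdv", new BytesRef("some-payload")));
  indexWriter.addDocument(facetConfig.build(taxoWriter, doc));
}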
OrdinalMappingAtomicReader.java (org.apache.lucene.facet.taxonomy)

@@ -18,8 +18,11 @@ package org.apache.lucene.facet.taxonomy;
  */
 
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.facet.FacetsConfig.DimConfig;
 import org.apache.lucene.facet.taxonomy.OrdinalsReader.OrdinalsSegmentReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.OrdinalMap;
 import org.apache.lucene.index.AtomicReader;
@@ -107,15 +110,21 @@ public class OrdinalMappingAtomicReader extends FilterAtomicReader {
 
   private final int[] ordinalMap;
   private final InnerFacetsConfig facetsConfig;
   private final Set<String> facetFields;
 
   /**
-   * Wraps an AtomicReader, mapping ordinals according to the ordinalMap,
-   * using the provided indexingParams.
+   * Wraps an AtomicReader, mapping ordinals according to the ordinalMap, using
+   * the provided {@link FacetsConfig} which was used to build the wrapped
+   * reader.
   */
-  public OrdinalMappingAtomicReader(AtomicReader in, int[] ordinalMap) {
+  public OrdinalMappingAtomicReader(AtomicReader in, int[] ordinalMap, FacetsConfig srcConfig) {
     super(in);
     this.ordinalMap = ordinalMap;
     facetsConfig = new InnerFacetsConfig();
     facetFields = new HashSet<>();
     for (DimConfig dc : srcConfig.getDimConfigs().values()) {
       facetFields.add(dc.indexFieldName);
     }
   }
 
   /**
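The new constructor derives the set of BinaryDocValues fields that actually hold facet ordinals from the FacetsConfig the source index was built with. Below is a minimal sketch of that mapping, with hypothetical dimension and field names (getDimConfigs() and DimConfig.indexFieldName are the accessors the patch itself uses):

import java.util.HashSet;
import java.util.Set;

import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.FacetsConfig.DimConfig;

static Set<String> facetIndexFields() {
  FacetsConfig config = new FacetsConfig();
  config.setHierarchical("path", true);                   // stays in the default "$facets" field
  config.setIndexFieldName("author", "$author_facets");   // routed to a custom field

  // Mirrors what the patched constructor does: collect every index field name
  // that carries facet ordinals, so getBinaryDocValues() knows what to wrap.
  Set<String> facetFields = new HashSet<>();
  for (DimConfig dc : config.getDimConfigs().values()) {
    facetFields.add(dc.indexFieldName);
  }
  return facetFields; // here: ["$facets", "$author_facets"]
}

Any other BinaryDocValues field (such as "bdv" and "cbdv" in the test below) is not in this set, so the wrapped reader now returns it untouched.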
@@ -136,8 +145,12 @@ public class OrdinalMappingAtomicReader extends FilterAtomicReader {
 
   @Override
   public BinaryDocValues getBinaryDocValues(String field) throws IOException {
     if (facetFields.contains(field)) {
       final OrdinalsReader ordsReader = getOrdinalsReader(field);
       return new OrdinalMappingBinaryDocValues(ordsReader.getReader(in.getContext()));
     } else {
       return in.getBinaryDocValues(field);
     }
   }
 
 }
TaxonomyMergeUtils.java (org.apache.lucene.facet.taxonomy)

@@ -3,6 +3,7 @@ package org.apache.lucene.facet.taxonomy;
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.lucene.facet.FacetsConfig;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.OrdinalMap;
 import org.apache.lucene.index.AtomicReader;
@@ -42,7 +43,7 @@ public abstract class TaxonomyMergeUtils {
    * the given writers.
    */
   public static void merge(Directory srcIndexDir, Directory srcTaxoDir, OrdinalMap map, IndexWriter destIndexWriter,
-      DirectoryTaxonomyWriter destTaxoWriter) throws IOException {
+      DirectoryTaxonomyWriter destTaxoWriter, FacetsConfig srcConfig) throws IOException {
 
     // merge the taxonomies
     destTaxoWriter.addTaxonomy(srcTaxoDir, map);
@@ -53,7 +54,7 @@ public abstract class TaxonomyMergeUtils {
     int numReaders = leaves.size();
     AtomicReader wrappedLeaves[] = new AtomicReader[numReaders];
     for (int i = 0; i < numReaders; i++) {
-      wrappedLeaves[i] = new OrdinalMappingAtomicReader(leaves.get(i).reader(), ordinalMap);
+      wrappedLeaves[i] = new OrdinalMappingAtomicReader(leaves.get(i).reader(), ordinalMap, srcConfig);
     }
     destIndexWriter.addIndexes(new MultiReader(wrappedLeaves));
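With the extra parameter, callers of TaxonomyMergeUtils.merge now pass the FacetsConfig that the source index was built with, so the wrapped readers know which fields to remap. A hedged usage sketch, mirroring the test change below (the directory and writer setup is assumed):

import java.io.IOException;

import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.TaxonomyMergeUtils;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.IOUtils;

// srcIndexDir/srcTaxoDir hold the index being merged in; destIndexWriter and
// destTaxoWriter belong to the target index; srcConfig is the FacetsConfig
// that was used when the source index was built.
static void mergeInto(Directory srcIndexDir, Directory srcTaxoDir,
                      IndexWriter destIndexWriter, DirectoryTaxonomyWriter destTaxoWriter,
                      FacetsConfig srcConfig) throws IOException {
  try {
    TaxonomyMergeUtils.merge(srcIndexDir, srcTaxoDir, new MemoryOrdinalMap(),
                             destIndexWriter, destTaxoWriter, srcConfig);
  } finally {
    IOUtils.close(destIndexWriter, destTaxoWriter);
  }
}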
TestOrdinalMappingAtomicReader.java (org.apache.lucene.facet.taxonomy)

@@ -2,6 +2,7 @@ package org.apache.lucene.facet.taxonomy;
 
 import java.io.IOException;
 
+import org.apache.lucene.document.BinaryDocValuesField;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.facet.FacetField;
 import org.apache.lucene.facet.FacetResult;

@@ -13,13 +14,16 @@ import org.apache.lucene.facet.LabelAndValue;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
 import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter.MemoryOrdinalMap;
+import org.apache.lucene.index.BinaryDocValues;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.MultiDocValues;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IOUtils;
 import org.junit.Before;
 import org.junit.Test;
@@ -66,7 +70,7 @@ public class TestOrdinalMappingAtomicReader extends FacetTestCase {
     IndexWriter destIndexWriter = new IndexWriter(targetIndexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
     DirectoryTaxonomyWriter destTaxoWriter = new DirectoryTaxonomyWriter(targetTaxoDir);
     try {
-      TaxonomyMergeUtils.merge(srcIndexDir, srcTaxoDir, new MemoryOrdinalMap(), destIndexWriter, destTaxoWriter);
+      TaxonomyMergeUtils.merge(srcIndexDir, srcTaxoDir, new MemoryOrdinalMap(), destIndexWriter, destTaxoWriter, facetConfig);
     } finally {
       IOUtils.close(destIndexWriter, destTaxoWriter);
     }
@@ -92,6 +96,11 @@ public class TestOrdinalMappingAtomicReader extends FacetTestCase {
       assertEquals(NUM_DOCS, lv.value.intValue());
     }
 
+    BinaryDocValues bdv = MultiDocValues.getBinaryValues(indexReader, "bdv");
+    BinaryDocValues cbdv = MultiDocValues.getBinaryValues(indexReader, "cbdv");
+    for (int i = 0; i < indexReader.maxDoc(); i++) {
+      assertEquals(Integer.parseInt(cbdv.get(i).utf8ToString()), Integer.parseInt(bdv.get(i).utf8ToString())*2);
+    }
     IOUtils.close(indexReader, taxoReader);
   }
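The assertions above check that the non-facet fields ("bdv", "cbdv") survive the merge byte-for-byte; the facet side of the merged index can be verified with an ordinary facet count, roughly like this (the "tag" dimension comes from the test, the rest of the setup is assumed):

import java.io.IOException;

import org.apache.lucene.facet.FacetResult;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;

// Count the "tag" dimension over the merged index; if ordinals were remapped
// correctly, every label reports the expected per-label count.
static FacetResult countTags(DirectoryReader indexReader, TaxonomyReader taxoReader,
                             FacetsConfig facetConfig) throws IOException {
  IndexSearcher searcher = new IndexSearcher(indexReader);
  FacetsCollector fc = new FacetsCollector();
  searcher.search(new MatchAllDocsQuery(), fc);
  Facets facets = new FastTaxonomyFacetCounts(taxoReader, facetConfig, fc);
  return facets.getTopChildren(10, "tag");
}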
@@ -106,6 +115,9 @@ public class TestOrdinalMappingAtomicReader extends FacetTestCase {
         int facetValue = asc ? j: NUM_DOCS - j;
         doc.add(new FacetField("tag", Integer.toString(facetValue)));
       }
+      // make sure OrdinalMappingAtomicReader ignores non-facet fields
+      doc.add(new BinaryDocValuesField("bdv", new BytesRef(Integer.toString(i))));
+      doc.add(new BinaryDocValuesField("cbdv", new BytesRef(Integer.toString(i*2))));
       writer.addDocument(facetConfig.build(taxonomyWriter, doc));
     }
     taxonomyWriter.commit();