mirror of https://github.com/apache/lucene.git
LUCENE-4849: make ParallelTaxonomyArrays abstract
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1457807 13f79535-47bb-0310-9956-ffa450edef68
parent 7f2ea1c098
commit b9c8815f3d
@@ -45,6 +45,10 @@ Changes in backwards compatibility policy
  TieredMergePolicy. IndexWriterConfig setters now throw an exception when
  passed null if null is not a valid value.

+* LUCENE-4849: Made ParallelTaxonomyArrays abstract with a concrete
+  implementation for DirectoryTaxonomyWriter/Reader. Also moved it under
+  o.a.l.facet.taxonomy. (Shai Erera)
+
New Features

* LUCENE-4815: DrillSideways now allows more than one FacetRequest per
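With this change the public ParallelTaxonomyArrays type lives in org.apache.lucene.facet.taxonomy and exposes only the three accessor methods, while the concrete implementation stays package-private under o.a.l.facet.taxonomy.directory. A minimal sketch of client code after the move, assuming the arrays are obtained through a TaxonomyReader accessor such as getParallelTaxonomyArrays() (that accessor is not shown in this diff, so treat the call as an assumption):

import java.io.IOException;

import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.store.Directory;

// Illustrative only: prints the parent ordinal of every category in a taxonomy index.
public class PrintParentsExample {
  public static void printParents(Directory taxoDir) throws IOException {
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    try {
      // The import now comes from o.a.l.facet.taxonomy instead of o.a.l.facet.taxonomy.directory.
      ParallelTaxonomyArrays arrays = taxoReader.getParallelTaxonomyArrays();
      int[] parents = arrays.parents();
      for (int ordinal = 0; ordinal < parents.length; ordinal++) {
        System.out.println("ordinal " + ordinal + " -> parent " + parents[ordinal]);
      }
    } finally {
      taxoReader.close();
    }
  }
}

For such callers only the import changes; the returned object is now typed by the abstract class rather than by the directory-package implementation.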
@@ -7,8 +7,8 @@ import java.util.Collections;
import java.util.Comparator;

import org.apache.lucene.facet.search.FacetRequest.SortOrder;
+import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.facet.taxonomy.directory.ParallelTaxonomyArrays;
import org.apache.lucene.util.PriorityQueue;

/*
@@ -150,7 +150,7 @@ public class DrillSideways {
searcher.search(dsq, hitCollector);

int numDims = drillDownDims.size();
-List<FacetResult>[] drillSidewaysResults = (List<FacetResult>[]) new List[numDims];
+List<FacetResult>[] drillSidewaysResults = new List[numDims];
List<FacetResult> drillDownResults = null;

List<FacetResult> mergedResults = new ArrayList<FacetResult>();
@@ -15,8 +15,8 @@ import org.apache.lucene.facet.search.FacetRequest.FacetArraysSource;
import org.apache.lucene.facet.search.FacetRequest.ResultMode;
import org.apache.lucene.facet.search.FacetRequest.SortOrder;
import org.apache.lucene.facet.search.FacetsCollector.MatchingDocs;
+import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.facet.taxonomy.directory.ParallelTaxonomyArrays;
import org.apache.lucene.index.IndexReader;

/*
@@ -5,8 +5,8 @@ import java.util.ArrayList;

import org.apache.lucene.facet.partitions.IntermediateFacetResult;
import org.apache.lucene.facet.partitions.PartitionsFacetResultsHandler;
+import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.facet.taxonomy.directory.ParallelTaxonomyArrays;
import org.apache.lucene.facet.util.ResultSortUtils;

/*
@@ -9,8 +9,8 @@ import org.apache.lucene.facet.collections.IntToObjectMap;
import org.apache.lucene.facet.partitions.IntermediateFacetResult;
import org.apache.lucene.facet.partitions.PartitionsFacetResultsHandler;
import org.apache.lucene.facet.search.FacetRequest.SortOrder;
+import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
-import org.apache.lucene.facet.taxonomy.directory.ParallelTaxonomyArrays;
import org.apache.lucene.util.PriorityQueue;

/*
@@ -0,0 +1,63 @@
package org.apache.lucene.facet.taxonomy;


/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Returns 3 arrays for traversing the taxonomy:
 * <ul>
 * <li>{@code parents}: {@code parents[i]} denotes the parent of category
 * ordinal {@code i}.</li>
 * <li>{@code children}: {@code children[i]} denotes a child of category ordinal
 * {@code i}.</li>
 * <li>{@code siblings}: {@code siblings[i]} denotes the sibling of category
 * ordinal {@code i}.</li>
 * </ul>
 *
 * To traverse the taxonomy tree, you typically start with {@code children[0]}
 * (ordinal 0 is reserved for ROOT), and then, depending on whether you want to
 * do DFS or BFS, you call {@code children[children[0]]} or
 * {@code siblings[children[0]]} and so forth, respectively.
 *
 * <p>
 * <b>NOTE:</b> you are not expected to modify the values of the arrays, since
 * the arrays are shared with other threads.
 *
 * @lucene.experimental
 */
public abstract class ParallelTaxonomyArrays {

  /**
   * Returns the parents array, where {@code parents[i]} denotes the parent of
   * category ordinal {@code i}.
   */
  public abstract int[] parents();

  /**
   * Returns the children array, where {@code children[i]} denotes a child of
   * category ordinal {@code i}.
   */
  public abstract int[] children();

  /**
   * Returns the siblings array, where {@code siblings[i]} denotes the sibling
   * of category ordinal {@code i}.
   */
  public abstract int[] siblings();

}
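The javadoc above encodes the tree as linked lists over ordinals: children[i] points at one child of i, and siblings[i] points at the next child of parents[i], so walking a sibling chain enumerates all children of a node (the concrete implementation's javadoc later in this diff spells this out as "youngest child" and "previous youngest child"). A minimal traversal sketch, under the assumptions that "no child / no sibling" is marked with TaxonomyReader.INVALID_ORDINAL and that the walk starts at TaxonomyReader.ROOT_ORDINAL; the helper class and method names are illustrative only:

import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;

/** Illustrative helper, not part of the commit: depth-first walk over the parallel arrays. */
public class TaxonomyTraversalExample {

  /** Visits every descendant of {@code ordinal}, printing its depth and ordinal. */
  public static void dfs(ParallelTaxonomyArrays arrays, int ordinal, int depth) {
    int[] children = arrays.children();
    int[] siblings = arrays.siblings();
    // children[ordinal] is one child; following siblings[] enumerates the rest.
    for (int child = children[ordinal]; child != TaxonomyReader.INVALID_ORDINAL; child = siblings[child]) {
      System.out.println("depth=" + depth + " ordinal=" + child);
      dfs(arrays, child, depth + 1);
    }
  }

  public static void dfsFromRoot(ParallelTaxonomyArrays arrays) {
    dfs(arrays, TaxonomyReader.ROOT_ORDINAL, 0);
  }
}

Replacing the recursion with a queue over the sibling chains gives the BFS order the javadoc mentions.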
@@ -5,7 +5,6 @@ import java.io.IOException;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

-import org.apache.lucene.facet.taxonomy.directory.ParallelTaxonomyArrays;
import org.apache.lucene.store.AlreadyClosedException;

/*
@@ -7,6 +7,7 @@ import java.util.logging.Logger;

import org.apache.lucene.facet.collections.LRUHashMap;
import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader;
@@ -62,7 +63,7 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
private LRUHashMap<CategoryPath, Integer> ordinalCache;
private LRUHashMap<Integer, CategoryPath> categoryCache;

-private volatile ParallelTaxonomyArrays taxoArrays;
+private volatile TaxonomyIndexArrays taxoArrays;

private char delimiter = Consts.DEFAULT_DELIMITER;
@@ -73,7 +74,7 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
 */
DirectoryTaxonomyReader(DirectoryReader indexReader, DirectoryTaxonomyWriter taxoWriter,
LRUHashMap<CategoryPath,Integer> ordinalCache, LRUHashMap<Integer,CategoryPath> categoryCache,
-ParallelTaxonomyArrays taxoArrays) throws IOException {
+TaxonomyIndexArrays taxoArrays) throws IOException {
this.indexReader = indexReader;
this.taxoWriter = taxoWriter;
this.taxoEpoch = taxoWriter == null ? -1 : taxoWriter.getTaxonomyEpoch();
@@ -82,7 +83,7 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
this.ordinalCache = ordinalCache == null ? new LRUHashMap<CategoryPath,Integer>(DEFAULT_CACHE_VALUE) : ordinalCache;
this.categoryCache = categoryCache == null ? new LRUHashMap<Integer,CategoryPath>(DEFAULT_CACHE_VALUE) : categoryCache;

-this.taxoArrays = taxoArrays != null ? new ParallelTaxonomyArrays(indexReader, taxoArrays) : null;
+this.taxoArrays = taxoArrays != null ? new TaxonomyIndexArrays(indexReader, taxoArrays) : null;
}

/**
@@ -130,7 +131,7 @@ public class DirectoryTaxonomyReader extends TaxonomyReader {
// according to Java Concurrency in Practice, this might perform better on
// some JVMs, because the array initialization doesn't happen on the
// volatile member.
-ParallelTaxonomyArrays tmpArrays = new ParallelTaxonomyArrays(indexReader);
+TaxonomyIndexArrays tmpArrays = new TaxonomyIndexArrays(indexReader);
taxoArrays = tmpArrays;
}
}
@@ -129,7 +129,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
 */
private volatile boolean cacheIsComplete;
private volatile boolean isClosed = false;
-private volatile ParallelTaxonomyArrays taxoArrays;
+private volatile TaxonomyIndexArrays taxoArrays;
private volatile int nextID;

/** Reads the commit data from a Directory. */
@@ -744,7 +744,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
}
}

-private ParallelTaxonomyArrays getTaxoArrays() throws IOException {
+private TaxonomyIndexArrays getTaxoArrays() throws IOException {
if (taxoArrays == null) {
synchronized (this) {
if (taxoArrays == null) {
@@ -754,7 +754,7 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
// according to Java Concurrency, this might perform better on some
// JVMs, since the object initialization doesn't happen on the
// volatile member.
-ParallelTaxonomyArrays tmpArrays = new ParallelTaxonomyArrays(reader);
+TaxonomyIndexArrays tmpArrays = new TaxonomyIndexArrays(reader);
taxoArrays = tmpArrays;
} finally {
readerManager.release(reader);
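The two hunks above keep DirectoryTaxonomyWriter's lazy initialization of taxoArrays intact and only swap in the new concrete type: the field is volatile, the null check is repeated inside a synchronized block, and the instance is built into a local variable before being published. A condensed, self-contained sketch of that pattern; ExpensiveArrays is a placeholder for TaxonomyIndexArrays, and the reader acquire/release from the real method is elided:

// Self-contained sketch of the double-checked lazy initialization used for taxoArrays.
public class LazyArraysHolder {

  static final class ExpensiveArrays {
    final int[] parents = new int[16]; // stands in for arrays built from the taxonomy index
  }

  // volatile is what makes the fully constructed object safely visible to other threads
  private volatile ExpensiveArrays taxoArrays;

  ExpensiveArrays getTaxoArrays() {
    if (taxoArrays == null) {            // fast path, no locking once initialized
      synchronized (this) {
        if (taxoArrays == null) {        // re-check under the lock
          // Build into a local first, then publish: per the comment in the diff
          // (Java Concurrency in Practice), the initialization work does not go
          // through the volatile member.
          ExpensiveArrays tmpArrays = new ExpensiveArrays();
          taxoArrays = tmpArrays;
        }
      }
    }
    return taxoArrays;
  }
}

The same shape appears in DirectoryTaxonomyReader above; this commit only changes the element type, not the pattern.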
@@ -2,6 +2,7 @@ package org.apache.lucene.facet.taxonomy.directory;

import java.io.IOException;

+import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DocsAndPositionsEnum;
@@ -28,30 +29,12 @@ import org.apache.lucene.util.ArrayUtil;
*/

/**
-* Returns 3 arrays for traversing the taxonomy:
-* <ul>
-* <li>{@code parents}: {@code parents[i]} denotes the parent of category
-* ordinal {@code i}.</li>
-* <li>{@code children}: {@code children[i]} denotes the youngest child of
-* category ordinal {@code i}. The youngest child is defined as the category
-* that was added last to the taxonomy as an immediate child of {@code i}.</li>
-* <li>{@code siblings}: {@code siblings[i]} denotes the sibling of category
-* ordinal {@code i}. The sibling is defined as the previous youngest child of
-* {@code parents[i]}.</li>
-* </ul>
-*
-* To traverse the taxonomy tree, you typically start with {@code children[0]}
-* (ordinal 0 is reserved for ROOT), and then depends if you want to do DFS or
-* BFS, you call {@code children[children[0]]} or {@code siblings[children[0]]}
-* and so forth, respectively.
-*
-* <p>
-* <b>NOTE:</b> you are not expected to modify the values of the arrays, since
-* the arrays are shared with other threads.
+* A {@link ParallelTaxonomyArrays} that is initialized from the taxonomy
+* index.
*
* @lucene.experimental
*/
-public class ParallelTaxonomyArrays {
+class TaxonomyIndexArrays extends ParallelTaxonomyArrays {

private final int[] parents;
@@ -63,11 +46,11 @@ public class ParallelTaxonomyArrays {
private int[] children, siblings;

/** Used by {@link #add(int, int)} after the array grew. */
-private ParallelTaxonomyArrays(int[] parents) {
+private TaxonomyIndexArrays(int[] parents) {
this.parents = parents;
}

-public ParallelTaxonomyArrays(IndexReader reader) throws IOException {
+public TaxonomyIndexArrays(IndexReader reader) throws IOException {
parents = new int[reader.maxDoc()];
if (parents.length > 0) {
initParents(reader, 0);
@@ -82,7 +65,7 @@ public class ParallelTaxonomyArrays {
}
}

-public ParallelTaxonomyArrays(IndexReader reader, ParallelTaxonomyArrays copyFrom) throws IOException {
+public TaxonomyIndexArrays(IndexReader reader, TaxonomyIndexArrays copyFrom) throws IOException {
assert copyFrom != null;

// note that copyParents.length may be equal to reader.maxDoc(). this is not a bug
@@ -99,7 +82,7 @@ public class ParallelTaxonomyArrays {
}
}

-private final synchronized void initChildrenSiblings(ParallelTaxonomyArrays copyFrom) {
+private final synchronized void initChildrenSiblings(TaxonomyIndexArrays copyFrom) {
if (!initializedChildren) { // must do this check !
children = new int[parents.length];
siblings = new int[parents.length];
@@ -180,11 +163,11 @@ public class ParallelTaxonomyArrays {
* <p>
* <b>NOTE:</b> you should call this method from thread-safe code.
*/
-ParallelTaxonomyArrays add(int ordinal, int parentOrdinal) {
+TaxonomyIndexArrays add(int ordinal, int parentOrdinal) {
if (ordinal >= parents.length) {
int[] newarray = ArrayUtil.grow(parents, ordinal + 1);
newarray[ordinal] = parentOrdinal;
-return new ParallelTaxonomyArrays(newarray);
+return new TaxonomyIndexArrays(newarray);
}
parents[ordinal] = parentOrdinal;
return this;
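add() never grows the parents array in place: when the ordinal does not fit, it copies into a larger array via ArrayUtil.grow and returns a new instance, leaving the old instance untouched for any reader still holding it, so callers must always keep the returned object. A toy, self-contained illustration of that contract; GrowOnlyParents is not part of the commit and java.util.Arrays.copyOf stands in for ArrayUtil.grow:

import java.util.Arrays;

// Toy stand-in for the grow-by-copy contract of TaxonomyIndexArrays.add():
// growth produces a new object, so the caller must use the returned instance.
final class GrowOnlyParents {
  private final int[] parents;

  GrowOnlyParents(int[] parents) {
    this.parents = parents;
  }

  GrowOnlyParents add(int ordinal, int parentOrdinal) {
    if (ordinal >= parents.length) {
      int[] newArray = Arrays.copyOf(parents, ordinal + 1); // copy into a larger array
      newArray[ordinal] = parentOrdinal;
      return new GrowOnlyParents(newArray);                 // old instance stays untouched
    }
    parents[ordinal] = parentOrdinal;                       // in-place when it already fits
    return this;
  }

  int parentOf(int ordinal) {
    return parents[ordinal];
  }
}

Typical use is holder = holder.add(ordinal, parentOrdinal); dropping the returned value would silently lose any ordinal that triggered a grow.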
@@ -194,6 +177,7 @@ public class ParallelTaxonomyArrays {
* Returns the parents array, where {@code parents[i]} denotes the parent of
* category ordinal {@code i}.
*/
+@Override
public int[] parents() {
return parents;
}
@@ -204,6 +188,7 @@ public class ParallelTaxonomyArrays {
* category that was added last to the taxonomy as an immediate child of
* {@code i}.
*/
+@Override
public int[] children() {
if (!initializedChildren) {
initChildrenSiblings(null);
@@ -218,6 +203,7 @@ public class ParallelTaxonomyArrays {
* of category ordinal {@code i}. The sibling is defined as the previous
* youngest child of {@code parents[i]}.
*/
+@Override
public int[] siblings() {
if (!initializedChildren) {
initChildrenSiblings(null);
@@ -22,9 +22,9 @@ import java.io.IOException;
import java.io.PrintStream;

import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.facet.taxonomy.ParallelTaxonomyArrays;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
-import org.apache.lucene.facet.taxonomy.directory.ParallelTaxonomyArrays;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
@@ -11,7 +11,6 @@ import org.apache.lucene.facet.FacetTestCase;
import org.apache.lucene.facet.SlowRAMDirectory;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
-import org.apache.lucene.facet.taxonomy.directory.ParallelTaxonomyArrays;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.RAMDirectory;