mirror of https://github.com/apache/lucene.git

LUCENE-3485: port 3x to trunk

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1179194 13f79535-47bb-0310-9956-ffa450edef68

parent 26abda038e
commit e910534b7a
org/apache/lucene/facet/taxonomy/lucene/LuceneTaxonomyReader.java

@@ -1,6 +1,5 @@
 package org.apache.lucene.facet.taxonomy.lucene;
 
-import java.io.File;
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.Map;
@@ -10,17 +9,16 @@ import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
-import org.apache.lucene.index.CorruptIndexException;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.MultiFields;
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
-
 import org.apache.lucene.facet.taxonomy.CategoryPath;
 import org.apache.lucene.facet.taxonomy.TaxonomyReader;
 import org.apache.lucene.facet.taxonomy.lucene.Consts.LoadFullPathOnly;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.DocsEnum;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.MultiFields;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.store.AlreadyClosedException;
+import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.collections.LRUHashMap;
@@ -102,6 +100,8 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
 
   private char delimiter = Consts.DEFAULT_DELIMITER;
 
+  private volatile boolean closed = false;
+
   /**
    * Open for reading a taxonomy stored in a given {@link Directory}.
    * @param directory
@@ -129,40 +129,15 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
     return IndexReader.open(directory);
   }
 
-  // convenience constructors... deprecated because they cause confusion
-  // because they use parent directory instead of the actual directory.
-  private Directory ourDirectory = null; // remember directory to close later, but only if we opened it here
   /**
-   * Open for reading a taxonomy stored in a subdirectory of a given
-   * directory on the file system.
-   * @param parentDir The parent directory of the taxonomy's directory
-   * (usually this would be the directory holding the index).
-   * @param name The name of the taxonomy, and the subdirectory holding it.
-   * @throws CorruptIndexException if the Taxonomy is corrupted.
-   * @throws IOException if another error occurred.
-   */
-  @Deprecated
-  public LuceneTaxonomyReader(File parentDir, String name)
-  throws CorruptIndexException, IOException {
-    this(FSDirectory.open(new File(parentDir, name)));
-    ourDirectory = indexReader.directory(); // remember to close the directory we opened
+   * @throws AlreadyClosedException if this IndexReader is closed
+   */
+  protected final void ensureOpen() throws AlreadyClosedException {
+    if (indexReader.getRefCount() <= 0) {
+      throw new AlreadyClosedException("this TaxonomyReader is closed");
+    }
   }
-
-  /**
-   * Open for reading a taxonomy stored in a subdirectory of a given
-   * directory on the file system.
-   * @param parentDir The parent directory of the taxonomy's directory.
-   * @param name The name of the taxonomy, and the subdirectory holding it.
-   * @throws CorruptIndexException if the Taxonomy is corrupted.
-   * @throws IOException if another error occurred.
-   */
-  @Deprecated
-  public LuceneTaxonomyReader(String parentDir, String name)
-  throws CorruptIndexException, IOException {
-    this(FSDirectory.open(new File(parentDir, name)));
-    ourDirectory = indexReader.directory(); // rememebr to close the directory we opened
-  }
 
   /**
    * setCacheSize controls the maximum allowed size of each of the caches
    * used by {@link #getPath(int)} and {@link #getOrdinal(CategoryPath)}.
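Note, as an illustration rather than part of the commit: the new ensureOpen() guard above keys off the wrapped IndexReader's reference count, and the hunks that follow add a call to it at the top of every public method. A minimal standalone sketch of the same fail-fast pattern, with made-up class and method names and a plain IllegalStateException standing in for Lucene's AlreadyClosedException:

import java.util.concurrent.atomic.AtomicInteger;

// Illustrative sketch only: a resource that refuses to do work once its
// reference count drops to zero, mirroring the ensureOpen() guard above.
class GuardedResource {

  private final AtomicInteger refCount = new AtomicInteger(1); // the creator holds one reference

  protected final void ensureOpen() {
    if (refCount.get() <= 0) {
      throw new IllegalStateException("this resource is closed");
    }
  }

  public void incRef() {
    ensureOpen();
    refCount.incrementAndGet();
  }

  public void decRef() {
    ensureOpen();
    refCount.decrementAndGet(); // the real reader releases resources when the count hits zero (see later hunks)
  }

  public int size() {
    ensureOpen();               // every public method checks the guard first
    return 0;                   // placeholder for real work
  }
}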
@@ -173,6 +148,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
    * @param size the new maximum cache size, in number of entries.
    */
   public void setCacheSize(int size) {
+    ensureOpen();
     synchronized(getCategoryCache) {
       getCategoryCache.setMaxSize(size);
     }
@@ -192,10 +168,12 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
    * LuceneTaxonomyReader objects you create.
    */
   public void setDelimiter(char delimiter) {
+    ensureOpen();
     this.delimiter = delimiter;
   }
 
   public int getOrdinal(CategoryPath categoryPath) throws IOException {
+    ensureOpen();
     if (categoryPath.length()==0) {
       return ROOT_ORDINAL;
     }
@@ -239,6 +217,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
   }
 
   public CategoryPath getPath(int ordinal) throws CorruptIndexException, IOException {
+    ensureOpen();
     // TODO (Facet): Currently, the LRU cache we use (getCategoryCache) holds
     // strings with delimiters, not CategoryPath objects, so even if
     // we have a cache hit, we need to process the string and build a new
@@ -255,6 +234,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
   }
 
   public boolean getPath(int ordinal, CategoryPath result) throws CorruptIndexException, IOException {
+    ensureOpen();
     String label = getLabel(ordinal);
     if (label==null) {
       return false;
@@ -265,6 +245,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
   }
 
   private String getLabel(int catID) throws CorruptIndexException, IOException {
+    ensureOpen();
     // First try to find the answer in the LRU cache. It is very
     // unfortunate that we need to allocate an Integer object here -
     // it would have been better if we used a hash table specifically
@@ -314,6 +295,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
   }
 
   public int getParent(int ordinal) {
+    ensureOpen();
     // Note how we don't need to hold the read lock to do the following,
     // because the array reference is volatile, ensuring the correct
     // visibility and ordering: if we get the new reference, the new
@@ -344,6 +326,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
    */
 
   public int[] getParentArray() {
+    ensureOpen();
     // Note how we don't need to hold the read lock to do the following,
     // because the array reference is volatile, ensuring the correct
     // visibility and ordering: if we get the new reference, the new
@@ -355,6 +338,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
   // method in this class) to ensure that it never gets called concurrently
   // with itself.
   public synchronized void refresh() throws IOException {
+    ensureOpen();
     /*
      * Since refresh() can be a lengthy operation, it is very important that we
      * avoid locking out all readers for its duration. This is why we don't hold
@@ -416,13 +400,25 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
   }
 
   public void close() throws IOException {
-    indexReader.close();
-    if (ourDirectory!=null) {
-      ourDirectory.close();
+    if (!closed) {
+      decRef();
+      closed = true;
     }
   }
 
+  /** Do the actual closing, free up resources */
+  private void doClose() throws IOException {
+    indexReader.close();
+    closed = true;
+
+    parentArray = null;
+    childrenArrays = null;
+    getCategoryCache.clear();
+    getOrdinalCache.clear();
+  }
+
   public int getSize() {
+    ensureOpen();
     indexReaderLock.readLock().lock();
     try {
       return indexReader.numDocs();
@@ -432,6 +428,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
   }
 
   public Map<String, String> getCommitUserData() {
+    ensureOpen();
     return indexReader.getCommitUserData();
   }
 
@@ -439,6 +436,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
   Object childrenArraysRebuild = new Object();
 
   public ChildrenArrays getChildrenArrays() {
+    ensureOpen();
     // Check if the taxonomy grew since we built the array, and if it
     // did, create new (and larger) arrays and fill them as required.
     // We do all this under a lock, two prevent to concurrent calls to
@@ -492,6 +490,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
   }
 
   public String toString(int max) {
+    ensureOpen();
     StringBuilder sb = new StringBuilder();
     int upperl = Math.min(max, this.indexReader.maxDoc());
     for (int i = 0; i < upperl; i++) {
@@ -537,6 +536,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
    * @return lucene indexReader
    */
   IndexReader getInternalIndexReader() {
+    ensureOpen();
     return this.indexReader;
   }
 
@@ -547,13 +547,20 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
    * @throws IOException
    */
   public void decRef() throws IOException {
-    this.indexReader.decRef();
+    ensureOpen();
+    if (indexReader.getRefCount() == 1) {
+      // Do not decRef the indexReader - doClose does it by calling reader.close()
+      doClose();
+    } else {
+      indexReader.decRef();
+    }
   }
 
   /**
    * Expert: returns the current refCount for this taxonomy reader
    */
   public int getRefCount() {
+    ensureOpen();
     return this.indexReader.getRefCount();
   }
 
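Note, again as an illustration only: close(), decRef() and doClose() above combine into an idempotent, reference-counted shutdown. close() gives up the caller's reference at most once, decRef() hands the real cleanup to doClose() when the last reference goes away, and doClose() closes the IndexReader and clears the caches. A minimal standalone sketch of that interplay, with a private counter standing in for the IndexReader's refCount and invented names throughout:

import java.util.concurrent.atomic.AtomicInteger;

// Illustrative sketch only: idempotent close() on top of reference counting,
// mirroring the close()/decRef()/doClose() combination added in this commit.
class RefCountedReader {

  private final AtomicInteger refCount = new AtomicInteger(1);
  private volatile boolean closed = false;

  public void incRef() {
    refCount.incrementAndGet();
  }

  public void decRef() {
    if (refCount.decrementAndGet() == 0) {
      doClose();               // the last reference is gone: actually release resources
    }
  }

  public void close() {
    if (!closed) {             // a second close() is a harmless no-op
      decRef();
      closed = true;
    }
  }

  private void doClose() {
    // release the underlying index reader, null out arrays, clear caches ...
  }
}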
@@ -565,6 +572,7 @@ public class LuceneTaxonomyReader implements TaxonomyReader {
    * otherwise the reader may never be closed.
    */
   public void incRef() {
+    ensureOpen();
     this.indexReader.incRef();
   }
 }
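Note, a hypothetical caller-side sketch rather than code from this commit: the incRef() javadoc above warns that each incRef() must eventually be matched by a decRef(), otherwise the reader may never be closed. One way a caller could honor that contract (the helper class and method names are invented for the example):

import java.io.IOException;

import org.apache.lucene.facet.taxonomy.lucene.LuceneTaxonomyReader;

// Hypothetical usage sketch: TaxonomyReaderUsage and printTaxonomySize are
// invented names, not part of this commit or of Lucene.
class TaxonomyReaderUsage {

  static void printTaxonomySize(LuceneTaxonomyReader taxo) throws IOException {
    taxo.incRef();            // take our own reference before using the shared reader
    try {
      System.out.println("taxonomy size: " + taxo.getSize());
    } finally {
      taxo.decRef();          // always matched with the incRef() above
    }
  }
}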
org/apache/lucene/facet/taxonomy/lucene/TestLuceneTaxonomyReader.java (new file)

@@ -0,0 +1,78 @@
+package org.apache.lucene.facet.taxonomy.lucene;
+
+import org.apache.lucene.facet.taxonomy.CategoryPath;
+import org.apache.lucene.store.AlreadyClosedException;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.LuceneTestCase;
+import org.junit.Test;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+public class TestLuceneTaxonomyReader extends LuceneTestCase {
+
+  @Test
+  public void testCloseAfterIncRef() throws Exception {
+    Directory dir = newDirectory();
+    LuceneTaxonomyWriter ltw = new LuceneTaxonomyWriter(dir);
+    ltw.addCategory(new CategoryPath("a"));
+    ltw.close();
+
+    LuceneTaxonomyReader ltr = new LuceneTaxonomyReader(dir);
+    ltr.incRef();
+    ltr.close();
+
+    // should not fail as we incRef() before close
+    ltr.getSize();
+    ltr.decRef();
+
+    dir.close();
+  }
+
+  @Test
+  public void testCloseTwice() throws Exception {
+    Directory dir = newDirectory();
+    LuceneTaxonomyWriter ltw = new LuceneTaxonomyWriter(dir);
+    ltw.addCategory(new CategoryPath("a"));
+    ltw.close();
+
+    LuceneTaxonomyReader ltr = new LuceneTaxonomyReader(dir);
+    ltr.close();
+    ltr.close(); // no exception should be thrown
+
+    dir.close();
+  }
+
+  @Test
+  public void testAlreadyClosed() throws Exception {
+    Directory dir = newDirectory();
+    LuceneTaxonomyWriter ltw = new LuceneTaxonomyWriter(dir);
+    ltw.addCategory(new CategoryPath("a"));
+    ltw.close();
+
+    LuceneTaxonomyReader ltr = new LuceneTaxonomyReader(dir);
+    ltr.close();
+    try {
+      ltr.getSize();
+      fail("An AlreadyClosedException should have been thrown here");
+    } catch (AlreadyClosedException ace) {
+      // good!
+    }
+    dir.close();
+  }
+
+}