LUCENE-4165: Removed closing of Reader from HunspellDictionary so InputStreams are not closed

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1362371 13f79535-47bb-0310-9956-ffa450edef68
Christopher John Male 2012-07-17 05:46:32 +00:00
parent 27c545432c
commit 75504b9d7a
4 changed files with 122 additions and 26 deletions

CHANGES.txt

@@ -1213,6 +1213,10 @@ Build
   tasks) to correctly encode build file names as URIs for later processing by
   XSL. (Greg Bowyer, Uwe Schindler)
 
+* LUCENE-4165: Removed closing of the Reader used to read the affix file in
+  HunspellDictionary. Consumers are now responsible for closing all InputStreams
+  once the Dictionary has been instantiated. (Torsten Krah, Uwe Schindler, Chris Male)
+
 ======================= Lucene 3.6.0 =======================
 
 Changes in backwards compatibility policy
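
On the consumer side the new contract is: open the streams, build the dictionary, then close the streams yourself once construction has finished. A minimal sketch of that pattern (not part of this commit; the resource names and the Version constant are only illustrative):

    // Sketch only: HunspellDictionary reads both streams in its constructor
    // but, as of LUCENE-4165, no longer closes them.
    InputStream affix = getClass().getResourceAsStream("en_US.aff");  // hypothetical resources
    InputStream dic = getClass().getResourceAsStream("en_US.dic");
    try {
      HunspellDictionary dictionary = new HunspellDictionary(affix, dic, Version.LUCENE_40);
      // ... use dictionary ...
    } finally {
      // Closing is now the caller's responsibility.
      dic.close();
      affix.close();
    }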

HunspellDictionary.java

@@ -66,10 +66,11 @@ public class HunspellDictionary {
   /**
    * Creates a new HunspellDictionary containing the information read from the provided InputStreams to hunspell affix
-   * and dictionary files
+   * and dictionary files.
+   * You have to close the provided InputStreams yourself.
    *
-   * @param affix InputStream for reading the hunspell affix file
-   * @param dictionary InputStream for reading the hunspell dictionary file
+   * @param affix InputStream for reading the hunspell affix file (won't be closed).
+   * @param dictionary InputStream for reading the hunspell dictionary file (won't be closed).
    * @param version Lucene Version
    * @throws IOException Can be thrown while reading from the InputStreams
    * @throws ParseException Can be thrown if the content of the files does not meet expected formats
@@ -80,10 +81,11 @@ public class HunspellDictionary {
   /**
    * Creates a new HunspellDictionary containing the information read from the provided InputStreams to hunspell affix
-   * and dictionary files
+   * and dictionary files.
+   * You have to close the provided InputStreams yourself.
    *
-   * @param affix InputStream for reading the hunspell affix file
-   * @param dictionary InputStream for reading the hunspell dictionary file
+   * @param affix InputStream for reading the hunspell affix file (won't be closed).
+   * @param dictionary InputStream for reading the hunspell dictionary file (won't be closed).
    * @param version Lucene Version
    * @param ignoreCase If true, dictionary matching will be case insensitive
    * @throws IOException Can be thrown while reading from the InputStreams
@@ -95,10 +97,11 @@ public class HunspellDictionary {
   /**
    * Creates a new HunspellDictionary containing the information read from the provided InputStreams to hunspell affix
-   * and dictionary files
+   * and dictionary files.
+   * You have to close the provided InputStreams yourself.
    *
-   * @param affix InputStream for reading the hunspell affix file
-   * @param dictionaries InputStreams for reading the hunspell dictionary file
+   * @param affix InputStream for reading the hunspell affix file (won't be closed).
+   * @param dictionaries InputStreams for reading the hunspell dictionary file (won't be closed).
    * @param version Lucene Version
    * @param ignoreCase If true, dictionary matching will be case insensitive
    * @throws IOException Can be thrown while reading from the InputStreams
@@ -110,10 +113,11 @@ public class HunspellDictionary {
   /**
    * Creates a new HunspellDictionary containing the information read from the provided InputStreams to hunspell affix
-   * and dictionary files
+   * and dictionary files.
+   * You have to close the provided InputStreams yourself.
    *
-   * @param affix InputStream for reading the hunspell affix file
-   * @param dictionaries InputStreams for reading the hunspell dictionary file
+   * @param affix InputStream for reading the hunspell affix file (won't be closed).
+   * @param dictionaries InputStreams for reading the hunspell dictionary file (won't be closed).
    * @param version Lucene Version
    * @param ignoreCase If true, dictionary matching will be case insensitive
    * @param strictAffixParsing Affix strict parsing enabled or not (an error while reading a rule causes exception or is ignored)
@@ -194,7 +198,6 @@ public class HunspellDictionary {
         flagParsingStrategy = getFlagParsingStrategy(line);
       }
     }
-    reader.close();
   }
 
   /**
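
The same rule applies to the constructors that take several dictionary streams: every InputStream stays open until the caller closes it. A rough sketch using Lucene's IOUtils, mirroring the pattern the factory below now uses (not part of this commit; the loader variable and file names are hypothetical):

    // Hypothetical ResourceLoader-based caller; names are made up for illustration.
    InputStream affix = null;
    List<InputStream> dictionaries = new ArrayList<InputStream>();
    try {
      dictionaries.add(loader.openResource("words_a.dic"));
      dictionaries.add(loader.openResource("words_b.dic"));
      affix = loader.openResource("rules.aff");
      HunspellDictionary dictionary =
          new HunspellDictionary(affix, dictionaries, Version.LUCENE_40, true, true);
      // ... use dictionary ...
    } finally {
      // Close everything, suppressing close() exceptions so they cannot mask an earlier failure.
      IOUtils.closeWhileHandlingException(affix);
      IOUtils.closeWhileHandlingException(dictionaries);
    }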

HunspellDictionaryTest.java

@@ -17,19 +17,98 @@ package org.apache.lucene.analysis.hunspell;
  * limitations under the License.
  */
 
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.Version;
+import org.junit.Assert;
+import org.junit.Test;
+
 import java.io.IOException;
 import java.io.InputStream;
 import java.text.ParseException;
 import java.util.Arrays;
 
 import static junit.framework.Assert.assertEquals;
 
-import org.apache.lucene.util.LuceneTestCase;
-import org.junit.Assert;
-import org.junit.Test;
-
 public class HunspellDictionaryTest extends LuceneTestCase {
 
+  private class CloseCheckInputStream extends InputStream {
+    private InputStream delegate;
+    private boolean closed = false;
+
+    public CloseCheckInputStream(InputStream delegate) {
+      super();
+      this.delegate = delegate;
+    }
+
+    public int read() throws IOException {
+      return delegate.read();
+    }
+
+    public int hashCode() {
+      return delegate.hashCode();
+    }
+
+    public int read(byte[] b) throws IOException {
+      return delegate.read(b);
+    }
+
+    public boolean equals(Object obj) {
+      return delegate.equals(obj);
+    }
+
+    public int read(byte[] b, int off, int len) throws IOException {
+      return delegate.read(b, off, len);
+    }
+
+    public long skip(long n) throws IOException {
+      return delegate.skip(n);
+    }
+
+    public String toString() {
+      return delegate.toString();
+    }
+
+    public int available() throws IOException {
+      return delegate.available();
+    }
+
+    public void close() throws IOException {
+      this.closed = true;
+      delegate.close();
+    }
+
+    public void mark(int readlimit) {
+      delegate.mark(readlimit);
+    }
+
+    public void reset() throws IOException {
+      delegate.reset();
+    }
+
+    public boolean markSupported() {
+      return delegate.markSupported();
+    }
+
+    public boolean isClosed() {
+      return this.closed;
+    }
+  }
+
+  @Test
+  public void testResourceCleanup() throws IOException, ParseException {
+    CloseCheckInputStream affixStream = new CloseCheckInputStream(getClass().getResourceAsStream("testCompressed.aff"));
+    CloseCheckInputStream dictStream = new CloseCheckInputStream(getClass().getResourceAsStream("testCompressed.dic"));
+
+    new HunspellDictionary(affixStream, dictStream, TEST_VERSION_CURRENT);
+
+    assertFalse(affixStream.isClosed());
+    assertFalse(dictStream.isClosed());
+
+    affixStream.close();
+    dictStream.close();
+
+    assertTrue(affixStream.isClosed());
+    assertTrue(dictStream.isClosed());
+  }
+
   @Test
   public void testHunspellDictionary_loadDicAff() throws IOException, ParseException {
@@ -40,7 +119,7 @@ public class HunspellDictionaryTest extends LuceneTestCase {
     assertEquals(3, dictionary.lookupSuffix(new char[]{'e'}, 0, 1).size());
     assertEquals(1, dictionary.lookupPrefix(new char[]{'s'}, 0, 1).size());
     assertEquals(1, dictionary.lookupWord(new char[]{'o', 'l', 'r'}, 0, 3).size());
+    affixStream.close();
+    dictStream.close();
   }
@@ -54,7 +133,7 @@ public class HunspellDictionaryTest extends LuceneTestCase {
     assertEquals(3, dictionary.lookupSuffix(new char[]{'e'}, 0, 1).size());
     assertEquals(1, dictionary.lookupPrefix(new char[]{'s'}, 0, 1).size());
     assertEquals(1, dictionary.lookupWord(new char[]{'o', 'l', 'r'}, 0, 3).size());
+    affixStream.close();
+    dictStream.close();
   }
@@ -69,7 +148,9 @@ public class HunspellDictionaryTest extends LuceneTestCase {
     assertEquals(1, dictionary.lookupPrefix(new char[]{'s'}, 0, 1).size());
     assertEquals(1, dictionary.lookupWord(new char[]{'o', 'l', 'r'}, 0, 3).size());
     //strict parsing disabled: malformed rule is not loaded
     assertNull(dictionary.lookupPrefix(new char[]{'a'}, 0, 1));
+    affixStream.close();
+    dictStream.close();
 
     affixStream = getClass().getResourceAsStream("testWrongAffixRule.aff");
     dictStream = getClass().getResourceAsStream("test.dic");
@@ -81,7 +162,7 @@ public class HunspellDictionaryTest extends LuceneTestCase {
       Assert.assertEquals("The affix file contains a rule with less than five elements", e.getMessage());
       Assert.assertEquals(23, e.getErrorOffset());
     }
+    affixStream.close();
+    dictStream.close();
   }

HunspellStemFilterFactory.java

@@ -28,6 +28,7 @@ import org.apache.lucene.analysis.util.InitializationException;
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
+import org.apache.lucene.util.IOUtils;
 
 /**
  * TokenFilterFactory that creates instances of {@link org.apache.lucene.analysis.hunspell.HunspellStemFilter}.
@@ -76,7 +77,6 @@ public class HunspellStemFilterFactory extends TokenFilterFactory implements Res
       else throw new InitializationException("Unknown value for " + PARAM_IGNORE_CASE + ": " + pic + ". Must be true or false");
     }
-
     String strictAffixParsingParam = args.get(PARAM_STRICT_AFFIX_PARSING);
     boolean strictAffixParsing = true;
     if(strictAffixParsingParam != null) {
@@ -85,14 +85,22 @@ public class HunspellStemFilterFactory extends TokenFilterFactory implements Res
       else throw new InitializationException("Unknown value for " + PARAM_STRICT_AFFIX_PARSING + ": " + strictAffixParsingParam + ". Must be true or false");
     }
 
+    InputStream affix = null;
+    List<InputStream> dictionaries = new ArrayList<InputStream>();
+
     try {
-      List<InputStream> dictionaries = new ArrayList<InputStream>();
+      dictionaries = new ArrayList<InputStream>();
       for (String file : dictionaryFiles) {
        dictionaries.add(loader.openResource(file));
       }
-      this.dictionary = new HunspellDictionary(loader.openResource(affixFile), dictionaries, luceneMatchVersion, ignoreCase, strictAffixParsing);
+      affix = loader.openResource(affixFile);
+      this.dictionary = new HunspellDictionary(affix, dictionaries, luceneMatchVersion, ignoreCase, strictAffixParsing);
     } catch (Exception e) {
       throw new InitializationException("Unable to load hunspell data! [dictionary=" + args.get("dictionary") + ",affix=" + affixFile + "]", e);
+    } finally {
+      IOUtils.closeWhileHandlingException(affix);
+      IOUtils.closeWhileHandlingException(dictionaries);
     }
   }