LUCENE-6662: Fixed potential resource leaks.

This commit is contained in:
Adrien Grand 2016-06-09 15:48:10 +02:00
parent 274dcdb5f6
commit 04b0a459ec
7 changed files with 106 additions and 103 deletions

View File

@@ -4,7 +4,10 @@ For more information on past and future Lucene versions, please see:
http://s.apache.org/luceneversions http://s.apache.org/luceneversions
======================= Lucene 6.2.0 ======================= ======================= Lucene 6.2.0 =======================
(No Changes)
Bug Fixes
LUCENE-6662: Fixed potential resource leaks. (Rishabh Patel via Adrien Grand)
======================= Lucene 6.1.0 ======================= ======================= Lucene 6.1.0 =======================

View File

@@ -148,19 +148,17 @@ class WordDictionary extends AbstractDictionary {
private void loadFromObjectInputStream(InputStream serialObjectInputStream) private void loadFromObjectInputStream(InputStream serialObjectInputStream)
throws IOException, ClassNotFoundException { throws IOException, ClassNotFoundException {
ObjectInputStream input = new ObjectInputStream(serialObjectInputStream); try (ObjectInputStream input = new ObjectInputStream(serialObjectInputStream)) {
wordIndexTable = (short[]) input.readObject(); wordIndexTable = (short[]) input.readObject();
charIndexTable = (char[]) input.readObject(); charIndexTable = (char[]) input.readObject();
wordItem_charArrayTable = (char[][][]) input.readObject(); wordItem_charArrayTable = (char[][][]) input.readObject();
wordItem_frequencyTable = (int[][]) input.readObject(); wordItem_frequencyTable = (int[][]) input.readObject();
// log.info("load core dict from serialization."); // log.info("load core dict from serialization.");
input.close(); }
} }
private void saveToObj(Path serialObj) { private void saveToObj(Path serialObj) {
try { try (ObjectOutputStream output = new ObjectOutputStream(Files.newOutputStream(serialObj))) {
ObjectOutputStream output = new ObjectOutputStream(Files.newOutputStream(
serialObj));
output.writeObject(wordIndexTable); output.writeObject(wordIndexTable);
output.writeObject(charIndexTable); output.writeObject(charIndexTable);
output.writeObject(wordItem_charArrayTable); output.writeObject(wordItem_charArrayTable);

View File

@@ -117,33 +117,32 @@ public class Compile {
} }
for (int i = 1; i < args.length; i++) { for (int i = 1; i < args.length; i++) {
LineNumberReader in;
// System.out.println("[" + args[i] + "]"); // System.out.println("[" + args[i] + "]");
Diff diff = new Diff(); Diff diff = new Diff();
allocTrie(); allocTrie();
System.out.println(args[i]); System.out.println(args[i]);
in = new LineNumberReader(Files.newBufferedReader(Paths.get(args[i]), Charset.forName(charset))); try (LineNumberReader in = new LineNumberReader(Files.newBufferedReader(Paths.get(args[i]), Charset.forName(charset)))) {
for (String line = in.readLine(); line != null; line = in.readLine()) { for (String line = in.readLine(); line != null; line = in.readLine()) {
try { try {
line = line.toLowerCase(Locale.ROOT); line = line.toLowerCase(Locale.ROOT);
StringTokenizer st = new StringTokenizer(line); StringTokenizer st = new StringTokenizer(line);
String stem = st.nextToken(); String stem = st.nextToken();
if (storeorig) { if (storeorig) {
trie.add(stem, "-a"); trie.add(stem, "-a");
}
while (st.hasMoreTokens()) {
String token = st.nextToken();
if (token.equals(stem) == false) {
trie.add(token, diff.exec(token, stem));
} }
while (st.hasMoreTokens()) {
String token = st.nextToken();
if (token.equals(stem) == false) {
trie.add(token, diff.exec(token, stem));
}
}
} catch (java.util.NoSuchElementException x) {
// no base token (stem) on a line
} }
} catch (java.util.NoSuchElementException x) {
// no base token (stem) on a line
} }
} }
in.close();
Optimizer o = new Optimizer(); Optimizer o = new Optimizer();
Optimizer2 o2 = new Optimizer2(); Optimizer2 o2 = new Optimizer2();
@@ -180,11 +179,11 @@ public class Compile {
trie.printInfo(System.out, prefix + " "); trie.printInfo(System.out, prefix + " ");
} }
DataOutputStream os = new DataOutputStream(new BufferedOutputStream( try (DataOutputStream os = new DataOutputStream(new BufferedOutputStream(
Files.newOutputStream(Paths.get(args[i] + ".out")))); Files.newOutputStream(Paths.get(args[i] + ".out"))))) {
os.writeUTF(args[0]); os.writeUTF(args[0]);
trie.store(os); trie.store(os);
os.close(); }
} }
} }

View File

@@ -98,25 +98,25 @@ public class DiffIt {
int nop = get(3, args[0]); int nop = get(3, args[0]);
for (int i = 1; i < args.length; i++) { for (int i = 1; i < args.length; i++) {
LineNumberReader in;
// System.out.println("[" + args[i] + "]"); // System.out.println("[" + args[i] + "]");
Diff diff = new Diff(ins, del, rep, nop); Diff diff = new Diff(ins, del, rep, nop);
String charset = System.getProperty("egothor.stemmer.charset", "UTF-8"); String charset = System.getProperty("egothor.stemmer.charset", "UTF-8");
in = new LineNumberReader(Files.newBufferedReader(Paths.get(args[i]), Charset.forName(charset))); try (LineNumberReader in = new LineNumberReader(Files.newBufferedReader(Paths.get(args[i]), Charset.forName(charset)))) {
for (String line = in.readLine(); line != null; line = in.readLine()) { for (String line = in.readLine(); line != null; line = in.readLine()) {
try { try {
line = line.toLowerCase(Locale.ROOT); line = line.toLowerCase(Locale.ROOT);
StringTokenizer st = new StringTokenizer(line); StringTokenizer st = new StringTokenizer(line);
String stem = st.nextToken(); String stem = st.nextToken();
System.out.println(stem + " -a"); System.out.println(stem + " -a");
while (st.hasMoreTokens()) { while (st.hasMoreTokens()) {
String token = st.nextToken(); String token = st.nextToken();
if (token.equals(stem) == false) { if (token.equals(stem) == false) {
System.out.println(stem + " " + diff.exec(token, stem)); System.out.println(stem + " " + diff.exec(token, stem));
}
} }
} catch (java.util.NoSuchElementException x) {
// no base token (stem) on a line
} }
} catch (java.util.NoSuchElementException x) {
// no base token (stem) on a line
} }
} }
} }

View File

@@ -34,12 +34,12 @@ public class CloseTaxonomyReaderTask extends PerfTask {
@Override @Override
public int doLogic() throws IOException { public int doLogic() throws IOException {
TaxonomyReader taxoReader = getRunData().getTaxonomyReader(); try (TaxonomyReader taxoReader = getRunData().getTaxonomyReader()) {
getRunData().setTaxonomyReader(null); getRunData().setTaxonomyReader(null);
if (taxoReader.getRefCount() != 1) { if (taxoReader.getRefCount() != 1) {
System.out.println("WARNING: CloseTaxonomyReader: reference count is currently " + taxoReader.getRefCount()); System.out.println("WARNING: CloseTaxonomyReader: reference count is currently " + taxoReader.getRefCount());
}
} }
taxoReader.close();
return 1; return 1;
} }

View File

@@ -902,18 +902,18 @@ public class DirectoryTaxonomyWriter implements TaxonomyWriter {
return map; return map;
} }
addDone(); // in case this wasn't previously called addDone(); // in case this wasn't previously called
DataInputStream in = new DataInputStream(new BufferedInputStream( try (DataInputStream in = new DataInputStream(new BufferedInputStream(
Files.newInputStream(tmpfile))); Files.newInputStream(tmpfile)))) {
map = new int[in.readInt()]; map = new int[in.readInt()];
// NOTE: The current code assumes here that the map is complete, // NOTE: The current code assumes here that the map is complete,
// i.e., every ordinal gets one and exactly one value. Otherwise, // i.e., every ordinal gets one and exactly one value. Otherwise,
// we may run into an EOF here, or vice versa, not read everything. // we may run into an EOF here, or vice versa, not read everything.
for (int i=0; i<map.length; i++) { for (int i=0; i<map.length; i++) {
int origordinal = in.readInt(); int origordinal = in.readInt();
int newordinal = in.readInt(); int newordinal = in.readInt();
map[origordinal] = newordinal; map[origordinal] = newordinal;
}
} }
in.close();
// Delete the temporary file, which is no longer needed. // Delete the temporary file, which is no longer needed.
Files.delete(tmpfile); Files.delete(tmpfile);

View File

@@ -231,51 +231,54 @@ public class JaspellTernarySearchTrie implements Accountable {
in = new BufferedReader(IOUtils.getDecodingReader(new GZIPInputStream( in = new BufferedReader(IOUtils.getDecodingReader(new GZIPInputStream(
Files.newInputStream(file)), StandardCharsets.UTF_8)); Files.newInputStream(file)), StandardCharsets.UTF_8));
else in = Files.newBufferedReader(file, StandardCharsets.UTF_8); else in = Files.newBufferedReader(file, StandardCharsets.UTF_8);
String word; try {
int pos; String word;
Float occur, one = new Float(1); int pos;
while ((word = in.readLine()) != null) { Float occur, one = new Float(1);
pos = word.indexOf("\t"); while ((word = in.readLine()) != null) {
occur = one; pos = word.indexOf("\t");
if (pos != -1) { occur = one;
occur = Float.parseFloat(word.substring(pos + 1).trim()); if (pos != -1) {
word = word.substring(0, pos); occur = Float.parseFloat(word.substring(pos + 1).trim());
} word = word.substring(0, pos);
String key = word.toLowerCase(locale); }
if (rootNode == null) { String key = word.toLowerCase(locale);
rootNode = new TSTNode(key.charAt(0), null); if (rootNode == null) {
} rootNode = new TSTNode(key.charAt(0), null);
TSTNode node = null; }
if (key.length() > 0 && rootNode != null) { TSTNode node = null;
TSTNode currentNode = rootNode; if (key.length() > 0 && rootNode != null) {
int charIndex = 0; TSTNode currentNode = rootNode;
while (true) { int charIndex = 0;
if (currentNode == null) break; while (true) {
int charComp = compareCharsAlphabetically(key.charAt(charIndex), if (currentNode == null) break;
currentNode.splitchar); int charComp = compareCharsAlphabetically(key.charAt(charIndex),
if (charComp == 0) { currentNode.splitchar);
charIndex++; if (charComp == 0) {
if (charIndex == key.length()) { charIndex++;
node = currentNode; if (charIndex == key.length()) {
break; node = currentNode;
break;
}
currentNode = currentNode.relatives[TSTNode.EQKID];
} else if (charComp < 0) {
currentNode = currentNode.relatives[TSTNode.LOKID];
} else {
currentNode = currentNode.relatives[TSTNode.HIKID];
} }
currentNode = currentNode.relatives[TSTNode.EQKID];
} else if (charComp < 0) {
currentNode = currentNode.relatives[TSTNode.LOKID];
} else {
currentNode = currentNode.relatives[TSTNode.HIKID];
} }
Float occur2 = null;
if (node != null) occur2 = ((Float) (node.data));
if (occur2 != null) {
occur += occur2.floatValue();
}
currentNode = getOrCreateNode(word.trim().toLowerCase(locale));
currentNode.data = occur;
} }
Float occur2 = null;
if (node != null) occur2 = ((Float) (node.data));
if (occur2 != null) {
occur += occur2.floatValue();
}
currentNode = getOrCreateNode(word.trim().toLowerCase(locale));
currentNode.data = occur;
} }
} finally {
IOUtils.close(in);
} }
in.close();
} }
/** /**