mirror of https://github.com/apache/lucene.git

commit 368dbffef3 (parent ec1367862d)
Replace consecutive close() calls and close() calls with null checks with IOUtils.close() (#12428)
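The pattern applied throughout this commit is summarized below. org.apache.lucene.util.IOUtils.close(Closeable...) closes every non-null argument, ignores null arguments, and rethrows the first exception it hits, so a run of consecutive close() calls, or close() calls wrapped in null checks, collapses into a single call. The ExampleHolder class here is hypothetical and only sketches the before/after shape of the change; it is not part of the commit.

    import java.io.Closeable;
    import java.io.IOException;
    import org.apache.lucene.util.IOUtils;

    /** Hypothetical holder, for illustration only. */
    class ExampleHolder implements Closeable {
      private Closeable reader;    // may still be null if never opened
      private Closeable directory;

      @Override
      public void close() throws IOException {
        // Before this commit the typical shape was:
        //
        //   if (reader != null) {
        //     reader.close();
        //   }
        //   directory.close();
        //
        // After: one call that skips null arguments, closes the rest, and
        // rethrows the first IOException encountered.
        IOUtils.close(reader, directory);
      }
    }

The diff follows; per-file headers were lost in extraction, but each hunk header still names the enclosing class or nearby import.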
@@ -206,6 +206,9 @@ Other
 * GITHUB#12410: Refactor vectorization support (split provider from implementation classes).
   (Uwe Schindler, Chris Hegarty)
 
+* GITHUB#12428: Replace consecutive close() calls and close() calls with null checks with IOUtils.close().
+  (Shubham Chaudhary)
+
 ======================== Lucene 9.7.0 =======================
 
 API Changes
@@ -35,6 +35,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.apache.lucene.tests.util.LuceneTestCase;
 import org.apache.lucene.tests.util.TestUtil;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 
 /** trivial test of ICUCollationDocValuesField */
 public class TestICUCollationDocValuesField extends LuceneTestCase {
@@ -68,8 +69,7 @@ public class TestICUCollationDocValuesField extends LuceneTestCase {
     StoredFields storedFields = ir.storedFields();
     assertEquals("abc", storedFields.document(td.scoreDocs[0].doc).get("field"));
     assertEquals("ABC", storedFields.document(td.scoreDocs[1].doc).get("field"));
-    ir.close();
-    dir.close();
+    IOUtils.close(ir, dir);
   }
 
   public void testRanges() throws Exception {
@@ -107,8 +107,7 @@ public class TestICUCollationDocValuesField extends LuceneTestCase {
       doTestRanges(is, start, end, lowerVal, upperVal, collator);
     }
 
-    ir.close();
-    dir.close();
+    IOUtils.close(ir, dir);
   }
 
   private void doTestRanges(
@@ -163,10 +163,8 @@ public class PerfRunData implements Closeable {
 
   @Override
   public void close() throws IOException {
-    if (indexWriter != null) {
-      indexWriter.close();
-    }
     IOUtils.close(
+        indexWriter,
         indexReader,
         directory,
         taxonomyWriter,
@@ -190,10 +188,7 @@ public class PerfRunData implements Closeable {
   public void reinit(boolean eraseIndex) throws Exception {
 
     // cleanup index
-    if (indexWriter != null) {
-      indexWriter.close();
-    }
-    IOUtils.close(indexReader, directory);
+    IOUtils.close(indexWriter, indexReader, directory);
     indexWriter = null;
     indexReader = null;
 
@@ -303,10 +303,8 @@ public class EnwikiContentSource extends ContentSource {
   public void close() throws IOException {
     synchronized (EnwikiContentSource.this) {
       parser.stop();
-      if (is != null) {
-        is.close();
-        is = null;
-      }
+      IOUtils.close(is);
+      is = null;
     }
   }
 
@@ -204,9 +204,7 @@ public class LineDocSource extends ContentSource {
 
   private synchronized void openFile() {
     try {
-      if (reader != null) {
-        reader.close();
-      }
+      IOUtils.close(reader);
       InputStream is = StreamUtils.inputStream(file);
       reader = new BufferedReader(new InputStreamReader(is, encoding), StreamUtils.BUFFER_SIZE);
       if (skipHeaderLine) {
@@ -219,10 +217,8 @@ public class LineDocSource extends ContentSource {
 
   @Override
   public void close() throws IOException {
-    if (reader != null) {
-      reader.close();
-      reader = null;
-    }
+    IOUtils.close(reader);
+    reader = null;
   }
 
   @Override
@@ -39,6 +39,7 @@ import org.apache.lucene.search.grouping.GroupDocs;
 import org.apache.lucene.search.grouping.GroupingSearch;
 import org.apache.lucene.search.grouping.TopGroups;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 
 /**
  * Utility class for creating training / test / cross validation indexes from the original index.
@@ -174,10 +175,7 @@ public class DatasetSplitter {
       throw new IOException(e);
     } finally {
       // close IWs
-      testWriter.close();
-      cvWriter.close();
-      trainingWriter.close();
-      originalIndex.close();
+      IOUtils.close(testWriter, cvWriter, trainingWriter, originalIndex);
     }
   }
 
@@ -31,6 +31,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.apache.lucene.tests.util.LuceneTestCase;
 import org.apache.lucene.tests.util.TestUtil;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.After;
 import org.junit.Before;
 
@@ -83,8 +84,7 @@ public abstract class ClassificationTestBase<T> extends LuceneTestCase {
   @After
   public void tearDown() throws Exception {
     super.tearDown();
-    indexWriter.close();
-    dir.close();
+    IOUtils.close(indexWriter, dir);
   }
 
   protected ClassificationResult<T> checkCorrectClassification(
@@ -63,6 +63,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.util.LuceneTestCase;
 import org.apache.lucene.tests.util.TestUtil;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.NamedThreadFactory;
 import org.junit.Test;
 
@@ -330,22 +331,7 @@ public final class Test20NewsgroupsClassification extends LuceneTestCase {
       service.shutdown();
 
     } finally {
-      if (reader != null) {
-        reader.close();
-      }
-      directory.close();
-      if (testReader != null) {
-        testReader.close();
-      }
-      if (test != null) {
-        test.close();
-      }
-      if (train != null) {
-        train.close();
-      }
-      if (cv != null) {
-        cv.close();
-      }
+      IOUtils.close(reader, directory, testReader, test, train, cv);
 
       for (Classifier<BytesRef> c : classifiers) {
         if (c instanceof Closeable) {
@@ -30,6 +30,7 @@ import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.tests.analysis.MockAnalyzer;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.Test;
 
 /** Tests for {@link BM25NBClassifier} */
@@ -45,9 +46,7 @@ public class TestBM25NBClassifier extends ClassificationTestBase<BytesRef> {
           new BM25NBClassifier(leafReader, analyzer, null, categoryFieldName, textFieldName);
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -62,9 +61,7 @@ public class TestBM25NBClassifier extends ClassificationTestBase<BytesRef> {
           new BM25NBClassifier(leafReader, analyzer, query, categoryFieldName, textFieldName);
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -78,9 +75,7 @@ public class TestBM25NBClassifier extends ClassificationTestBase<BytesRef> {
           new BM25NBClassifier(leafReader, analyzer, null, categoryFieldName, textFieldName);
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -25,6 +25,7 @@ import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.tests.analysis.MockAnalyzer;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.Test;
 
 /** Testcase for {@link org.apache.lucene.classification.BooleanPerceptronClassifier} */
@@ -42,9 +43,7 @@ public class TestBooleanPerceptronClassifier extends ClassificationTestBase<Bool
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, false);
       checkCorrectClassification(classifier, POLITICS_INPUT, true);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -60,9 +59,7 @@ public class TestBooleanPerceptronClassifier extends ClassificationTestBase<Bool
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, false);
       checkCorrectClassification(classifier, POLITICS_INPUT, true);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -79,9 +76,7 @@ public class TestBooleanPerceptronClassifier extends ClassificationTestBase<Bool
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, false);
       checkCorrectClassification(classifier, POLITICS_INPUT, true);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -30,6 +30,7 @@ import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.tests.analysis.MockAnalyzer;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.Test;
 
 /** Testcase for {@link org.apache.lucene.classification.CachingNaiveBayesClassifier} */
@@ -52,9 +53,7 @@ public class TestCachingNaiveBayesClassifier extends ClassificationTestBase<Byte
           POLITICS_INPUT,
           POLITICS_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -71,9 +70,7 @@ public class TestCachingNaiveBayesClassifier extends ClassificationTestBase<Byte
           TECHNOLOGY_INPUT,
           TECHNOLOGY_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -89,9 +86,7 @@ public class TestCachingNaiveBayesClassifier extends ClassificationTestBase<Byte
           TECHNOLOGY_INPUT,
           TECHNOLOGY_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -25,6 +25,7 @@ import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.tests.analysis.MockAnalyzer;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.Test;
 
 /** Tests for {@link KNearestFuzzyClassifier} */
@@ -42,9 +43,7 @@ public class TestKNearestFuzzyClassifier extends ClassificationTestBase<BytesRef
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
       checkCorrectClassification(classifier, POLITICS_INPUT, POLITICS_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -60,9 +59,7 @@ public class TestKNearestFuzzyClassifier extends ClassificationTestBase<BytesRef
               leafReader, null, analyzer, query, 3, categoryFieldName, textFieldName);
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -30,6 +30,7 @@ import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.search.similarities.LMDirichletSimilarity;
 import org.apache.lucene.tests.analysis.MockAnalyzer;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.Test;
 
 /** Testcase for {@link KNearestNeighborClassifier} */
@@ -89,9 +90,7 @@ public class TestKNearestNeighborClassifier extends ClassificationTestBase<Bytes
           TECHNOLOGY_RESULT);
       assertTrue(resultDS.getScore() != resultLMS.getScore());
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -117,9 +116,7 @@ public class TestKNearestNeighborClassifier extends ClassificationTestBase<Bytes
       assertTrue(classes.get(0).getScore() > classes.get(1).getScore());
       checkCorrectClassification(knnClassifier, STRONG_TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -145,9 +142,7 @@ public class TestKNearestNeighborClassifier extends ClassificationTestBase<Bytes
      assertTrue(classes.get(0).getScore() > classes.get(1).getScore());
      checkCorrectClassification(knnClassifier, SUPER_STRONG_TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -164,9 +159,7 @@ public class TestKNearestNeighborClassifier extends ClassificationTestBase<Bytes
           TECHNOLOGY_INPUT,
           TECHNOLOGY_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -30,6 +30,7 @@ import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.tests.analysis.MockAnalyzer;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.Test;
 
 /** Testcase for {@link SimpleNaiveBayesClassifier} */
@@ -47,9 +48,7 @@ public class TestSimpleNaiveBayesClassifier extends ClassificationTestBase<Bytes
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
       checkCorrectClassification(classifier, POLITICS_INPUT, POLITICS_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -66,9 +65,7 @@ public class TestSimpleNaiveBayesClassifier extends ClassificationTestBase<Bytes
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
       checkCorrectClassification(classifier, POLITICS_INPUT, POLITICS_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -83,9 +80,7 @@ public class TestSimpleNaiveBayesClassifier extends ClassificationTestBase<Bytes
               leafReader, analyzer, null, categoryFieldName, textFieldName);
       checkCorrectClassification(classifier, TECHNOLOGY_INPUT, TECHNOLOGY_RESULT);
     } finally {
-      if (leafReader != null) {
-        leafReader.close();
-      }
+      IOUtils.close(leafReader);
     }
   }
 
@@ -20,6 +20,7 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.Test;
 
 /** Tests for {@link org.apache.lucene.classification.KNearestNeighborClassifier} */
@@ -74,9 +75,7 @@ public class TestKNearestNeighborDocumentClassifier
           VIDEOGAME_RESULT);
 
     } finally {
-      if (indexReader != null) {
-        indexReader.close();
-      }
+      IOUtils.close(indexReader);
     }
   }
 
@@ -134,9 +133,7 @@ public class TestKNearestNeighborDocumentClassifier
           VIDEOGAME_RESULT);
       assertEquals(1.0, score4, 0);
     } finally {
-      if (indexReader != null) {
-        indexReader.close();
-      }
+      IOUtils.close(indexReader);
     }
   }
 
@@ -171,9 +168,7 @@ public class TestKNearestNeighborDocumentClassifier
           getBatmanAmbiguosDocument(),
           VIDEOGAME_RESULT);
     } finally {
-      if (indexReader != null) {
-        indexReader.close();
-      }
+      IOUtils.close(indexReader);
     }
   }
 
@@ -208,9 +203,7 @@ public class TestKNearestNeighborDocumentClassifier
           getBatmanDocument(),
           VIDEOGAME_RESULT);
     } finally {
-      if (indexReader != null) {
-        indexReader.close();
-      }
+      IOUtils.close(indexReader);
     }
   }
 }
@@ -17,6 +17,7 @@
 package org.apache.lucene.classification.document;
 
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.Test;
 
 /** Tests for {@link org.apache.lucene.classification.SimpleNaiveBayesClassifier} */
@@ -56,9 +57,7 @@ public class TestSimpleNaiveBayesDocumentClassifier
           getBatmanDocument(),
           VIDEOGAME_ANALYZED_RESULT);
     } finally {
-      if (indexReader != null) {
-        indexReader.close();
-      }
+      IOUtils.close(indexReader);
     }
   }
 
@@ -111,9 +110,7 @@ public class TestSimpleNaiveBayesDocumentClassifier
           VIDEOGAME_ANALYZED_RESULT);
      assertEquals(0.52, score4, 0.01);
     } finally {
-      if (indexReader != null) {
-        indexReader.close();
-      }
+      IOUtils.close(indexReader);
     }
   }
 
@@ -140,9 +137,7 @@ public class TestSimpleNaiveBayesDocumentClassifier
           getBatmanAmbiguosDocument(),
           VIDEOGAME_ANALYZED_RESULT);
     } finally {
-      if (indexReader != null) {
-        indexReader.close();
-      }
+      IOUtils.close(indexReader);
     }
   }
 }
@@ -30,6 +30,7 @@ import org.apache.lucene.classification.SimpleNaiveBayesClassifier;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.tests.analysis.MockAnalyzer;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.Test;
 
 /** Tests for {@link ConfusionMatrixGenerator} */
@@ -81,9 +82,7 @@ public class TestConfusionMatrixGenerator extends ClassificationTestBase<Object>
       assertTrue(f1Measure >= 0d);
       assertTrue(f1Measure <= 1d);
     } finally {
-      if (reader != null) {
-        reader.close();
-      }
+      IOUtils.close(reader);
     }
   }
 
@@ -100,9 +99,7 @@ public class TestConfusionMatrixGenerator extends ClassificationTestBase<Object>
               reader, classifier, categoryFieldName, textFieldName, -1);
       checkCM(confusionMatrix);
     } finally {
-      if (reader != null) {
-        reader.close();
-      }
+      IOUtils.close(reader);
     }
   }
 
@@ -138,9 +135,7 @@ public class TestConfusionMatrixGenerator extends ClassificationTestBase<Object>
              reader, classifier, categoryFieldName, textFieldName, -1);
      checkCM(confusionMatrix);
     } finally {
-      if (reader != null) {
-        reader.close();
-      }
+      IOUtils.close(reader);
     }
   }
 
@@ -157,9 +152,7 @@ public class TestConfusionMatrixGenerator extends ClassificationTestBase<Object>
              reader, classifier, categoryFieldName, textFieldName, -1);
      checkCM(confusionMatrix);
     } finally {
-      if (reader != null) {
-        reader.close();
-      }
+      IOUtils.close(reader);
     }
   }
 
@@ -177,9 +170,7 @@ public class TestConfusionMatrixGenerator extends ClassificationTestBase<Object>
              reader, classifier, categoryFieldName, textFieldName, -1);
      checkCM(confusionMatrix);
     } finally {
-      if (reader != null) {
-        reader.close();
-      }
+      IOUtils.close(reader);
     }
   }
 
@@ -197,9 +188,7 @@ public class TestConfusionMatrixGenerator extends ClassificationTestBase<Object>
              reader, classifier, categoryFieldName, textFieldName, -1);
      checkCM(confusionMatrix);
     } finally {
-      if (reader != null) {
-        reader.close();
-      }
+      IOUtils.close(reader);
     }
   }
 
@@ -229,9 +218,7 @@ public class TestConfusionMatrixGenerator extends ClassificationTestBase<Object>
       assertTrue(confusionMatrix.getF1Measure("false") >= 0d);
       assertTrue(confusionMatrix.getF1Measure("false") <= 1d);
     } finally {
-      if (reader != null) {
-        reader.close();
-      }
+      IOUtils.close(reader);
     }
   }
 }
@@ -33,6 +33,7 @@ import org.apache.lucene.tests.store.BaseDirectoryWrapper;
 import org.apache.lucene.tests.util.LuceneTestCase;
 import org.apache.lucene.tests.util.TestUtil;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -83,9 +84,7 @@ public class TestDataSplitter extends LuceneTestCase {
   @Override
   @After
   public void tearDown() throws Exception {
-    originalIndex.close();
-    indexWriter.close();
-    dir.close();
+    IOUtils.close(originalIndex, indexWriter, dir);
     super.tearDown();
   }
 
@@ -133,28 +132,18 @@ public class TestDataSplitter extends LuceneTestCase {
       DirectoryReader cvReader = DirectoryReader.open(crossValidationIndex);
       assertEquals((int) (originalIndex.maxDoc() * crossValidationRatio), cvReader.maxDoc(), 20);
 
-      trainingReader.close();
-      testReader.close();
-      cvReader.close();
+      IOUtils.close(trainingReader, testReader, cvReader);
       closeQuietly(trainingReader);
       closeQuietly(testReader);
       closeQuietly(cvReader);
     } finally {
-      if (trainingIndex != null) {
-        trainingIndex.close();
-      }
-      if (testIndex != null) {
-        testIndex.close();
-      }
-      if (crossValidationIndex != null) {
-        crossValidationIndex.close();
-      }
+      IOUtils.close(trainingIndex, testIndex, crossValidationIndex);
     }
   }
 
   private static void closeQuietly(IndexReader reader) throws IOException {
     try {
-      if (reader != null) reader.close();
+      IOUtils.close(reader);
     } catch (
         @SuppressWarnings("unused")
         Exception e) {
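A side note on the TestDataSplitter hunks above: IOUtils.close propagates the first exception, while the test's closeQuietly helper (now itself delegating to IOUtils.close inside a try/catch) swallows it. Lucene's IOUtils also provides closeWhileHandlingException for the swallow-everything case. The snippet below is not part of the commit; it is a small sketch of that difference, using a deliberately failing Closeable.

    import java.io.Closeable;
    import java.io.IOException;
    import org.apache.lucene.util.IOUtils;

    public class CloseBehaviorSketch {
      public static void main(String[] args) {
        Closeable failing =
            () -> {
              throw new IOException("simulated failure on close");
            };
        Closeable absent = null; // IOUtils.close ignores null arguments

        // IOUtils.close(...) closes what it can and rethrows the first
        // exception, so the caller sees the failure.
        try {
          IOUtils.close(failing, absent);
        } catch (IOException expected) {
          System.out.println("close() rethrew: " + expected.getMessage());
        }

        // closeWhileHandlingException(...) suppresses the exception instead,
        // which is the behavior TestDataSplitter's closeQuietly helper keeps.
        IOUtils.closeWhileHandlingException(failing, absent);
      }
    }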
@@ -30,6 +30,7 @@ import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.apache.lucene.tests.util.LuceneTestCase;
+import org.apache.lucene.util.IOUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -74,8 +75,7 @@ public class TestDocToDoubleVectorUtils extends LuceneTestCase {
   @Override
   @After
   public void tearDown() throws Exception {
-    index.close();
-    dir.close();
+    IOUtils.close(index, dir);
     super.tearDown();
   }
 
@@ -41,6 +41,7 @@ import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.RamUsageEstimator;
 
 /**
@@ -190,22 +191,16 @@ public class BlockTermsReader extends FieldsProducer {
   public void close() throws IOException {
     try {
       try {
-        if (indexReader != null) {
-          indexReader.close();
-        }
+        IOUtils.close(indexReader);
       } finally {
         // null so if an app hangs on to us (ie, we are not
         // GCable, despite being closed) we still free most
         // ram
         indexReader = null;
-        if (in != null) {
-          in.close();
-        }
+        IOUtils.close(in);
       }
     } finally {
-      if (postingsReader != null) {
-        postingsReader.close();
-      }
+      IOUtils.close(postingsReader);
     }
   }
 
@@ -38,6 +38,7 @@ import org.apache.lucene.store.Directory;
 import org.apache.lucene.tests.index.RandomIndexWriter;
 import org.apache.lucene.tests.search.QueryUtils;
 import org.apache.lucene.tests.util.LuceneTestCase;
+import org.apache.lucene.util.IOUtils;
 
 public class TestFeatureField extends LuceneTestCase {
 
@@ -177,8 +178,7 @@ public class TestFeatureField extends LuceneTestCase {
 
     assertEquals(DocIdSetIterator.NO_MORE_DOCS, s.iterator().nextDoc());
 
-    reader.close();
-    dir.close();
+    IOUtils.close(reader, dir);
   }
 
   public void testExplanations() throws Exception {
@@ -237,8 +237,7 @@ public class TestFeatureField extends LuceneTestCase {
     QueryUtils.check(
         random(), FeatureField.newSigmoidQuery("features", "pagerank", .2f, 12f, 0.6f), searcher);
 
-    reader.close();
-    dir.close();
+    IOUtils.close(reader, dir);
   }
 
   public void testLogSimScorer() {
@@ -303,8 +302,7 @@ public class TestFeatureField extends LuceneTestCase {
     double expected = Math.pow(10 * 100 * 1 * 42, 1 / 4.); // geometric mean
     assertEquals(expected, pivot, 0.1);
 
-    reader.close();
-    dir.close();
+    IOUtils.close(reader, dir);
   }
 
   public void testDemo() throws IOException {
@@ -359,8 +357,7 @@ public class TestFeatureField extends LuceneTestCase {
     assertEquals(3, topDocs.scoreDocs[2].doc);
     assertEquals(2, topDocs.scoreDocs[3].doc);
 
-    reader.close();
-    dir.close();
+    IOUtils.close(reader, dir);
   }
 
   public void testBasicsNonScoringCase() throws IOException {
@@ -79,8 +79,7 @@ public class TestIndexSearcher extends LuceneTestCase {
   @Override
   public void tearDown() throws Exception {
     super.tearDown();
-    reader.close();
-    dir.close();
+    IOUtils.close(reader, dir);
   }
 
   // should not throw exception
@@ -180,8 +179,7 @@ public class TestIndexSearcher extends LuceneTestCase {
       }
       reader.close();
     }
-    w.close();
-    dir.close();
+    IOUtils.close(w, dir);
   }
 
   public void testGetQueryCache() throws IOException {
@@ -43,6 +43,7 @@ import org.apache.lucene.search.QueryVisitor;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.IOUtils;
 
 /** Simple command-line based search demo. */
 public class SearchFiles {
@@ -154,10 +155,7 @@ public class SearchFiles {
         break;
       }
     }
-    if (vectorDict != null) {
-      vectorDict.close();
-    }
-    reader.close();
+    IOUtils.close(vectorDict, reader);
   }
 
   /**
@@ -42,6 +42,7 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 
 /** Shows example usage of category associations. */
 public class AssociationsFacetsExample {
@@ -86,8 +87,7 @@ public class AssociationsFacetsExample {
     doc.add(new FloatAssociationFacetField(0.34f, "genre", "software"));
     indexWriter.addDocument(config.build(taxoWriter, doc));
 
-    indexWriter.close();
-    taxoWriter.close();
+    IOUtils.close(indexWriter, taxoWriter);
   }
 
   /** User runs a query and aggregates facets by summing their association values. */
@@ -115,8 +115,7 @@ public class AssociationsFacetsExample {
     results.add(tags.getTopChildren(10, "tags"));
     results.add(genre.getTopChildren(10, "genre"));
 
-    indexReader.close();
-    taxoReader.close();
+    IOUtils.close(indexReader, taxoReader);
 
     return results;
   }
@@ -142,8 +141,7 @@ public class AssociationsFacetsExample {
             "$genre", taxoReader, config, fc, AssociationAggregationFunction.SUM);
     FacetResult result = facets.getTopChildren(10, "genre");
 
-    indexReader.close();
-    taxoReader.close();
+    IOUtils.close(indexReader, taxoReader);
 
     return result;
   }
@@ -49,6 +49,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 
 /**
  * Shows simple usage of dynamic range faceting, using the expressions module to calculate distance.
@@ -258,8 +259,7 @@ public class DistanceFacetsExample implements Closeable {
 
   @Override
   public void close() throws IOException {
-    searcher.getIndexReader().close();
-    indexDir.close();
+    IOUtils.close(searcher.getIndexReader(), indexDir);
   }
 
   /** Runs the search and drill-down examples and prints the results. */
@@ -45,6 +45,7 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 
 /** Shows facets aggregation by an expression. */
 public class ExpressionAggregationFacetsExample {
@@ -77,8 +78,7 @@ public class ExpressionAggregationFacetsExample {
     doc.add(new FacetField("A", "C"));
     indexWriter.addDocument(config.build(taxoWriter, doc));
 
-    indexWriter.close();
-    taxoWriter.close();
+    IOUtils.close(indexWriter, taxoWriter);
   }
 
   /** User runs a query and aggregates facets. */
@@ -114,8 +114,7 @@ public class ExpressionAggregationFacetsExample {
             expr.getDoubleValuesSource(bindings));
     FacetResult result = facets.getTopChildren(10, "A");
 
-    indexReader.close();
-    taxoReader.close();
+    IOUtils.close(indexReader, taxoReader);
 
     return result;
   }
@@ -38,6 +38,7 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 
 /** Demonstrates indexing categories into different indexed fields. */
 public class MultiCategoryListsFacetsExample {
@@ -87,8 +88,7 @@ public class MultiCategoryListsFacetsExample {
     doc.add(new FacetField("Publish Date", "1999", "5", "5"));
     indexWriter.addDocument(config.build(taxoWriter, doc));
 
-    indexWriter.close();
-    taxoWriter.close();
+    IOUtils.close(indexWriter, taxoWriter);
  }
 
   /** User runs a query and counts facets. */
@@ -114,8 +114,7 @@ public class MultiCategoryListsFacetsExample {
     Facets pubDate = new FastTaxonomyFacetCounts("pubdate", taxoReader, config, fc);
     results.add(pubDate.getTopChildren(10, "Publish Date"));
 
-    indexReader.close();
-    taxoReader.close();
+    IOUtils.close(indexReader, taxoReader);
 
     return results;
   }
@@ -42,6 +42,7 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 
 /** Shows simple usage of dynamic range faceting. */
 public class RangeFacetsExample implements Closeable {
@@ -183,8 +184,7 @@ public class RangeFacetsExample implements Closeable {
 
   @Override
   public void close() throws IOException {
-    searcher.getIndexReader().close();
-    indexDir.close();
+    IOUtils.close(searcher.getIndexReader(), indexDir);
   }
 
   /** Runs the search and drill-down examples and prints the results. */
@@ -42,6 +42,7 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.store.ByteBuffersDirectory;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.IOUtils;
 
 /** Shows simple usage of faceted indexing and search. */
 public class SimpleFacetsExample {
@@ -89,8 +90,7 @@ public class SimpleFacetsExample {
     doc.add(new FacetField("Publish Date", "1999", "5", "5"));
     indexWriter.addDocument(config.build(taxoWriter, doc));
 
-    indexWriter.close();
-    taxoWriter.close();
+    IOUtils.close(indexWriter, taxoWriter);
   }
 
   /** User runs a query and counts facets. */
@@ -114,8 +114,7 @@ public class SimpleFacetsExample {
     results.add(facets.getTopChildren(10, "Author"));
     results.add(facets.getTopChildren(10, "Publish Date"));
 
-    indexReader.close();
-    taxoReader.close();
+    IOUtils.close(indexReader, taxoReader);
 
     return results;
   }
@@ -140,8 +139,7 @@ public class SimpleFacetsExample {
     results.add(facets.getTopChildren(10, "Author"));
     results.add(facets.getTopChildren(10, "Publish Date"));
 
-    indexReader.close();
-    taxoReader.close();
+    IOUtils.close(indexReader, taxoReader);
 
     return results;
   }
@@ -165,8 +163,7 @@ public class SimpleFacetsExample {
     Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
     FacetResult result = facets.getTopChildren(10, "Author");
 
-    indexReader.close();
-    taxoReader.close();
+    IOUtils.close(indexReader, taxoReader);
 
     return result;
   }
@@ -193,8 +190,7 @@ public class SimpleFacetsExample {
     // Retrieve results
     List<FacetResult> facets = result.facets.getAllDims(10);
 
-    indexReader.close();
-    taxoReader.close();
+    IOUtils.close(indexReader, taxoReader);
 
     return facets;
   }
@@ -48,6 +48,7 @@ import org.apache.lucene.tests.util.English;
 import org.apache.lucene.tests.util.LuceneTestCase;
 import org.apache.lucene.tests.util.TestUtil;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.IOUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
@@ -89,8 +90,7 @@ public class TestPayloadCheckQuery extends LuceneTestCase {
 
   @AfterClass
   public static void afterClass() throws Exception {
-    reader.close();
-    directory.close();
+    IOUtils.close(reader, directory);
     searcher = null;
     reader = null;
     directory = null;