SOLR-13680: use try-with-resource to close closeable resources

closes #822
Munendra S N 2019-08-10 13:51:11 +05:30
parent aec2eb0c9d
commit 4ac23230a4
12 changed files with 111 additions and 130 deletions
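The whole commit applies one idiom: replace manual close() calls in finally blocks with try-with-resources. A minimal standalone sketch of that idiom, using a made-up DemoResource rather than anything from the patch:

    // Hypothetical resource, used only to illustrate the idiom applied in this commit.
    class DemoResource implements AutoCloseable {
        private final String name;
        DemoResource(String name) { this.name = name; }
        void use() { System.out.println("using " + name); }
        @Override public void close() { System.out.println("closed " + name); }
    }

    public class TryWithResourcesSketch {
        public static void main(String[] args) {
            // Both resources are closed automatically, in reverse declaration order,
            // whether the block completes normally or throws.
            try (DemoResource a = new DemoResource("a");
                 DemoResource b = new DemoResource("b")) {
                a.use();
                b.use();
            }
        }
    }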


@@ -110,6 +110,8 @@ Other Changes
* SOLR-13659: Refactor CacheConfig to lazily load the implementation class (noble)
* SOLR-13680: Use try-with-resource to close the closeable resource (Furkan KAMACI, Munendra S N)
================== 8.2.0 ==================
Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.


@@ -496,17 +496,9 @@ public abstract class ManagedResourceStorage {
if (inputStream == null) {
return null;
}
Object parsed = null;
InputStreamReader reader = null;
try {
reader = new InputStreamReader(inputStream, UTF_8);
Object parsed;
try (InputStreamReader reader = new InputStreamReader(inputStream, UTF_8)) {
parsed = parseText(reader, resourceId);
} finally {
if (reader != null) {
try {
reader.close();
} catch (Exception ignore){}
}
}
String objectType = (parsed != null) ? parsed.getClass().getSimpleName() : "null";
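Worth noting about this rewrite (my observation, not stated in the commit): the old code deliberately swallowed exceptions from reader.close(), while try-with-resources either propagates a close() failure or, if the body already threw, attaches it as a suppressed exception. A tiny self-contained illustration with a hypothetical resource:

    public class SuppressedExceptionSketch {
        // Hypothetical resource whose close() always fails.
        static class FailingResource implements AutoCloseable {
            @Override public void close() { throw new IllegalStateException("close failed"); }
        }

        public static void main(String[] args) {
            try {
                try (FailingResource r = new FailingResource()) {
                    throw new RuntimeException("body failed");
                }
            } catch (RuntimeException e) {
                System.out.println(e.getMessage());                    // body failed
                System.out.println(e.getSuppressed()[0].getMessage()); // close failed
            }
        }
    }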


@@ -16,7 +16,12 @@
*/
package org.apache.solr.util;
import java.io.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -44,15 +49,9 @@ public class FileUtils {
}
public static void copyFile(File src , File destination) throws IOException {
FileChannel in = null;
FileChannel out = null;
try {
in = new FileInputStream(src).getChannel();
out = new FileOutputStream(destination).getChannel();
try (FileChannel in = new FileInputStream(src).getChannel();
FileChannel out = new FileOutputStream(destination).getChannel()) {
in.transferTo(0, in.size(), out);
} finally {
try { if (in != null) in.close(); } catch (IOException e) {}
try { if (out != null) out.close(); } catch (IOException e) {}
}
}
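One caveat when reading the rewritten copyFile (not something this patch changes): FileChannel.transferTo is allowed to transfer fewer bytes than requested, so a fully defensive copy loops until everything has been moved. A sketch under that assumption:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.nio.channels.FileChannel;

    public final class CopyLoopSketch {
        // Like FileUtils.copyFile, but loops because transferTo may copy
        // fewer bytes than requested in a single call.
        static void copyFile(File src, File destination) throws IOException {
            try (FileChannel in = new FileInputStream(src).getChannel();
                 FileChannel out = new FileOutputStream(destination).getChannel()) {
                long pos = 0;
                long size = in.size();
                while (pos < size) {
                    pos += in.transferTo(pos, size - pos, out);
                }
            }
        }
    }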
@@ -71,16 +70,9 @@ public class FileUtils {
IOException exc = null;
while(!success && retryCount < 5) {
retryCount++;
RandomAccessFile file = null;
try {
try {
file = new RandomAccessFile(fullFile, "rw");
file.getFD().sync();
success = true;
} finally {
if (file != null)
file.close();
}
try (RandomAccessFile file = new RandomAccessFile(fullFile, "rw")) {
file.getFD().sync();
success = true;
} catch (IOException ioe) {
if (exc == null)
exc = ioe;


@@ -565,14 +565,15 @@ public class SolrCLI {
if (path.startsWith("file:") && path.contains("!")) {
String[] split = path.split("!");
URL jar = new URL(split[0]);
ZipInputStream zip = new ZipInputStream(jar.openStream());
ZipEntry entry;
while ((entry = zip.getNextEntry()) != null) {
if (entry.getName().endsWith(".class")) {
String className = entry.getName().replaceAll("[$].*", "")
.replaceAll("[.]class", "").replace('/', '.');
if (className.startsWith(packageName))
classes.add(className);
try (ZipInputStream zip = new ZipInputStream(jar.openStream())) {
ZipEntry entry;
while ((entry = zip.getNextEntry()) != null) {
if (entry.getName().endsWith(".class")) {
String className = entry.getName().replaceAll("[$].*", "")
.replaceAll("[.]class", "").replace('/', '.');
if (className.startsWith(packageName))
classes.add(className);
}
}
}
}
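For readers skimming the regex chain above, a small standalone demo of the entry-name to class-name mapping (the sample entry names below are invented):

    public class EntryNameDemo {
        // Same transformation as in SolrCLI: drop inner-class suffixes ($...),
        // strip the .class extension, and turn path separators into dots.
        static String toClassName(String entryName) {
            return entryName.replaceAll("[$].*", "")
                            .replaceAll("[.]class", "")
                            .replace('/', '.');
        }

        public static void main(String[] args) {
            System.out.println(toClassName("org/apache/solr/util/SolrCLI$1.class")); // org.apache.solr.util.SolrCLI
            System.out.println(toClassName("org/apache/solr/util/SomeTool.class"));  // org.apache.solr.util.SomeTool
        }
    }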


@@ -251,18 +251,18 @@ public class TestCoreContainer extends SolrTestCaseJ4 {
File lib = new File(tmpRoot.toFile(), "lib");
lib.mkdirs();
JarOutputStream jar1 = new JarOutputStream(new FileOutputStream(new File(lib, "jar1.jar")));
jar1.putNextEntry(new JarEntry("defaultSharedLibFile"));
jar1.closeEntry();
jar1.close();
try (JarOutputStream jar1 = new JarOutputStream(new FileOutputStream(new File(lib, "jar1.jar")))) {
jar1.putNextEntry(new JarEntry("defaultSharedLibFile"));
jar1.closeEntry();
}
File customLib = new File(tmpRoot.toFile(), "customLib");
customLib.mkdirs();
JarOutputStream jar2 = new JarOutputStream(new FileOutputStream(new File(customLib, "jar2.jar")));
jar2.putNextEntry(new JarEntry("customSharedLibFile"));
jar2.closeEntry();
jar2.close();
try (JarOutputStream jar2 = new JarOutputStream(new FileOutputStream(new File(customLib, "jar2.jar")))) {
jar2.putNextEntry(new JarEntry("customSharedLibFile"));
jar2.closeEntry();
}
final CoreContainer cc1 = init(tmpRoot, "<solr></solr>");
try {


@@ -267,19 +267,18 @@ public class TestDynamicLoading extends AbstractFullDistribZkTestBase {
public static ByteBuffer generateZip(Class... classes) throws IOException {
ZipOutputStream zipOut = null;
SimplePostTool.BAOS bos = new SimplePostTool.BAOS();
zipOut = new ZipOutputStream(bos);
zipOut.setLevel(ZipOutputStream.DEFLATED);
for (Class c : classes) {
String path = c.getName().replace('.', '/').concat(".class");
ZipEntry entry = new ZipEntry(path);
ByteBuffer b = SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path));
zipOut.putNextEntry(entry);
zipOut.write(b.array(), 0, b.limit());
zipOut.closeEntry();
try (ZipOutputStream zipOut = new ZipOutputStream(bos)) {
zipOut.setLevel(ZipOutputStream.DEFLATED);
for (Class c : classes) {
String path = c.getName().replace('.', '/').concat(".class");
ZipEntry entry = new ZipEntry(path);
ByteBuffer b = SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path));
zipOut.putNextEntry(entry);
zipOut.write(b.array(), 0, b.limit());
zipOut.closeEntry();
}
}
zipOut.close();
return bos.getByteBuffer();
}
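Why the return statement stays outside the new try block (my reading, not spelled out in the patch): ZipOutputStream.close() is what finishes the archive and writes the central directory, so the backing buffer only holds a valid zip once the try-with-resources block has ended. A standalone sketch with a plain ByteArrayOutputStream standing in for SimplePostTool.BAOS:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipOutputStream;

    public class ZipBufferSketch {
        static byte[] zipOf(String entryName, byte[] payload) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            try (ZipOutputStream zipOut = new ZipOutputStream(bos)) {
                zipOut.putNextEntry(new ZipEntry(entryName));
                zipOut.write(payload);
                zipOut.closeEntry();
            }
            // Only after close() has run does bos contain a complete archive.
            return bos.toByteArray();
        }

        public static void main(String[] args) throws IOException {
            byte[] zip = zipOf("hello.txt", "hello".getBytes(StandardCharsets.UTF_8));
            System.out.println(zip.length + " bytes");
        }
    }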


@@ -16,22 +16,25 @@
*/
package org.apache.solr.handler;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.util.ContentStream;
import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.List;
import java.util.ArrayList;
public class TestCSVLoader extends SolrTestCaseJ4 {
@BeforeClass
@@ -68,10 +71,8 @@ public class TestCSVLoader extends SolrTestCaseJ4 {
}
void makeFile(String contents) {
try {
Writer out = new OutputStreamWriter(new FileOutputStream(filename), StandardCharsets.UTF_8);
try (Writer out = new OutputStreamWriter(new FileOutputStream(filename), StandardCharsets.UTF_8)) {
out.write(contents);
out.close();
} catch (Exception e) {
throw new RuntimeException(e);
}
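Unrelated to the fix itself, the same helper could be written more compactly with java.nio.file.Files, which handles closing internally; a sketch of that alternative (not what the patch does):

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class MakeFileSketch {
        // Creates (or truncates) the file and writes the contents as UTF-8.
        static void makeFile(String filename, String contents) {
            try {
                Files.write(Paths.get(filename), contents.getBytes(StandardCharsets.UTF_8));
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }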


@@ -1604,20 +1604,17 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
* character copy of file using UTF-8. If port is non-null, will be substituted any time "TEST_PORT" is found.
*/
private static void copyFile(File src, File dst, Integer port, boolean internalCompression) throws IOException {
BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(src), StandardCharsets.UTF_8));
Writer out = new OutputStreamWriter(new FileOutputStream(dst), StandardCharsets.UTF_8);
try (BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(src), StandardCharsets.UTF_8));
Writer out = new OutputStreamWriter(new FileOutputStream(dst), StandardCharsets.UTF_8)) {
for (String line = in.readLine(); null != line; line = in.readLine()) {
if (null != port)
line = line.replace("TEST_PORT", port.toString());
line = line.replace("COMPRESSION", internalCompression?"internal":"false");
out.write(line);
for (String line = in.readLine(); null != line; line = in.readLine()) {
if (null != port) {
line = line.replace("TEST_PORT", port.toString());
}
line = line.replace("COMPRESSION", internalCompression ? "internal" : "false");
out.write(line);
}
}
in.close();
out.close();
}
private UpdateResponse emptyUpdate(SolrClient client, String... params)


@@ -1286,9 +1286,9 @@ public class TestRecovery extends SolrTestCaseJ4 {
h.close();
files = ulog.getLogList(logDir);
Arrays.sort(files);
RandomAccessFile raf = new RandomAccessFile(new File(logDir, files[files.length-1]), "rw");
raf.writeChars("This is a trashed log file that really shouldn't work at all, but we'll see...");
raf.close();
try (RandomAccessFile raf = new RandomAccessFile(new File(logDir, files[files.length - 1]), "rw")) {
raf.writeChars("This is a trashed log file that really shouldn't work at all, but we'll see...");
}
ignoreException("Failure to open existing");
createCore();
@@ -1337,11 +1337,11 @@ public class TestRecovery extends SolrTestCaseJ4 {
h.close();
String[] files = ulog.getLogList(logDir);
Arrays.sort(files);
RandomAccessFile raf = new RandomAccessFile(new File(logDir, files[files.length-1]), "rw");
raf.seek(raf.length()); // seek to end
raf.writeLong(0xffffffffffffffffL);
raf.writeChars("This should be appended to a good log file, representing a bad partially written record.");
raf.close();
try (RandomAccessFile raf = new RandomAccessFile(new File(logDir, files[files.length - 1]), "rw")) {
raf.seek(raf.length()); // seek to end
raf.writeLong(0xffffffffffffffffL);
raf.writeChars("This should be appended to a good log file, representing a bad partially written record.");
}
logReplay.release(1000);
logReplayFinish.drainPermits();
@@ -1398,11 +1398,11 @@ public class TestRecovery extends SolrTestCaseJ4 {
String[] files = ulog.getLogList(logDir);
Arrays.sort(files);
RandomAccessFile raf = new RandomAccessFile(new File(logDir, files[files.length-1]), "rw");
long len = raf.length();
raf.seek(0); // seek to start
raf.write(new byte[(int)len]); // zero out file
raf.close();
try (RandomAccessFile raf = new RandomAccessFile(new File(logDir, files[files.length - 1]), "rw")) {
long len = raf.length();
raf.seek(0); // seek to start
raf.write(new byte[(int) len]); // zero out file
}
ignoreException("Failure to open existing log file"); // this is what the corrupted log currently produces... subject to change.
@@ -1475,16 +1475,16 @@ public class TestRecovery extends SolrTestCaseJ4 {
String[] files = ulog.getLogList(logDir);
Arrays.sort(files);
String fname = files[files.length-1];
RandomAccessFile raf = new RandomAccessFile(new File(logDir, fname), "rw");
raf.seek(raf.length()); // seek to end
raf.writeLong(0xffffffffffffffffL);
raf.writeChars("This should be appended to a good log file, representing a bad partially written record.");
byte[] content = new byte[(int)raf.length()];
raf.seek(0);
raf.readFully(content);
raf.close();
byte[] content;
try (RandomAccessFile raf = new RandomAccessFile(new File(logDir, fname), "rw")) {
raf.seek(raf.length()); // seek to end
raf.writeLong(0xffffffffffffffffL);
raf.writeChars("This should be appended to a good log file, representing a bad partially written record.");
content = new byte[(int) raf.length()];
raf.seek(0);
raf.readFully(content);
}
// Now make a newer log file with just the IDs changed. NOTE: this may not work if log format changes too much!
findReplace("AAAAAA".getBytes(StandardCharsets.UTF_8), "aaaaaa".getBytes(StandardCharsets.UTF_8), content);
@@ -1497,10 +1497,9 @@ public class TestRecovery extends SolrTestCaseJ4 {
UpdateLog.LOG_FILENAME_PATTERN,
UpdateLog.TLOG_NAME,
logNumber + 1);
raf = new RandomAccessFile(new File(logDir, fname2), "rw");
raf.write(content);
raf.close();
try (RandomAccessFile raf = new RandomAccessFile(new File(logDir, fname2), "rw")) {
raf.write(content);
}
logReplay.release(1000);
logReplayFinish.drainPermits();


@@ -51,10 +51,9 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
void makeExternalFile(String field, String contents) {
String dir = h.getCore().getDataDir();
String filename = dir + "/external_" + field + "." + (start++);
try {
Writer out = new OutputStreamWriter(new FileOutputStream(filename), StandardCharsets.UTF_8);
try (Writer out = new OutputStreamWriter(new FileOutputStream(filename), StandardCharsets.UTF_8)) {
out.write(contents);
out.close();
} catch (Exception e) {
throw new RuntimeException(e);
}


@@ -18,6 +18,15 @@ package org.apache.solr.servlet;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Date;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpRequestBase;
@@ -28,15 +37,6 @@ import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Date;
/**
* A test case for the several HTTP cache headers emitted by Solr
*/
@@ -258,9 +258,9 @@ public class CacheHeaderTest extends CacheHeaderTestBase {
protected File makeFile(String contents, String charset) {
try {
File f = createTempFile("cachetest","csv").toFile();
Writer out = new OutputStreamWriter(new FileOutputStream(f), charset);
out.write(contents);
out.close();
try (Writer out = new OutputStreamWriter(new FileOutputStream(f), charset)) {
out.write(contents);
}
return f;
} catch (Exception e) {
throw new RuntimeException(e);


@@ -149,19 +149,18 @@ public class TestNamedUpdateProcessors extends AbstractFullDistribZkTestBase {
public static ByteBuffer generateZip(Class... classes) throws IOException {
ZipOutputStream zipOut = null;
SimplePostTool.BAOS bos = new SimplePostTool.BAOS();
zipOut = new ZipOutputStream(bos);
zipOut.setLevel(ZipOutputStream.DEFLATED);
for (Class c : classes) {
String path = c.getName().replace('.', '/').concat(".class");
ZipEntry entry = new ZipEntry(path);
ByteBuffer b = SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path));
zipOut.putNextEntry(entry);
zipOut.write(b.array(), 0, b.limit());
zipOut.closeEntry();
try (ZipOutputStream zipOut = new ZipOutputStream(bos)) {
zipOut.setLevel(ZipOutputStream.DEFLATED);
for (Class c : classes) {
String path = c.getName().replace('.', '/').concat(".class");
ZipEntry entry = new ZipEntry(path);
ByteBuffer b = SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path));
zipOut.putNextEntry(entry);
zipOut.write(b.array(), 0, b.limit());
zipOut.closeEntry();
}
}
zipOut.close();
return bos.getByteBuffer();
}