Fix resource leaks and suppress an incorrect LGTM alert (#8589)

* Fix resource leaks and suppress an incorrect alert

* Replace Guava's Files with java.nio.file.Files
Benedict Jin 2019-10-11 03:40:45 +08:00 committed by Roman Leventov
parent 4c215b417e
commit bba262a4c5
2 changed files with 7 additions and 6 deletions
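Both leak fixes follow the standard try-with-resources shape. A minimal before/after sketch of that pattern (the file name and payload below are hypothetical, not taken from this commit):

// Minimal sketch of the leak pattern this commit fixes (hypothetical file name and payload).
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

class LeakSketch
{
  // Before: if write() throws, the stream is never closed and the file handle leaks.
  static void leaky() throws IOException
  {
    OutputStream out = new FileOutputStream("job.id");
    out.write("demo".getBytes(StandardCharsets.UTF_8));
    out.close();
  }

  // After: try-with-resources closes the stream on every exit path, including exceptions.
  static void safe() throws IOException
  {
    try (OutputStream out = new FileOutputStream("job.id")) {
      out.write("demo".getBytes(StandardCharsets.UTF_8));
    }
  }
}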


@@ -412,7 +412,7 @@ public class FileSmoosher implements Closeable
     this.outFile = outFile;
     this.maxLength = maxLength;
-    FileOutputStream outStream = closer.register(new FileOutputStream(outFile));
+    FileOutputStream outStream = closer.register(new FileOutputStream(outFile)); // lgtm [java/output-resource-leak]
     this.channel = closer.register(outStream.getChannel());
   }
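The LGTM alert on this line appears to be a false positive: the stream is registered with the class's Guava Closer, so it is closed when the smoosher itself is closed rather than in the constructor, hence the inline suppression comment. A rough sketch of that lifecycle, with illustrative names:

// Sketch of the Closer-managed lifecycle assumed above (class and field names are illustrative).
import com.google.common.io.Closer;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;

class SmoosherLifecycleSketch implements Closeable
{
  private final Closer closer = Closer.create();
  private final FileChannel channel;

  SmoosherLifecycleSketch(File outFile) throws IOException
  {
    // register() hands ownership to the Closer; the stream is not leaked here.
    FileOutputStream outStream = closer.register(new FileOutputStream(outFile));
    this.channel = closer.register(outStream.getChannel());
  }

  @Override
  public void close() throws IOException
  {
    // Closes the channel and the stream in reverse registration order.
    closer.close();
  }
}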


@@ -22,7 +22,6 @@ package org.apache.druid.indexer;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.google.common.base.Predicate;
 import com.google.common.base.Strings;
-import com.google.common.io.Files;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.FileUtils;
 import org.apache.druid.java.util.common.IAE;
@@ -62,6 +61,8 @@ import java.io.OutputStreamWriter;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -295,7 +296,7 @@ public class JobHelper
   {
     log.info("Uploading jar to path[%s]", path);
     try (OutputStream os = fs.create(path)) {
-      Files.asByteSource(jarFile).copyTo(os);
+      Files.copy(jarFile.toPath(), os);
     }
   }
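The two calls do the same work here: Guava's Files.asByteSource(jarFile).copyTo(os) streams the file's bytes into the output stream, and java.nio.file.Files.copy(jarFile.toPath(), os) is the dependency-free equivalent. A small standalone comparison (class and parameter names are illustrative):

// Side-by-side sketch of the replaced call and its NIO equivalent (illustrative names).
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;

class CopySketch
{
  static void copyWithGuava(File jarFile, OutputStream os) throws IOException
  {
    // The old Guava call: wraps the file as a ByteSource and streams it out.
    com.google.common.io.Files.asByteSource(jarFile).copyTo(os);
  }

  static void copyWithNio(File jarFile, OutputStream os) throws IOException
  {
    // The new JDK call: same byte-for-byte copy, no Guava dependency.
    Files.copy(jarFile.toPath(), os);
  }
}

Standardizing on java.nio.file also leaves a single Files import in the class, which the later hunks in this file rely on.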
@@ -366,9 +367,9 @@ public class JobHelper
   public static void writeJobIdToFile(String hadoopJobIdFileName, String hadoopJobId)
   {
     if (hadoopJobId != null && hadoopJobIdFileName != null) {
-      try {
+      try (final OutputStream out = Files.newOutputStream(Paths.get(hadoopJobIdFileName))) {
         HadoopDruidIndexerConfig.JSON_MAPPER.writeValue(
-            new OutputStreamWriter(new FileOutputStream(new File(hadoopJobIdFileName)), StandardCharsets.UTF_8),
+            new OutputStreamWriter(out, StandardCharsets.UTF_8),
             hadoopJobId
         );
         log.info("MR job id [%s] is written to the file [%s]", hadoopJobId, hadoopJobIdFileName);
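Previously the FileOutputStream was created inline inside the writeValue call with no surrounding try-with-resources, so it could be left open if serialization threw; opening it up front with Files.newOutputStream inside try-with-resources guarantees it is closed on every path. A reduced sketch of the new shape (a plain Jackson ObjectMapper stands in for HadoopDruidIndexerConfig.JSON_MAPPER):

// Reduced sketch of the rewritten writeJobIdToFile (ObjectMapper stands in for JSON_MAPPER).
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

class WriteJobIdSketch
{
  private static final ObjectMapper MAPPER = new ObjectMapper();

  static void writeJobIdToFile(String fileName, String jobId) throws IOException
  {
    if (jobId != null && fileName != null) {
      // The stream is owned by try-with-resources and is closed even if writeValue throws.
      try (OutputStream out = Files.newOutputStream(Paths.get(fileName))) {
        MAPPER.writeValue(new OutputStreamWriter(out, StandardCharsets.UTF_8), jobId);
      }
    }
  }
}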
@@ -550,7 +551,7 @@ public class JobHelper
       List<String> filesToCopy = Arrays.asList(baseDir.list());
       for (String fileName : filesToCopy) {
         final File fileToCopy = new File(baseDir, fileName);
-        if (java.nio.file.Files.isRegularFile(fileToCopy.toPath())) {
+        if (Files.isRegularFile(fileToCopy.toPath())) {
           size += copyFileToZipStream(fileToCopy, outputStream, progressable);
         } else {
           log.warn("File at [%s] is not a regular file! skipping as part of zip", fileToCopy.getPath());