From 8854ce018ef75670304ada2ab38b173828778c56 Mon Sep 17 00:00:00 2001
From: Akash Dwivedi
Date: Mon, 13 Feb 2017 15:12:14 -0800
Subject: [PATCH] File.deleteOnExit() (#3923)

* Less use of File.deleteOnExit()
* Removed deleteOnExit() from most of the tests, benchmarks, and IOPeon
* Made IOPeon closeable
* Formatting
* Reverted DeterminePartitionsJobTest; removed the cleanup() method from IOPeon
---
 .../benchmark/FilterPartitionBenchmark.java        |  17 +-
 .../FilteredAggregatorBenchmark.java               |  17 +-
 ...loatCompressionBenchmarkFileGenerator.java      |   2 +-
 ...LongCompressionBenchmarkFileGenerator.java      |   2 +-
 .../indexing/IndexMergeBenchmark.java              |  56 +++++--
 .../indexing/IndexPersistBenchmark.java            |  50 +++---
 .../benchmark/query/SearchBenchmark.java           |  16 +-
 .../benchmark/query/SelectBenchmark.java           |  16 +-
 .../benchmark/query/TimeseriesBenchmark.java       |  17 +-
 .../druid/benchmark/query/TopNBenchmark.java       |  17 +-
 .../azure/AzureDataSegmentPullerTest.java          |  87 +++++-----
 .../google/GoogleDataSegmentPullerTest.java        |  47 +++---
 .../loading/HdfsDataSegmentFinderTest.java         |   5 +-
 .../loading/HdfsDataSegmentPullerTest.java         |  10 +-
 .../HdfsFileTimestampVersionFinderTest.java        |   4 +-
 .../java/io/druid/indexer/HadoopIOPeon.java        |   2 +-
 .../DetermineHashedPartitionsJobTest.java          | 152 ++++++++++--------
 .../io/druid/indexer/HadoopIOPeonTest.java         |   4 +-
 .../druid/indexer/HdfsClasspathSetupTest.java      |   3 +-
 .../util/common/CompressionUtilsTest.java          | 142 ++++++++--------
 .../java/io/druid/segment/IndexMerger.java         |   2 +-
 .../java/io/druid/segment/IndexMergerV9.java       |   2 +-
 .../segment/StringDimensionMergerLegacy.java       |   2 +-
 .../java/io/druid/segment/data/IOPeon.java         |   4 +-
 .../io/druid/segment/data/TmpFileIOPeon.java       |   3 +-
 .../java/io/druid/segment/EmptyIndexTest.java      |  57 ++++---
 .../IndexMergerV9WithSpatialIndexTest.java         |  52 +++---
 .../data/CompressedIntsIndexedWriterTest.java      |   2 +-
 .../CompressedVSizeIndexedV3WriterTest.java        |   2 +-
 .../CompressedVSizeIntsIndexedWriterTest.java      |   2 +-
 .../druid/segment/data/IOPeonForTesting.java       |   2 +-
 .../data/VSizeIndexedIntsWriterTest.java           |   2 +-
 .../loading/LocalDataSegmentPullerTest.java        |  11 +-
 .../plumber/RealtimePlumberSchoolTest.java         |   5 +-
 34 files changed, 476 insertions(+), 338 deletions(-)
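The change this patch makes everywhere is mechanical: stop registering temp files and directories with File.deleteOnExit(), which holds every registered path in JVM memory until shutdown and never runs at all if the process is killed, and instead delete eagerly in a finally block (or a JMH @TearDown / JUnit @After hook). A minimal sketch of the pattern, using the same Guava and commons-io helpers the patch uses; the class name is hypothetical, not code from the patch:

    import com.google.common.io.Files;
    import org.apache.commons.io.FileUtils;

    import java.io.File;
    import java.io.IOException;

    public class TempDirPattern
    {
      public void withTempDir() throws IOException
      {
        File tmpDir = Files.createTempDir();      // Guava: create a fresh temp directory
        try {
          // ... produce index/segment files under tmpDir ...
        }
        finally {
          FileUtils.deleteDirectory(tmpDir);      // commons-io: recursive, immediate delete
        }
      }
    }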
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
index e9c70f8fc61..9d38fb236cc 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java
@@ -26,7 +26,6 @@ import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
-
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -76,6 +75,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.joda.time.Interval;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
@@ -87,6 +87,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -118,6 +119,7 @@ public class FilterPartitionBenchmark
   private IncrementalIndex incIndex;
   private QueryableIndex qIndex;
   private File indexFile;
+  private File tmpDir;

   private Filter timeFilterNone;
   private Filter timeFilterHalf;
@@ -172,13 +174,12 @@
       incIndex.add(row);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     indexFile = INDEX_MERGER_V9.persist(
         incIndex,
-        tmpFile,
+        tmpDir,
         new IndexSpec()
     );
     qIndex = INDEX_IO.loadIndex(indexFile);
@@ -219,6 +220,12 @@
     ));
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java
index 7b9251e8975..430da3e2dc4 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/FilteredAggregatorBenchmark.java
@@ -24,7 +24,6 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
-
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -77,6 +76,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -87,6 +87,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -124,6 +125,7 @@ public class FilteredAggregatorBenchmark
   private QueryRunnerFactory factory;
   private BenchmarkSchemaInfo schemaInfo;
   private TimeseriesQuery query;
+  private File tmpDir;

   private static String JS_FN = "function(str) { return 'super-' + str; }";
   private static ExtractionFn JS_EXTRACTION_FN = new JavaScriptExtractionFn(JS_FN, false, JavaScriptConfig.getEnabledInstance());
@@ -187,13 +189,12 @@
       inputRows.add(row);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     indexFile = INDEX_MERGER_V9.persist(
         incIndex,
-        tmpFile,
+        tmpDir,
         new IndexSpec()
     );
     qIndex = INDEX_IO.loadIndex(indexFile);
@@ -220,6 +221,12 @@
         .build();
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex(AggregatorFactory[] metrics)
   {
     return new OnheapIncrementalIndex(
diff --git a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
index f3cf40c1441..10f046e805b 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
@@ -184,7 +184,7 @@ public class FloatCompressionBenchmarkFileGenerator
         output.write(ByteBuffer.wrap(baos.toByteArray()));
       }
       finally {
-        iopeon.cleanup();
+        iopeon.close();
         br.close();
       }
       System.out.print(compFile.length() / 1024 + "\n");
diff --git a/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java
index 0fb0a1fe565..5d6215fa44e 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java
@@ -177,7 +177,7 @@ public class LongCompressionBenchmarkFileGenerator
         output.write(ByteBuffer.wrap(baos.toByteArray()));
       }
       finally {
-        iopeon.cleanup();
+        iopeon.close();
         br.close();
       }
       System.out.print(compFile.length() / 1024 + "\n");
diff --git a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexMergeBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexMergeBenchmark.java
index c9aa0430bde..bbf13d8e700 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexMergeBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexMergeBenchmark.java
@@ -22,7 +22,6 @@ package io.druid.benchmark.indexing;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
-
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -42,6 +41,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -52,6 +52,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -88,6 +89,7 @@ public class IndexMergeBenchmark

   private List indexesToMerge;
   private BenchmarkSchemaInfo schemaInfo;
+  private File tmpDir;

   static {
     JSON_MAPPER = new DefaultObjectMapper();
@@ -137,13 +139,12 @@
       incIndex.add(row);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     File indexFile = INDEX_MERGER_V9.persist(
         incIndex,
-        tmpFile,
+        tmpDir,
         new IndexSpec()
     );
@@ -152,6 +153,12 @@
     }
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(
@@ -176,14 +183,23 @@
     File tmpFile = File.createTempFile("IndexMergeBenchmark-MERGEDFILE-" + System.currentTimeMillis(), ".TEMPFILE");
     tmpFile.delete();
     tmpFile.mkdirs();
-    log.info(tmpFile.getAbsolutePath() + " isFile: " + tmpFile.isFile() + " isDir:" + tmpFile.isDirectory());
-    tmpFile.deleteOnExit();
+    try {
+      log.info(tmpFile.getAbsolutePath() + " isFile: " + tmpFile.isFile() + " isDir:" + tmpFile.isDirectory());

-    File mergedFile = INDEX_MERGER.mergeQueryableIndex(indexesToMerge, rollup, schemaInfo.getAggsArray(), tmpFile, new IndexSpec());
+      File mergedFile = INDEX_MERGER.mergeQueryableIndex(
+          indexesToMerge,
+          rollup,
+          schemaInfo.getAggsArray(),
+          tmpFile,
+          new IndexSpec()
+      );

-    blackhole.consume(mergedFile);
+      blackhole.consume(mergedFile);
+    }
+    finally {
+      tmpFile.delete();
+    }
-
-    tmpFile.delete();
   }

   @Benchmark
@@ -194,13 +210,23 @@
     File tmpFile = File.createTempFile("IndexMergeBenchmark-MERGEDFILE-V9-" + System.currentTimeMillis(), ".TEMPFILE");
     tmpFile.delete();
     tmpFile.mkdirs();
-    log.info(tmpFile.getAbsolutePath() + " isFile: " + tmpFile.isFile() + " isDir:" + tmpFile.isDirectory());
-    tmpFile.deleteOnExit();
+    try {
+      log.info(tmpFile.getAbsolutePath() + " isFile: " + tmpFile.isFile() + " isDir:" + tmpFile.isDirectory());

-    File mergedFile = INDEX_MERGER_V9.mergeQueryableIndex(indexesToMerge, rollup, schemaInfo.getAggsArray(), tmpFile, new IndexSpec());
+      File mergedFile = INDEX_MERGER_V9.mergeQueryableIndex(
+          indexesToMerge,
+          rollup,
+          schemaInfo.getAggsArray(),
+          tmpFile,
+          new IndexSpec()
+      );

-    blackhole.consume(mergedFile);
+      blackhole.consume(mergedFile);
+    }
+    finally {
+      tmpFile.delete();
+
+    }
-
-    tmpFile.delete();
   }
 }
diff --git a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java
index ec11e30e9cf..4c03e3d79f6 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/indexing/IndexPersistBenchmark.java
@@ -22,7 +22,6 @@ package io.druid.benchmark.indexing;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
-
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -41,6 +40,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -174,19 +174,21 @@
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
   public void persist(Blackhole blackhole) throws Exception
   {
-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    File tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());
+    try {
+      File indexFile = INDEX_MERGER.persist(
+          incIndex,
+          tmpDir,
+          new IndexSpec()
+      );

-    File indexFile = INDEX_MERGER.persist(
-        incIndex,
-        tmpFile,
-        new IndexSpec()
-    );
+      blackhole.consume(indexFile);
+    }
+    finally {
+      FileUtils.deleteDirectory(tmpDir);
+    }
-
-    blackhole.consume(indexFile);
-
-    tmpFile.delete();
   }

   @Benchmark
@@ -194,18 +196,20 @@
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
   public void persistV9(Blackhole blackhole) throws Exception
   {
-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();;
+    File tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());
+    try {
+      File indexFile = INDEX_MERGER_V9.persist(
+          incIndex,
+          tmpDir,
+          new IndexSpec()
+      );

-    File indexFile = INDEX_MERGER_V9.persist(
-        incIndex,
-        tmpFile,
-        new IndexSpec()
-    );
+      blackhole.consume(indexFile);

-    blackhole.consume(indexFile);
-
-    tmpFile.delete();
+    }
+    finally {
+      FileUtils.deleteDirectory(tmpDir);
+    }
   }
 }
diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java
index c295702b94a..a222597024b 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/query/SearchBenchmark.java
@@ -79,6 +79,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -89,6 +90,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -132,6 +134,7 @@ public class SearchBenchmark
   private BenchmarkSchemaInfo schemaInfo;
   private Druids.SearchQueryBuilder queryBuilder;
   private SearchQuery query;
+  private File tmpDir;

   private ExecutorService executorService;
@@ -351,15 +354,14 @@
       incIndexes.add(incIndex);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     qIndexes = new ArrayList<>();
     for (int i = 0; i < numSegments; i++) {
       File indexFile = INDEX_MERGER_V9.persist(
           incIndexes.get(i),
-          tmpFile,
+          tmpDir,
           new IndexSpec()
       );
@@ -378,6 +380,12 @@
     );
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(
diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java
index e7a0fed1161..a68c57e0e43 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/query/SelectBenchmark.java
@@ -67,6 +67,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -77,6 +78,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -123,6 +125,7 @@ public class SelectBenchmark
   private BenchmarkSchemaInfo schemaInfo;
   private Druids.SelectQueryBuilder queryBuilder;
   private SelectQuery query;
+  private File tmpDir;

   private ExecutorService executorService;
@@ -211,15 +214,14 @@
       incIndexes.add(incIndex);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     qIndexes = new ArrayList<>();
     for (int i = 0; i < numSegments; i++) {
       File indexFile = INDEX_MERGER_V9.persist(
           incIndexes.get(i),
-          tmpFile,
+          tmpDir,
           new IndexSpec()
       );
       QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile);
@@ -236,6 +238,12 @@
     );
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(
diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java
index 273e71d6502..3425d6b239f 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java
@@ -24,7 +24,6 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
-
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -74,6 +73,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.joda.time.Interval;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
@@ -85,6 +85,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -121,6 +122,7 @@ public class TimeseriesBenchmark

   private List incIndexes;
   private List qIndexes;
+  private File tmpDir;

   private QueryRunnerFactory factory;
   private BenchmarkSchemaInfo schemaInfo;
@@ -278,15 +280,14 @@
       incIndexes.add(incIndex);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     qIndexes = new ArrayList<>();
     for (int i = 0; i < numSegments; i++) {
       File indexFile = INDEX_MERGER_V9.persist(
           incIndexes.get(i),
-          tmpFile,
+          tmpDir,
           new IndexSpec()
       );
@@ -303,6 +304,12 @@
     );
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(
diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java
index 2725d001a9b..7ebe056c1c3 100644
--- a/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java
+++ b/benchmarks/src/main/java/io/druid/benchmark/query/TopNBenchmark.java
@@ -24,7 +24,6 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
-
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -72,6 +71,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -82,6 +82,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -126,6 +127,7 @@ public class TopNBenchmark
   private BenchmarkSchemaInfo schemaInfo;
   private TopNQueryBuilder queryBuilder;
   private TopNQuery query;
+  private File tmpDir;

   private ExecutorService executorService;
@@ -255,15 +257,14 @@
       incIndexes.add(incIndex);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     qIndexes = new ArrayList<>();
     for (int i = 0; i < numSegments; i++) {
       File indexFile = INDEX_MERGER_V9.persist(
           incIndexes.get(i),
-          tmpFile,
+          tmpDir,
           new IndexSpec()
       );
@@ -283,6 +284,12 @@
     );
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(
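All of the benchmark classes above follow the same JMH lifecycle shape: the temp directory is created once in @Setup and deleted recursively in a new @TearDown method. JMH runs a @TearDown method at Level.Trial by default, i.e. once after all warmup and measurement iterations, so the directory survives the whole run and is still removed when the fork exits normally. A minimal sketch of that shape (hypothetical state class, not code from the patch):

    import com.google.common.io.Files;
    import org.apache.commons.io.FileUtils;
    import org.openjdk.jmh.annotations.Scope;
    import org.openjdk.jmh.annotations.Setup;
    import org.openjdk.jmh.annotations.State;
    import org.openjdk.jmh.annotations.TearDown;

    import java.io.File;
    import java.io.IOException;

    @State(Scope.Benchmark)
    public class TmpDirState
    {
      private File tmpDir;

      @Setup
      public void setup()
      {
        tmpDir = Files.createTempDir();       // created once per trial
      }

      @TearDown                               // Level.Trial by default: runs once, after all iterations
      public void tearDown() throws IOException
      {
        FileUtils.deleteDirectory(tmpDir);    // replaces tmpDir.deleteOnExit()
      }
    }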
diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java
index 798c0688444..5a5eec038ce 100644
--- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java
+++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java
@@ -21,7 +21,6 @@ package io.druid.storage.azure;
 import com.google.common.collect.ImmutableMap;
 import com.microsoft.azure.storage.StorageException;
-
 import io.druid.java.util.common.FileUtils;
 import io.druid.segment.loading.SegmentLoadingException;
 import io.druid.timeline.DataSegment;
@@ -46,7 +45,6 @@ import static org.junit.Assert.assertTrue;

 public class AzureDataSegmentPullerTest extends EasyMockSupport
 {
-  private AzureStorage azureStorage;
   private static final String SEGMENT_FILE_NAME = "segment";
   private static final String containerName = "container";
   private static final String blobPath = "/path/to/storage/index.zip";
@@ -61,6 +59,7 @@
       0,
       1
   );
+  private AzureStorage azureStorage;

   @Before
   public void before()
@@ -73,25 +72,29 @@
   {
     final String value = "bucket";
     final File pulledFile = AzureTestUtils.createZipTempFile(SEGMENT_FILE_NAME, value);
-    pulledFile.deleteOnExit();
     final File toDir = Files.createTempDirectory("druid").toFile();
-    toDir.deleteOnExit();
-    final InputStream zipStream = new FileInputStream(pulledFile);
+    try {
+      final InputStream zipStream = new FileInputStream(pulledFile);

-    expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream);
+      expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream);

-    replayAll();
+      replayAll();

-    AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);
+      AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);

-    FileUtils.FileCopyResult result = puller.getSegmentFiles(containerName, blobPath, toDir);
+      FileUtils.FileCopyResult result = puller.getSegmentFiles(containerName, blobPath, toDir);

-    File expected = new File(toDir, SEGMENT_FILE_NAME);
-    assertEquals(value.length(), result.size());
-    assertTrue(expected.exists());
-    assertEquals(value.length(), expected.length());
+      File expected = new File(toDir, SEGMENT_FILE_NAME);
+      assertEquals(value.length(), result.size());
+      assertTrue(expected.exists());
+      assertEquals(value.length(), expected.length());

-    verifyAll();
+      verifyAll();
+    }
+    finally {
+      pulledFile.delete();
+      org.apache.commons.io.FileUtils.deleteDirectory(toDir);
+    }
   }

   @Test(expected = RuntimeException.class)
@@ -100,27 +103,30 @@
   {
     final File outDir = Files.createTempDirectory("druid").toFile();
-    outDir.deleteOnExit();
+    try {
+      expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow(
+          new StorageException(
+              "error",
+              "error",
+              404,
+              null,
+              null
+          )
+      );

-    expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow(
-        new StorageException(
-            "error",
-            "error",
-            404,
-            null,
-            null
-        )
-    );
+      replayAll();

-    replayAll();
+      AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);

-    AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);
+      puller.getSegmentFiles(containerName, blobPath, outDir);

-    puller.getSegmentFiles(containerName, blobPath, outDir);
+      assertFalse(outDir.exists());

-    assertFalse(outDir.exists());
-
-    verifyAll();
+      verifyAll();
+    }
+    finally {
+      org.apache.commons.io.FileUtils.deleteDirectory(outDir);
+    }
   }

 @@ -128,18 +134,23 @@
   public void getSegmentFilesTest() throws SegmentLoadingException
   {
     final File outDir = new File("");
-    final FileUtils.FileCopyResult result = createMock(FileUtils.FileCopyResult.class);
-    final AzureDataSegmentPuller puller = createMockBuilder(AzureDataSegmentPuller.class).withConstructor(
-        azureStorage
-    ).addMockedMethod("getSegmentFiles", String.class, String.class, File.class).createMock();
+    try {
+      final FileUtils.FileCopyResult result = createMock(FileUtils.FileCopyResult.class);
+      final AzureDataSegmentPuller puller = createMockBuilder(AzureDataSegmentPuller.class).withConstructor(
+          azureStorage
+      ).addMockedMethod("getSegmentFiles", String.class, String.class, File.class).createMock();

-    expect(puller.getSegmentFiles(containerName, blobPath, outDir)).andReturn(result);
+      expect(puller.getSegmentFiles(containerName, blobPath, outDir)).andReturn(result);

-    replayAll();
+      replayAll();

-    puller.getSegmentFiles(dataSegment, outDir);
+      puller.getSegmentFiles(dataSegment, outDir);

-    verifyAll();
+      verifyAll();
+    }
+    finally {
+      outDir.delete();
+    }
   }
diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java
index cfc4b785061..a6d3facfd9c 100644
--- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java
+++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java
@@ -57,38 +57,47 @@ public class GoogleDataSegmentPullerTest extends EasyMockSupport
       throws IOException, SegmentLoadingException
   {
     final File outDir = Files.createTempDirectory("druid").toFile();
-    outDir.deleteOnExit();
-    GoogleStorage storage = createMock(GoogleStorage.class);
+    try {
+      GoogleStorage storage = createMock(GoogleStorage.class);

-    expect(storage.get(bucket, path)).andThrow(new IOException(""));
+      expect(storage.get(bucket, path)).andThrow(new IOException(""));

-    replayAll();
+      replayAll();

-    GoogleDataSegmentPuller puller = new GoogleDataSegmentPuller(storage);
-    puller.getSegmentFiles(bucket, path, outDir);
+      GoogleDataSegmentPuller puller = new GoogleDataSegmentPuller(storage);
+      puller.getSegmentFiles(bucket, path, outDir);

-    assertFalse(outDir.exists());
+      assertFalse(outDir.exists());

-    verifyAll();
+      verifyAll();
+    }
+    finally {
+      org.apache.commons.io.FileUtils.deleteDirectory(outDir);
+    }
   }

   @Test
-  public void getSegmentFilesTest() throws SegmentLoadingException
+  public void getSegmentFilesTest() throws SegmentLoadingException, IOException
   {
     final File outDir = new File("");
-    final FileUtils.FileCopyResult result = createMock(FileUtils.FileCopyResult.class);
-    GoogleStorage storage = createMock(GoogleStorage.class);
-    GoogleDataSegmentPuller puller = createMockBuilder(GoogleDataSegmentPuller.class).withConstructor(
-        storage
-    ).addMockedMethod("getSegmentFiles", String.class, String.class, File.class).createMock();
+    try {
+      final FileUtils.FileCopyResult result = createMock(FileUtils.FileCopyResult.class);
+      GoogleStorage storage = createMock(GoogleStorage.class);
+      GoogleDataSegmentPuller puller = createMockBuilder(GoogleDataSegmentPuller.class).withConstructor(
+          storage
+      ).addMockedMethod("getSegmentFiles", String.class, String.class, File.class).createMock();

-    expect(puller.getSegmentFiles(bucket, path, outDir)).andReturn(result);
+      expect(puller.getSegmentFiles(bucket, path, outDir)).andReturn(result);

-    replayAll();
+      replayAll();

-    puller.getSegmentFiles(dataSegment, outDir);
+      puller.getSegmentFiles(dataSegment, outDir);

-    verifyAll();
+      verifyAll();
+    }
+    finally {
+      org.apache.commons.io.FileUtils.deleteDirectory(outDir);
+    }
   }

   @Test
@@ -104,7 +113,7 @@
       assertTrue(outDir.exists());
     }
     finally {
-      outDir.delete();
+      org.apache.commons.io.FileUtils.deleteDirectory(outDir);
     }
   }
 }
diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java
index f1d7060979a..e9277e08b04 100644
--- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java
+++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java
@@ -27,6 +27,7 @@ import io.druid.jackson.DefaultObjectMapper;
 import io.druid.storage.hdfs.HdfsDataSegmentFinder;
 import io.druid.timeline.DataSegment;
 import io.druid.timeline.partition.NumberedShardSpec;
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -133,7 +134,6 @@ public class HdfsDataSegmentFinderTest
     mapper.registerSubtypes(new NamedType(NumberedShardSpec.class, "numbered"));

     hdfsTmpDir = File.createTempFile("hdfsDataSource", "dir");
-    hdfsTmpDir.deleteOnExit();
     if (!hdfsTmpDir.delete()) {
       throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()));
     }
@@ -145,11 +145,12 @@
   }

   @AfterClass
-  public static void tearDownStatic()
+  public static void tearDownStatic() throws IOException
   {
     if (miniCluster != null) {
       miniCluster.shutdown(true);
     }
+    FileUtils.deleteDirectory(hdfsTmpDir);
   }

   @Before
diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java
index 798b1aaf1c1..be0ed7d9ae4 100644
--- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java
+++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java
@@ -24,6 +24,7 @@ import com.google.common.io.ByteStreams;
 import io.druid.java.util.common.CompressionUtils;
 import io.druid.java.util.common.StringUtils;
 import io.druid.storage.hdfs.HdfsDataSegmentPuller;
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -62,7 +63,6 @@ public class HdfsDataSegmentPullerTest
   public static void setupStatic() throws IOException, ClassNotFoundException
   {
     hdfsTmpDir = File.createTempFile("hdfsHandlerTest", "dir");
-    hdfsTmpDir.deleteOnExit();
     if (!hdfsTmpDir.delete()) {
       throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()));
     }
@@ -74,7 +74,6 @@
     final File tmpFile = File.createTempFile("hdfsHandlerTest", ".data");
     tmpFile.delete();
     try {
-      tmpFile.deleteOnExit();
       Files.copy(new ByteArrayInputStream(pathByteContents), tmpFile.toPath());
       try (OutputStream stream = miniCluster.getFileSystem().create(filePath)) {
         Files.copy(tmpFile.toPath(), stream);
@@ -91,6 +90,7 @@
     if (miniCluster != null) {
       miniCluster.shutdown(true);
     }
+    FileUtils.deleteDirectory(hdfsTmpDir);
   }

@@ -112,18 +112,14 @@
   public void testZip() throws IOException, SegmentLoadingException
   {
     final File tmpDir = com.google.common.io.Files.createTempDir();
-    tmpDir.deleteOnExit();
     final File tmpFile = File.createTempFile("zipContents", ".txt", tmpDir);
-    tmpFile.deleteOnExit();

     final Path zipPath = new Path("/tmp/testZip.zip");

     final File outTmpDir = com.google.common.io.Files.createTempDir();
-    outTmpDir.deleteOnExit();

     final URI uri = URI.create(uriBase.toString() + zipPath.toString());

-    tmpFile.deleteOnExit();
     try (final OutputStream stream = new FileOutputStream(tmpFile)) {
       ByteStreams.copy(new ByteArrayInputStream(pathByteContents), stream);
     }
@@ -164,7 +160,6 @@
     final Path zipPath = new Path("/tmp/testZip.gz");

     final File outTmpDir = com.google.common.io.Files.createTempDir();
-    outTmpDir.deleteOnExit();
     final File outFile = new File(outTmpDir, "testZip");
     outFile.delete();
@@ -201,7 +196,6 @@
     final Path zipPath = new Path(perTestPath, "test.txt");

     final File outTmpDir = com.google.common.io.Files.createTempDir();
-    outTmpDir.deleteOnExit();
     final File outFile = new File(outTmpDir, "test.txt");
     outFile.delete();
diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java
index 7df3bc52c2b..7c8869d9eb8 100644
--- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java
+++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java
@@ -23,6 +23,7 @@ import com.google.common.io.ByteStreams;
 import io.druid.java.util.common.StringUtils;
 import io.druid.storage.hdfs.HdfsFileTimestampVersionFinder;
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -58,7 +59,6 @@ public class HdfsFileTimestampVersionFinderTest
   public static void setupStatic() throws IOException, ClassNotFoundException
   {
     hdfsTmpDir = File.createTempFile("hdfsHandlerTest", "dir");
-    hdfsTmpDir.deleteOnExit();
     if (!hdfsTmpDir.delete()) {
       throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()));
     }
@@ -70,7 +70,6 @@
     final File tmpFile = File.createTempFile("hdfsHandlerTest", ".data");
     tmpFile.delete();
     try {
-      tmpFile.deleteOnExit();
       Files.copy(new ByteArrayInputStream(pathByteContents), tmpFile.toPath());
       try (OutputStream stream = miniCluster.getFileSystem().create(filePath)) {
         Files.copy(tmpFile.toPath(), stream);
@@ -87,6 +86,7 @@
     if (miniCluster != null) {
       miniCluster.shutdown(true);
     }
+    FileUtils.deleteDirectory(hdfsTmpDir);
   }
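The HDFS-backed tests above share one shape: a static temp directory created in @BeforeClass next to the MiniDFSCluster, and, with this patch, deleted recursively in @AfterClass instead of being left to deleteOnExit(). A sketch of that shape under JUnit 4, with a hypothetical class name and the mini-cluster details elided:

    import org.apache.commons.io.FileUtils;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    import java.io.File;
    import java.io.IOException;

    public class StaticTmpDirTest
    {
      private static File hdfsTmpDir;

      @BeforeClass
      public static void setupStatic() throws IOException
      {
        // createTempFile + delete + mkdir: turn the unique temp *file* into a directory
        hdfsTmpDir = File.createTempFile("hdfsHandlerTest", "dir");
        if (!hdfsTmpDir.delete() || !hdfsTmpDir.mkdir()) {
          throw new IOException("Unable to recreate " + hdfsTmpDir.getAbsolutePath() + " as a directory");
        }
      }

      @AfterClass
      public static void tearDownStatic() throws IOException
      {
        FileUtils.deleteDirectory(hdfsTmpDir);  // runs once after all tests, even on failures
      }
    }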
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIOPeon.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIOPeon.java
index d20b3d51f5b..e74d03d96ea 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIOPeon.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIOPeon.java
@@ -55,7 +55,7 @@ class HadoopIOPeon implements IOPeon
   }

   @Override
-  public void cleanup() throws IOException
+  public void close() throws IOException
   {
     throw new UnsupportedOperationException();
   }
diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java
index 1c144d060bf..81125632dbb 100644
--- a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java
+++ b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java
@@ -33,6 +33,7 @@ import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
 import io.druid.segment.indexing.DataSchema;
 import io.druid.segment.indexing.granularity.UniformGranularitySpec;
+import org.apache.commons.io.FileUtils;
 import org.joda.time.Interval;
 import org.junit.Assert;
 import org.junit.Test;
@@ -95,79 +96,96 @@
     );
   }

-  public DetermineHashedPartitionsJobTest(String dataFilePath, long targetPartitionSize, String interval, int errorMargin, int expectedNumTimeBuckets, int[] expectedNumOfShards) throws IOException
+  public DetermineHashedPartitionsJobTest(
+      String dataFilePath,
+      long targetPartitionSize,
+      String interval,
+      int errorMargin,
+      int expectedNumTimeBuckets,
+      int[] expectedNumOfShards
+  ) throws IOException
   {
     this.expectedNumOfShards = expectedNumOfShards;
     this.expectedNumTimeBuckets = expectedNumTimeBuckets;
     this.errorMargin = errorMargin;
     File tmpDir = Files.createTempDir();
-    tmpDir.deleteOnExit();

-    HadoopIngestionSpec ingestionSpec = new HadoopIngestionSpec(
-        new DataSchema(
-            "test_schema",
-            HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
-                new StringInputRowParser(
-                    new DelimitedParseSpec(
-                        new TimestampSpec("ts", null, null),
-                        new DimensionsSpec(
-                            DimensionsSpec.getDefaultSchemas(ImmutableList.of("market", "quality", "placement", "placementish")),
-                            null,
-                            null
-                        ),
-                        "\t",
-                        null,
-                        Arrays.asList(
-                            "ts",
-                            "market",
-                            "quality",
-                            "placement",
-                            "placementish",
-                            "index"
-                        )
-                    ),
-                    null
-                ),
-                Map.class
-            ),
-            new AggregatorFactory[]{new DoubleSumAggregatorFactory("index", "index")},
-            new UniformGranularitySpec(
-                Granularity.DAY,
-                QueryGranularities.NONE,
-                ImmutableList.of(new Interval(interval))
-            ),
-            HadoopDruidIndexerConfig.JSON_MAPPER
-        ),
-        new HadoopIOConfig(
-            ImmutableMap.of(
-                "paths",
-                dataFilePath,
-                "type",
-                "static"
-            ), null, tmpDir.getAbsolutePath()
-        ),
-        new HadoopTuningConfig(
-            tmpDir.getAbsolutePath(),
-            null,
-            new HashedPartitionsSpec(targetPartitionSize, null, true, null, null),
-            null,
-            null,
-            null,
-            false,
-            false,
-            false,
-            false,
-            null,
-            false,
-            false,
-            null,
-            null,
-            null,
-            false,
-            false
-        )
-    );
-    this.indexerConfig = new HadoopDruidIndexerConfig(ingestionSpec);
+    try {
+
+      HadoopIngestionSpec ingestionSpec = new HadoopIngestionSpec(
+          new DataSchema(
+              "test_schema",
+              HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
+                  new StringInputRowParser(
+                      new DelimitedParseSpec(
+                          new TimestampSpec("ts", null, null),
+                          new DimensionsSpec(
+                              DimensionsSpec.getDefaultSchemas(ImmutableList.of(
+                                  "market",
+                                  "quality",
+                                  "placement",
+                                  "placementish"
+                              )),
+                              null,
+                              null
+                          ),
+                          "\t",
+                          null,
+                          Arrays.asList(
+                              "ts",
+                              "market",
+                              "quality",
+                              "placement",
+                              "placementish",
+                              "index"
+                          )
+                      ),
+                      null
+                  ),
+                  Map.class
+              ),
+              new AggregatorFactory[]{new DoubleSumAggregatorFactory("index", "index")},
+              new UniformGranularitySpec(
+                  Granularity.DAY,
+                  QueryGranularities.NONE,
+                  ImmutableList.of(new Interval(interval))
+              ),
+              HadoopDruidIndexerConfig.JSON_MAPPER
+          ),
+          new HadoopIOConfig(
+              ImmutableMap.of(
+                  "paths",
+                  dataFilePath,
+                  "type",
+                  "static"
+              ), null, tmpDir.getAbsolutePath()
+          ),
+          new HadoopTuningConfig(
+              tmpDir.getAbsolutePath(),
+              null,
+              new HashedPartitionsSpec(targetPartitionSize, null, true, null, null),
+              null,
+              null,
+              null,
+              false,
+              false,
+              false,
+              false,
+              null,
+              false,
+              false,
+              null,
+              null,
+              null,
+              false,
+              false
+          )
+      );
+      this.indexerConfig = new HadoopDruidIndexerConfig(ingestionSpec);
+    }
+    finally {
+      FileUtils.deleteDirectory(tmpDir);
+    }
   }

   @Test
diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIOPeonTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIOPeonTest.java
index 7a1316bb568..546d7ac3ced 100644
--- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIOPeonTest.java
+++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIOPeonTest.java
@@ -69,8 +69,8 @@ public class HadoopIOPeonTest
     Assert.assertNotNull(ioPeon.makeInputStream(tmpFolder.newFile(TMP_FILE_NAME).getName()));
   }

-  @Test(expected = UnsupportedOperationException.class) public void testCleanup() throws IOException
+  @Test(expected = UnsupportedOperationException.class) public void testClose() throws IOException
   {
-    ioPeon.cleanup();
+    ioPeon.close();
   }
 }
diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java
index a8882414a93..497690da4b7 100644
--- a/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java
+++ b/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java
@@ -26,6 +26,7 @@ import com.google.common.util.concurrent.MoreExecutors;
 import io.druid.common.utils.UUIDUtils;
 import io.druid.java.util.common.StringUtils;
 import junit.framework.Assert;
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -71,7 +72,6 @@ public class HdfsClasspathSetupTest
   public static void setupStatic() throws IOException, ClassNotFoundException
   {
     hdfsTmpDir = File.createTempFile("hdfsClasspathSetupTest", "dir");
-    hdfsTmpDir.deleteOnExit();
     if (!hdfsTmpDir.delete()) {
       throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()));
     }
@@ -100,6 +100,7 @@
     if (miniCluster != null) {
       miniCluster.shutdown(true);
     }
+    FileUtils.deleteDirectory(hdfsTmpDir);
   }

   @After
diff --git a/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java
index 64eb814daaa..0553552fca2 100644
--- a/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java
+++ b/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java
@@ -53,8 +53,6 @@ import java.util.zip.GZIPOutputStream;

 public class CompressionUtilsTest
 {
-  @Rule
-  public final TemporaryFolder temporaryFolder = new TemporaryFolder();
   private static final String content;
   private static final byte[] expected;
   private static final byte[] gzBytes;
@@ -85,9 +83,19 @@
     gzBytes = gzByteStream.toByteArray();
   }

+  @Rule
+  public final TemporaryFolder temporaryFolder = new TemporaryFolder();
   private File testDir;
   private File testFile;

+  public static void assertGoodDataStream(InputStream stream) throws IOException
+  {
+    try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length)) {
+      ByteStreams.copy(stream, bos);
+      Assert.assertArrayEquals(expected, bos.toByteArray());
+    }
+  }
+
   @Before
   public void setUp() throws IOException
   {
@@ -99,14 +107,6 @@
     Assert.assertTrue(testFile.getParentFile().equals(testDir));
   }

-  public static void assertGoodDataStream(InputStream stream) throws IOException
-  {
-    try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length)) {
-      ByteStreams.copy(stream, bos);
-      Assert.assertArrayEquals(expected, bos.toByteArray());
-    }
-  }
-
   @Test
   public void testGoodGzNameResolution()
   {
@@ -131,15 +131,24 @@ public class CompressionUtilsTest
   {
     final File tmpDir = temporaryFolder.newFolder("testGoodZipCompressUncompress");
     final File zipFile = new File(tmpDir, "compressionUtilTest.zip");
-    zipFile.deleteOnExit();
-    CompressionUtils.zip(testDir, zipFile);
-    final File newDir = new File(tmpDir, "newDir");
-    newDir.mkdir();
-    CompressionUtils.unzip(zipFile, newDir);
-    final Path newPath = Paths.get(newDir.getAbsolutePath(), testFile.getName());
-    Assert.assertTrue(newPath.toFile().exists());
-    try (final FileInputStream inputStream = new FileInputStream(newPath.toFile())) {
-      assertGoodDataStream(inputStream);
+    try {
+      CompressionUtils.zip(testDir, zipFile);
+      final File newDir = new File(tmpDir, "newDir");
+      newDir.mkdir();
+      CompressionUtils.unzip(zipFile, newDir);
+      final Path newPath = Paths.get(newDir.getAbsolutePath(), testFile.getName());
+      Assert.assertTrue(newPath.toFile().exists());
+      try (final FileInputStream inputStream = new FileInputStream(newPath.toFile())) {
+        assertGoodDataStream(inputStream);
+      }
+    }
+    finally {
+      if (zipFile.exists()) {
+        zipFile.delete();
+      }
+      if (tmpDir.exists()) {
+        tmpDir.delete();
+      }
     }
   }
@@ -252,53 +261,6 @@
     }
   }

-  private static class ZeroRemainingInputStream extends FilterInputStream
-  {
-    private final AtomicInteger pos = new AtomicInteger(0);
-
-    protected ZeroRemainingInputStream(InputStream in)
-    {
-      super(in);
-    }
-
-    @Override
-    public synchronized void reset() throws IOException
-    {
-      super.reset();
-      pos.set(0);
-    }
-
-    @Override
-    public int read(byte b[]) throws IOException
-    {
-      final int len = Math.min(b.length, gzBytes.length - pos.get() % gzBytes.length);
-      pos.addAndGet(len);
-      return read(b, 0, len);
-    }
-
-    @Override
-    public int read() throws IOException
-    {
-      pos.incrementAndGet();
-      return super.read();
-    }
-
-    @Override
-    public int read(byte b[], int off, int len) throws IOException
-    {
-      final int l = Math.min(len, gzBytes.length - pos.get() % gzBytes.length);
-      pos.addAndGet(l);
-      return super.read(b, off, l);
-    }
-
-    @Override
-    public int available() throws IOException
-    {
-      return 0;
-    }
-  }
-
-
   @Test
   // Sanity check to make sure the test class works as expected
   public void testZeroRemainingInputStream() throws IOException
@@ -410,7 +372,6 @@
     }
   }

-
   @Test
   // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144
   public void testGunzipBugStreamWorkarround() throws IOException
@@ -539,7 +500,6 @@
     Assert.assertEquals(4, flushes.get()); // 2 for suppressed closes, 2 for manual calls to shake out errors
   }

-
   @Test(expected = IOException.class)
   public void testStreamErrorGzip() throws Exception
   {
@@ -596,4 +556,50 @@
         )
     );
   }
+
+  private static class ZeroRemainingInputStream extends FilterInputStream
+  {
+    private final AtomicInteger pos = new AtomicInteger(0);
+
+    protected ZeroRemainingInputStream(InputStream in)
+    {
+      super(in);
+    }
+
+    @Override
+    public synchronized void reset() throws IOException
+    {
+      super.reset();
+      pos.set(0);
+    }
+
+    @Override
+    public int read(byte b[]) throws IOException
+    {
+      final int len = Math.min(b.length, gzBytes.length - pos.get() % gzBytes.length);
+      pos.addAndGet(len);
+      return read(b, 0, len);
+    }
+
+    @Override
+    public int read() throws IOException
+    {
+      pos.incrementAndGet();
+      return super.read();
+    }
+
+    @Override
+    public int read(byte b[], int off, int len) throws IOException
+    {
+      final int l = Math.min(len, gzBytes.length - pos.get() % gzBytes.length);
+      pos.addAndGet(l);
+      return super.read(b, off, l);
+    }
+
+    @Override
+    public int available() throws IOException
+    {
+      return 0;
+    }
+  }
 }
diff --git a/processing/src/main/java/io/druid/segment/IndexMerger.java b/processing/src/main/java/io/druid/segment/IndexMerger.java
index c815adfb7de..042e7c62d30 100644
--- a/processing/src/main/java/io/druid/segment/IndexMerger.java
+++ b/processing/src/main/java/io/druid/segment/IndexMerger.java
@@ -642,7 +642,7 @@ public class IndexMerger
       @Override
       public void close() throws IOException
       {
-        ioPeon.cleanup();
+        ioPeon.close();
       }
     });
     try {
diff --git a/processing/src/main/java/io/druid/segment/IndexMergerV9.java b/processing/src/main/java/io/druid/segment/IndexMergerV9.java
index 764046f8738..dd6db74a02e 100644
--- a/processing/src/main/java/io/druid/segment/IndexMergerV9.java
+++ b/processing/src/main/java/io/druid/segment/IndexMergerV9.java
@@ -135,7 +135,7 @@ public class IndexMergerV9 extends IndexMerger
       @Override
       public void close() throws IOException
       {
-        ioPeon.cleanup();
+        ioPeon.close();
       }
     });
     final FileSmoosher v9Smoosher = new FileSmoosher(outDir);
diff --git a/processing/src/main/java/io/druid/segment/StringDimensionMergerLegacy.java b/processing/src/main/java/io/druid/segment/StringDimensionMergerLegacy.java
index d4c4616a366..55801670226 100644
--- a/processing/src/main/java/io/druid/segment/StringDimensionMergerLegacy.java
+++ b/processing/src/main/java/io/druid/segment/StringDimensionMergerLegacy.java
@@ -210,7 +210,7 @@ public class StringDimensionMergerLegacy extends StringDimensionMergerV9 impleme
       spatialWriter.close();

       serializerUtils.writeString(spatialIndexFile, dimensionName);
       ByteStreams.copy(spatialWriter.combineStreams(), spatialIndexFile);
-      spatialIoPeon.cleanup();
+      spatialIoPeon.close();
     }
   }
diff --git a/processing/src/main/java/io/druid/segment/data/IOPeon.java b/processing/src/main/java/io/druid/segment/data/IOPeon.java
index 568224bd783..7c669d735bf 100644
--- a/processing/src/main/java/io/druid/segment/data/IOPeon.java
+++ b/processing/src/main/java/io/druid/segment/data/IOPeon.java
@@ -19,15 +19,15 @@

 package io.druid.segment.data;

+import java.io.Closeable;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;

 /**
  */
-public interface IOPeon
+public interface IOPeon extends Closeable
 {
   public OutputStream makeOutputStream(String filename) throws IOException;
   public InputStream makeInputStream(String filename) throws IOException;
-  public void cleanup() throws IOException;
 }
diff --git a/processing/src/main/java/io/druid/segment/data/TmpFileIOPeon.java b/processing/src/main/java/io/druid/segment/data/TmpFileIOPeon.java
index d8e6c5d39a3..634118c6fa5 100644
--- a/processing/src/main/java/io/druid/segment/data/TmpFileIOPeon.java
+++ b/processing/src/main/java/io/druid/segment/data/TmpFileIOPeon.java
@@ -53,7 +53,6 @@ public class TmpFileIOPeon implements IOPeon
     File retFile = createdFiles.get(filename);
     if (retFile == null) {
       retFile = File.createTempFile("filePeon", filename);
-      retFile.deleteOnExit();
       createdFiles.put(filename, retFile);
       return new BufferedOutputStream(new FileOutputStream(retFile));
     } else if (allowOverwrite) {
@@ -72,7 +71,7 @@
   }

   @Override
-  public void cleanup() throws IOException
+  public void close() throws IOException
   {
     for (File file : createdFiles.values()) {
       file.delete();
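With IOPeon extending Closeable, close() is now the cleanup hook: TmpFileIOPeon deletes every temp file it handed out, and callers can either register the peon with a Closer (as IndexMerger and IndexMergerV9 do above) or use try-with-resources. One caveat from this same patch: HadoopIOPeon.close() still throws UnsupportedOperationException, so this only applies to peons that own their files. A hypothetical usage sketch, assuming TmpFileIOPeon's no-argument constructor:

    import io.druid.segment.data.IOPeon;
    import io.druid.segment.data.TmpFileIOPeon;

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    public class IOPeonUsage
    {
      public static void roundTrip() throws IOException
      {
        try (IOPeon ioPeon = new TmpFileIOPeon()) {
          try (OutputStream out = ioPeon.makeOutputStream("values.bin")) {
            out.write(42);
          }
          try (InputStream in = ioPeon.makeInputStream("values.bin")) {
            System.out.println(in.read()); // 42
          }
        } // close() deletes the peon's temp files here; no deleteOnExit() bookkeeping
      }
    }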
diff --git a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java
index 8d21f034e9b..2e4ee6e8946 100644
--- a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java
+++ b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java
@@ -28,6 +28,7 @@ import io.druid.segment.column.Column;
 import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexAdapter;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
+import org.apache.commons.io.FileUtils;
 import org.joda.time.Interval;
 import org.junit.Assert;
 import org.junit.Test;
@@ -46,32 +47,40 @@ public class EmptyIndexTest
     if (!tmpDir.mkdir()) {
       throw new IllegalStateException("tmp mkdir failed");
     }
-    tmpDir.deleteOnExit();

-    IncrementalIndex emptyIndex = new OnheapIncrementalIndex(
-        0,
-        QueryGranularities.NONE,
-        new AggregatorFactory[0],
-        1000
-    );
-    IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter(
-        new Interval("2012-08-01/P3D"),
-        emptyIndex,
-        new ConciseBitmapFactory()
-    );
-    TestHelper.getTestIndexMerger().merge(
-        Lists.newArrayList(emptyIndexAdapter),
-        true,
-        new AggregatorFactory[0],
-        tmpDir,
-        new IndexSpec()
-    );
+    try {
+      IncrementalIndex emptyIndex = new OnheapIncrementalIndex(
+          0,
+          QueryGranularities.NONE,
+          new AggregatorFactory[0],
+          1000
+      );
+      IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter(
+          new Interval("2012-08-01/P3D"),
+          emptyIndex,
+          new ConciseBitmapFactory()
+      );
+      TestHelper.getTestIndexMerger().merge(
+          Lists.newArrayList(emptyIndexAdapter),
+          true,
+          new AggregatorFactory[0],
+          tmpDir,
+          new IndexSpec()
+      );

-    QueryableIndex emptyQueryableIndex = TestHelper.getTestIndexIO().loadIndex(tmpDir);
+      QueryableIndex emptyQueryableIndex = TestHelper.getTestIndexIO().loadIndex(tmpDir);

-    Assert.assertEquals("getDimensionNames", 0, Iterables.size(emptyQueryableIndex.getAvailableDimensions()));
-    Assert.assertEquals("getMetricNames", 0, Iterables.size(emptyQueryableIndex.getColumnNames()));
-    Assert.assertEquals("getDataInterval", new Interval("2012-08-01/P3D"), emptyQueryableIndex.getDataInterval());
-    Assert.assertEquals("getReadOnlyTimestamps", 0, emptyQueryableIndex.getColumn(Column.TIME_COLUMN_NAME).getLength());
+      Assert.assertEquals("getDimensionNames", 0, Iterables.size(emptyQueryableIndex.getAvailableDimensions()));
+      Assert.assertEquals("getMetricNames", 0, Iterables.size(emptyQueryableIndex.getColumnNames()));
+      Assert.assertEquals("getDataInterval", new Interval("2012-08-01/P3D"), emptyQueryableIndex.getDataInterval());
+      Assert.assertEquals(
+          "getReadOnlyTimestamps",
+          0,
+          emptyQueryableIndex.getColumn(Column.TIME_COLUMN_NAME).getLength()
+      );
+    }
+    finally {
+      FileUtils.deleteDirectory(tmpDir);
+    }
   }
 }
diff --git a/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java b/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java
index ee9db667a33..4bbe83118d9 100644
--- a/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java
+++ b/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java
@@ -46,6 +46,7 @@ import io.druid.query.timeseries.TimeseriesResultValue;
 import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
+import org.apache.commons.io.FileUtils;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
 import org.junit.Test;
@@ -256,10 +257,14 @@
     File tmpFile = File.createTempFile("billy", "yay");
     tmpFile.delete();
     tmpFile.mkdirs();
-    tmpFile.deleteOnExit();

-    INDEX_MERGER_V9.persist(theIndex, tmpFile, indexSpec);
-    return INDEX_IO.loadIndex(tmpFile);
+    try {
+      INDEX_MERGER_V9.persist(theIndex, tmpFile, indexSpec);
+      return INDEX_IO.loadIndex(tmpFile);
+    }
+    finally {
+      FileUtils.deleteDirectory(tmpFile);
+    }
   }

   private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec)
@@ -470,33 +475,38 @@
       File mergedFile = new File(tmpFile, "merged");

       firstFile.mkdirs();
-      firstFile.deleteOnExit();
       secondFile.mkdirs();
-      secondFile.deleteOnExit();
       thirdFile.mkdirs();
-      thirdFile.deleteOnExit();
       mergedFile.mkdirs();
-      mergedFile.deleteOnExit();

       INDEX_MERGER_V9.persist(first, DATA_INTERVAL, firstFile, indexSpec);
       INDEX_MERGER_V9.persist(second, DATA_INTERVAL, secondFile, indexSpec);
       INDEX_MERGER_V9.persist(third, DATA_INTERVAL, thirdFile, indexSpec);

-      QueryableIndex mergedRealtime = INDEX_IO.loadIndex(
-          INDEX_MERGER_V9.mergeQueryableIndex(
-              Arrays.asList(
-                  INDEX_IO.loadIndex(firstFile),
-                  INDEX_IO.loadIndex(secondFile),
-                  INDEX_IO.loadIndex(thirdFile)
-              ),
-              true,
-              METRIC_AGGS,
-              mergedFile,
-              indexSpec
-          )
-      );
+      try {
+        QueryableIndex mergedRealtime = INDEX_IO.loadIndex(
+            INDEX_MERGER_V9.mergeQueryableIndex(
+                Arrays.asList(
+                    INDEX_IO.loadIndex(firstFile),
+                    INDEX_IO.loadIndex(secondFile),
+                    INDEX_IO.loadIndex(thirdFile)
+                ),
+                true,
+                METRIC_AGGS,
+                mergedFile,
+                indexSpec
+            )
+        );
+        return mergedRealtime;
+
+      }
+      finally {
+        FileUtils.deleteDirectory(firstFile);
+        FileUtils.deleteDirectory(secondFile);
+        FileUtils.deleteDirectory(thirdFile);
+        FileUtils.deleteDirectory(mergedFile);
+      }

-      return mergedRealtime;
     }
     catch (IOException e) {
       throw Throwables.propagate(e);
diff --git a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java
index 1cc237380dc..671beb7ce54 100644
--- a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java
+++ b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java
@@ -92,7 +92,7 @@ public class CompressedIntsIndexedWriterTest
   @After
   public void tearDown() throws Exception
   {
-    ioPeon.cleanup();
+    ioPeon.close();
   }

   private void generateVals(final int totalSize, final int maxValue) throws IOException
diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java
index 52aeaa4ebb3..b2a59d3989c 100644
--- a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java
+++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java
@@ -182,7 +182,7 @@ public class CompressedVSizeIndexedV3WriterTest
   @After
   public void tearDown() throws Exception
   {
-    ioPeon.cleanup();
+    ioPeon.close();
   }

   @Test
diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedWriterTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedWriterTest.java
index 0a9b3dae62e..2a2268f13dc 100644
--- a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedWriterTest.java
+++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedWriterTest.java
@@ -91,7 +91,7 @@ public class CompressedVSizeIntsIndexedWriterTest
   @After
   public void tearDown() throws Exception
   {
-    ioPeon.cleanup();
+    ioPeon.close();
   }

   private void generateVals(final int totalSize, final int maxValue) throws IOException
diff --git a/processing/src/test/java/io/druid/segment/data/IOPeonForTesting.java b/processing/src/test/java/io/druid/segment/data/IOPeonForTesting.java
index 2e5d7b768c0..effc42ad9b8 100644
--- a/processing/src/test/java/io/druid/segment/data/IOPeonForTesting.java
+++ b/processing/src/test/java/io/druid/segment/data/IOPeonForTesting.java
@@ -61,7 +61,7 @@ class IOPeonForTesting implements IOPeon
   }

   @Override
-  public void cleanup() throws IOException
+  public void close() throws IOException
   {
     outStreams.clear();
   }
diff --git a/processing/src/test/java/io/druid/segment/data/VSizeIndexedIntsWriterTest.java b/processing/src/test/java/io/druid/segment/data/VSizeIndexedIntsWriterTest.java
index ef515f22736..a27509da7bd 100644
--- a/processing/src/test/java/io/druid/segment/data/VSizeIndexedIntsWriterTest.java
+++ b/processing/src/test/java/io/druid/segment/data/VSizeIndexedIntsWriterTest.java
@@ -50,7 +50,7 @@ public class VSizeIndexedIntsWriterTest
   @After
   public void tearDown() throws Exception
   {
-    ioPeon.cleanup();
+    ioPeon.close();
   }

   private void generateVals(final int totalSize, final int maxValue) throws IOException
diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPullerTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPullerTest.java
index b15079384d5..641c737454f 100644
--- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPullerTest.java
+++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPullerTest.java
@@ -20,9 +20,9 @@
 package io.druid.segment.loading;

 import com.google.common.io.Files;
-
 import io.druid.java.util.common.CompressionUtils;
-
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
@@ -49,10 +49,15 @@ public class LocalDataSegmentPullerTest
   public void setup() throws IOException
   {
     tmpDir = temporaryFolder.newFolder();
-    tmpDir.deleteOnExit();
     puller = new LocalDataSegmentPuller();
   }

+  @After
+  public void after() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   @Test
   public void simpleZipTest() throws IOException, SegmentLoadingException
   {
diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java
index 6bab29316c9..2dabb4ac980 100644
--- a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java
+++ b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java
@@ -96,6 +96,7 @@ public class RealtimePlumberSchoolTest
   private DataSchema schema;
   private DataSchema schema2;
   private FireDepartmentMetrics metrics;
+  private File tmpDir;

   public RealtimePlumberSchoolTest(RejectionPolicyFactory rejectionPolicy, boolean buildV9Directly)
   {
@@ -124,8 +125,7 @@
   @Before
   public void setUp() throws Exception
   {
-    final File tmpDir = Files.createTempDir();
-    tmpDir.deleteOnExit();
+    tmpDir = Files.createTempDir();

     ObjectMapper jsonMapper = new DefaultObjectMapper();
@@ -237,6 +237,7 @@
             schema.getDataSource()
         )
     );
+    FileUtils.deleteDirectory(tmpDir);
   }

   @Test(timeout = 60000)
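For tests, an alternative to hand-written finally blocks also appears in the files above: HadoopIOPeonTest, CompressionUtilsTest, and LocalDataSegmentPullerTest allocate under JUnit's TemporaryFolder rule, which deletes the whole tree after each test whether it passes or fails. A minimal sketch (hypothetical test class, not from the patch):

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.TemporaryFolder;

    import java.io.File;
    import java.io.IOException;

    public class TemporaryFolderUsage
    {
      @Rule
      public final TemporaryFolder temporaryFolder = new TemporaryFolder();

      @Test
      public void writesUnderManagedDir() throws IOException
      {
        File dir = temporaryFolder.newFolder("scratch");
        File file = temporaryFolder.newFile("data.txt");
        // ... exercise code that writes under dir and file ...
        // the rule removes both recursively when the test finishes
      }
    }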