File.deleteOnExit() (#3923)

* Less use of File.deleteOnExit()
  * Removed deleteOnExit() from most of the tests/benchmarks/IOPeon usages
  * Made IOPeon Closeable

* Formatting.

* Revert DeterminePartitionsJobTest, remove cleanup method from IOPeon
Authored by Akash Dwivedi on 2017-02-13 15:12:14 -08:00; committed by Gian Merlino
parent 9dfcf0763a
commit 8854ce018e
34 changed files with 476 additions and 338 deletions
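The whole diff applies one pattern. File.deleteOnExit() defers deletion to JVM shutdown (and silently skips non-empty directories), so long-running benchmark and test JVMs accumulated temp files. The patch deletes eagerly instead: benchmarks do it in a JMH @TearDown method, tests in a try/finally block, and IOPeon becomes Closeable so its temp files are released via close(). A minimal sketch of the two idioms, assuming Guava's Files.createTempDir() and commons-io's FileUtils.deleteDirectory(); the class below is illustrative, not part of the patch:

import java.io.File;
import java.io.IOException;

import com.google.common.io.Files;
import org.apache.commons.io.FileUtils;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;

@State(Scope.Benchmark)
public class EagerCleanupSketch
{
  private File tmpDir;

  @Setup
  public void setup()
  {
    // Previously: tmpDir.deleteOnExit(), which defers deletion to JVM shutdown.
    tmpDir = Files.createTempDir();
  }

  @TearDown
  public void tearDown() throws IOException
  {
    // Now: recursive, eager deletion as soon as the trial is over.
    FileUtils.deleteDirectory(tmpDir);
  }

  // The test-side idiom: clean up eagerly even when the body throws.
  public static void withTempDir() throws IOException
  {
    File dir = Files.createTempDir();
    try {
      // ... write and verify data under dir ...
    }
    finally {
      FileUtils.deleteDirectory(dir);
    }
  }
}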

View File

@@ -26,7 +26,6 @@ import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -76,6 +75,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.joda.time.Interval;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
@@ -87,6 +87,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -118,6 +119,7 @@ public class FilterPartitionBenchmark
   private IncrementalIndex incIndex;
   private QueryableIndex qIndex;
   private File indexFile;
+  private File tmpDir;

   private Filter timeFilterNone;
   private Filter timeFilterHalf;
@@ -172,13 +174,12 @@ public class FilterPartitionBenchmark
       incIndex.add(row);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     indexFile = INDEX_MERGER_V9.persist(
         incIndex,
-        tmpFile,
+        tmpDir,
         new IndexSpec()
     );
     qIndex = INDEX_IO.loadIndex(indexFile);
@@ -219,6 +220,12 @@
     ));
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(

View File

@@ -24,7 +24,6 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -77,6 +76,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -87,6 +87,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -124,6 +125,7 @@ public class FilteredAggregatorBenchmark
   private QueryRunnerFactory factory;
   private BenchmarkSchemaInfo schemaInfo;
   private TimeseriesQuery query;
+  private File tmpDir;

   private static String JS_FN = "function(str) { return 'super-' + str; }";
   private static ExtractionFn JS_EXTRACTION_FN = new JavaScriptExtractionFn(JS_FN, false, JavaScriptConfig.getEnabledInstance());
@@ -187,13 +189,12 @@ public class FilteredAggregatorBenchmark
       inputRows.add(row);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     indexFile = INDEX_MERGER_V9.persist(
         incIndex,
-        tmpFile,
+        tmpDir,
         new IndexSpec()
     );
     qIndex = INDEX_IO.loadIndex(indexFile);
@@ -220,6 +221,12 @@
         .build();
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex(AggregatorFactory[] metrics)
   {
     return new OnheapIncrementalIndex(

View File

@@ -184,7 +184,7 @@ public class FloatCompressionBenchmarkFileGenerator
         output.write(ByteBuffer.wrap(baos.toByteArray()));
       }
       finally {
-        iopeon.cleanup();
+        iopeon.close();
         br.close();
       }
       System.out.print(compFile.length() / 1024 + "\n");

View File

@@ -177,7 +177,7 @@ public class LongCompressionBenchmarkFileGenerator
         output.write(ByteBuffer.wrap(baos.toByteArray()));
       }
       finally {
-        iopeon.cleanup();
+        iopeon.close();
         br.close();
       }
       System.out.print(compFile.length() / 1024 + "\n");

View File

@@ -22,7 +22,6 @@ package io.druid.benchmark.indexing;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -42,6 +41,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -52,6 +52,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -88,6 +89,7 @@ public class IndexMergeBenchmark
   private List<QueryableIndex> indexesToMerge;
   private BenchmarkSchemaInfo schemaInfo;
+  private File tmpDir;

   static {
     JSON_MAPPER = new DefaultObjectMapper();
@@ -137,13 +139,12 @@ public class IndexMergeBenchmark
       incIndex.add(row);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     File indexFile = INDEX_MERGER_V9.persist(
         incIndex,
-        tmpFile,
+        tmpDir,
         new IndexSpec()
     );
@@ -152,6 +153,12 @@
     }
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(
@@ -176,14 +183,23 @@ public class IndexMergeBenchmark
     File tmpFile = File.createTempFile("IndexMergeBenchmark-MERGEDFILE-" + System.currentTimeMillis(), ".TEMPFILE");
     tmpFile.delete();
     tmpFile.mkdirs();
-    log.info(tmpFile.getAbsolutePath() + " isFile: " + tmpFile.isFile() + " isDir:" + tmpFile.isDirectory());
-    tmpFile.deleteOnExit();
-    File mergedFile = INDEX_MERGER.mergeQueryableIndex(indexesToMerge, rollup, schemaInfo.getAggsArray(), tmpFile, new IndexSpec());
-    blackhole.consume(mergedFile);
-    tmpFile.delete();
+    try {
+      log.info(tmpFile.getAbsolutePath() + " isFile: " + tmpFile.isFile() + " isDir:" + tmpFile.isDirectory());
+      File mergedFile = INDEX_MERGER.mergeQueryableIndex(
+          indexesToMerge,
+          rollup,
+          schemaInfo.getAggsArray(),
+          tmpFile,
+          new IndexSpec()
+      );
+      blackhole.consume(mergedFile);
+    }
+    finally {
+      tmpFile.delete();
+    }
   }

   @Benchmark
@@ -194,13 +210,23 @@
     File tmpFile = File.createTempFile("IndexMergeBenchmark-MERGEDFILE-V9-" + System.currentTimeMillis(), ".TEMPFILE");
     tmpFile.delete();
     tmpFile.mkdirs();
-    log.info(tmpFile.getAbsolutePath() + " isFile: " + tmpFile.isFile() + " isDir:" + tmpFile.isDirectory());
-    tmpFile.deleteOnExit();
-    File mergedFile = INDEX_MERGER_V9.mergeQueryableIndex(indexesToMerge, rollup, schemaInfo.getAggsArray(), tmpFile, new IndexSpec());
-    blackhole.consume(mergedFile);
-    tmpFile.delete();
+    try {
+      log.info(tmpFile.getAbsolutePath() + " isFile: " + tmpFile.isFile() + " isDir:" + tmpFile.isDirectory());
+      File mergedFile = INDEX_MERGER_V9.mergeQueryableIndex(
+          indexesToMerge,
+          rollup,
+          schemaInfo.getAggsArray(),
+          tmpFile,
+          new IndexSpec()
+      );
+      blackhole.consume(mergedFile);
+    }
+    finally {
+      tmpFile.delete();
+    }
   }
 }

View File

@@ -22,7 +22,6 @@ package io.druid.benchmark.indexing;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -41,6 +40,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -174,19 +174,21 @@
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
   public void persist(Blackhole blackhole) throws Exception
   {
-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
-
-    File indexFile = INDEX_MERGER.persist(
-        incIndex,
-        tmpFile,
-        new IndexSpec()
-    );
-
-    blackhole.consume(indexFile);
-
-    tmpFile.delete();
+    File tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());
+    try {
+      File indexFile = INDEX_MERGER.persist(
+          incIndex,
+          tmpDir,
+          new IndexSpec()
+      );
+      blackhole.consume(indexFile);
+    }
+    finally {
+      FileUtils.deleteDirectory(tmpDir);
+    }
   }

   @Benchmark
@@ -194,18 +196,20 @@
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
   public void persistV9(Blackhole blackhole) throws Exception
   {
-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();;
-
-    File indexFile = INDEX_MERGER_V9.persist(
-        incIndex,
-        tmpFile,
-        new IndexSpec()
-    );
-
-    blackhole.consume(indexFile);
-
-    tmpFile.delete();
+    File tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());
+    try {
+      File indexFile = INDEX_MERGER_V9.persist(
+          incIndex,
+          tmpDir,
+          new IndexSpec()
+      );
+      blackhole.consume(indexFile);
+    }
+    finally {
+      FileUtils.deleteDirectory(tmpDir);
+    }
   }
 }

View File

@@ -79,6 +79,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -89,6 +90,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -132,6 +134,7 @@ public class SearchBenchmark
   private BenchmarkSchemaInfo schemaInfo;
   private Druids.SearchQueryBuilder queryBuilder;
   private SearchQuery query;
+  private File tmpDir;

   private ExecutorService executorService;
@@ -351,15 +354,14 @@ public class SearchBenchmark
       incIndexes.add(incIndex);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     qIndexes = new ArrayList<>();
     for (int i = 0; i < numSegments; i++) {
       File indexFile = INDEX_MERGER_V9.persist(
           incIndexes.get(i),
-          tmpFile,
+          tmpDir,
           new IndexSpec()
       );
@@ -378,6 +380,12 @@
     );
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(

View File

@@ -67,6 +67,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -77,6 +78,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -123,6 +125,7 @@ public class SelectBenchmark
   private BenchmarkSchemaInfo schemaInfo;
   private Druids.SelectQueryBuilder queryBuilder;
   private SelectQuery query;
+  private File tmpDir;

   private ExecutorService executorService;
@@ -211,15 +214,14 @@ public class SelectBenchmark
       incIndexes.add(incIndex);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     qIndexes = new ArrayList<>();
     for (int i = 0; i < numSegments; i++) {
       File indexFile = INDEX_MERGER_V9.persist(
           incIndexes.get(i),
-          tmpFile,
+          tmpDir,
           new IndexSpec()
       );
       QueryableIndex qIndex = INDEX_IO.loadIndex(indexFile);
@@ -236,6 +238,12 @@
     );
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(

View File

@@ -24,7 +24,6 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -74,6 +73,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.joda.time.Interval;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
@@ -85,6 +85,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -121,6 +122,7 @@ public class TimeseriesBenchmark
   private List<IncrementalIndex> incIndexes;
   private List<QueryableIndex> qIndexes;
+  private File tmpDir;

   private QueryRunnerFactory factory;
   private BenchmarkSchemaInfo schemaInfo;
@@ -278,15 +280,14 @@ public class TimeseriesBenchmark
       incIndexes.add(incIndex);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     qIndexes = new ArrayList<>();
     for (int i = 0; i < numSegments; i++) {
       File indexFile = INDEX_MERGER_V9.persist(
           incIndexes.get(i),
-          tmpFile,
+          tmpDir,
           new IndexSpec()
       );
@@ -303,6 +304,12 @@
     );
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(

View File

@@ -24,7 +24,6 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Files;
 import io.druid.benchmark.datagen.BenchmarkDataGenerator;
 import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
 import io.druid.benchmark.datagen.BenchmarkSchemas;
@@ -72,6 +71,7 @@ import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.incremental.IncrementalIndexSchema;
 import io.druid.segment.incremental.OnheapIncrementalIndex;
 import io.druid.segment.serde.ComplexMetrics;
+import org.apache.commons.io.FileUtils;
 import org.openjdk.jmh.annotations.Benchmark;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
@@ -82,6 +82,7 @@ import org.openjdk.jmh.annotations.Param;
 import org.openjdk.jmh.annotations.Scope;
 import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
@@ -126,6 +127,7 @@ public class TopNBenchmark
   private BenchmarkSchemaInfo schemaInfo;
   private TopNQueryBuilder queryBuilder;
   private TopNQuery query;
+  private File tmpDir;

   private ExecutorService executorService;
@@ -255,15 +257,14 @@ public class TopNBenchmark
       incIndexes.add(incIndex);
     }

-    File tmpFile = Files.createTempDir();
-    log.info("Using temp dir: " + tmpFile.getAbsolutePath());
-    tmpFile.deleteOnExit();
+    tmpDir = Files.createTempDir();
+    log.info("Using temp dir: " + tmpDir.getAbsolutePath());

     qIndexes = new ArrayList<>();
     for (int i = 0; i < numSegments; i++) {
       File indexFile = INDEX_MERGER_V9.persist(
           incIndexes.get(i),
-          tmpFile,
+          tmpDir,
           new IndexSpec()
       );
@@ -283,6 +284,12 @@
     );
   }

+  @TearDown
+  public void tearDown() throws IOException
+  {
+    FileUtils.deleteDirectory(tmpDir);
+  }
+
   private IncrementalIndex makeIncIndex()
   {
     return new OnheapIncrementalIndex(

View File

@@ -21,7 +21,6 @@ package io.druid.storage.azure;
 import com.google.common.collect.ImmutableMap;
 import com.microsoft.azure.storage.StorageException;
 import io.druid.java.util.common.FileUtils;
 import io.druid.segment.loading.SegmentLoadingException;
 import io.druid.timeline.DataSegment;
@@ -46,7 +45,6 @@ import static org.junit.Assert.assertTrue;

 public class AzureDataSegmentPullerTest extends EasyMockSupport
 {
-  private AzureStorage azureStorage;
   private static final String SEGMENT_FILE_NAME = "segment";
   private static final String containerName = "container";
   private static final String blobPath = "/path/to/storage/index.zip";
@@ -61,6 +59,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
       0,
       1
   );
+  private AzureStorage azureStorage;

   @Before
   public void before()
@@ -73,25 +72,29 @@
   {
     final String value = "bucket";
     final File pulledFile = AzureTestUtils.createZipTempFile(SEGMENT_FILE_NAME, value);
-    pulledFile.deleteOnExit();
     final File toDir = Files.createTempDirectory("druid").toFile();
-    toDir.deleteOnExit();
-    final InputStream zipStream = new FileInputStream(pulledFile);
-    expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream);
-    replayAll();
-    AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);
-    FileUtils.FileCopyResult result = puller.getSegmentFiles(containerName, blobPath, toDir);
-    File expected = new File(toDir, SEGMENT_FILE_NAME);
-    assertEquals(value.length(), result.size());
-    assertTrue(expected.exists());
-    assertEquals(value.length(), expected.length());
-    verifyAll();
+    try {
+      final InputStream zipStream = new FileInputStream(pulledFile);
+      expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream);
+      replayAll();
+      AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);
+      FileUtils.FileCopyResult result = puller.getSegmentFiles(containerName, blobPath, toDir);
+      File expected = new File(toDir, SEGMENT_FILE_NAME);
+      assertEquals(value.length(), result.size());
+      assertTrue(expected.exists());
+      assertEquals(value.length(), expected.length());
+      verifyAll();
+    }
+    finally {
+      pulledFile.delete();
+      org.apache.commons.io.FileUtils.deleteDirectory(toDir);
+    }
   }

   @Test(expected = RuntimeException.class)
@@ -100,27 +103,30 @@
   {
     final File outDir = Files.createTempDirectory("druid").toFile();
-    outDir.deleteOnExit();
-
-    expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow(
-        new StorageException(
-            "error",
-            "error",
-            404,
-            null,
-            null
-        )
-    );
-
-    replayAll();
-
-    AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);
-
-    puller.getSegmentFiles(containerName, blobPath, outDir);
-
-    assertFalse(outDir.exists());
-
-    verifyAll();
+    try {
+      expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow(
+          new StorageException(
+              "error",
+              "error",
+              404,
+              null,
+              null
+          )
+      );
+
+      replayAll();
+
+      AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);
+
+      puller.getSegmentFiles(containerName, blobPath, outDir);
+
+      assertFalse(outDir.exists());
+
+      verifyAll();
+    }
+    finally {
+      org.apache.commons.io.FileUtils.deleteDirectory(outDir);
+    }
   }
@@ -128,18 +134,23 @@
   public void getSegmentFilesTest() throws SegmentLoadingException
   {
     final File outDir = new File("");
-    final FileUtils.FileCopyResult result = createMock(FileUtils.FileCopyResult.class);
-    final AzureDataSegmentPuller puller = createMockBuilder(AzureDataSegmentPuller.class).withConstructor(
-        azureStorage
-    ).addMockedMethod("getSegmentFiles", String.class, String.class, File.class).createMock();
-
-    expect(puller.getSegmentFiles(containerName, blobPath, outDir)).andReturn(result);
-
-    replayAll();
-
-    puller.getSegmentFiles(dataSegment, outDir);
-
-    verifyAll();
+    try {
+      final FileUtils.FileCopyResult result = createMock(FileUtils.FileCopyResult.class);
+      final AzureDataSegmentPuller puller = createMockBuilder(AzureDataSegmentPuller.class).withConstructor(
+          azureStorage
+      ).addMockedMethod("getSegmentFiles", String.class, String.class, File.class).createMock();
+
+      expect(puller.getSegmentFiles(containerName, blobPath, outDir)).andReturn(result);
+
+      replayAll();
+
+      puller.getSegmentFiles(dataSegment, outDir);
+
+      verifyAll();
+    }
+    finally {
+      outDir.delete();
+    }
   }

View File

@@ -57,38 +57,47 @@ public class GoogleDataSegmentPullerTest extends EasyMockSupport
       throws IOException, SegmentLoadingException
   {
     final File outDir = Files.createTempDirectory("druid").toFile();
-    outDir.deleteOnExit();
-    GoogleStorage storage = createMock(GoogleStorage.class);
-    expect(storage.get(bucket, path)).andThrow(new IOException(""));
-
-    replayAll();
-
-    GoogleDataSegmentPuller puller = new GoogleDataSegmentPuller(storage);
-
-    puller.getSegmentFiles(bucket, path, outDir);
-
-    assertFalse(outDir.exists());
-
-    verifyAll();
+    try {
+      GoogleStorage storage = createMock(GoogleStorage.class);
+      expect(storage.get(bucket, path)).andThrow(new IOException(""));
+
+      replayAll();
+
+      GoogleDataSegmentPuller puller = new GoogleDataSegmentPuller(storage);
+
+      puller.getSegmentFiles(bucket, path, outDir);
+
+      assertFalse(outDir.exists());
+
+      verifyAll();
+    }
+    finally {
+      org.apache.commons.io.FileUtils.deleteDirectory(outDir);
+    }
   }

   @Test
-  public void getSegmentFilesTest() throws SegmentLoadingException
+  public void getSegmentFilesTest() throws SegmentLoadingException, IOException
   {
     final File outDir = new File("");
-    final FileUtils.FileCopyResult result = createMock(FileUtils.FileCopyResult.class);
-    GoogleStorage storage = createMock(GoogleStorage.class);
-    GoogleDataSegmentPuller puller = createMockBuilder(GoogleDataSegmentPuller.class).withConstructor(
-        storage
-    ).addMockedMethod("getSegmentFiles", String.class, String.class, File.class).createMock();
-
-    expect(puller.getSegmentFiles(bucket, path, outDir)).andReturn(result);
-
-    replayAll();
-
-    puller.getSegmentFiles(dataSegment, outDir);
-
-    verifyAll();
+    try {
+      final FileUtils.FileCopyResult result = createMock(FileUtils.FileCopyResult.class);
+      GoogleStorage storage = createMock(GoogleStorage.class);
+      GoogleDataSegmentPuller puller = createMockBuilder(GoogleDataSegmentPuller.class).withConstructor(
+          storage
+      ).addMockedMethod("getSegmentFiles", String.class, String.class, File.class).createMock();
+
+      expect(puller.getSegmentFiles(bucket, path, outDir)).andReturn(result);
+
+      replayAll();
+
+      puller.getSegmentFiles(dataSegment, outDir);
+
+      verifyAll();
+    }
+    finally {
+      org.apache.commons.io.FileUtils.deleteDirectory(outDir);
+    }
   }

   @Test
@@ -104,7 +113,7 @@
       assertTrue(outDir.exists());
     }
     finally {
-      outDir.delete();
+      org.apache.commons.io.FileUtils.deleteDirectory(outDir);
     }
   }
 }

View File

@@ -27,6 +27,7 @@ import io.druid.jackson.DefaultObjectMapper;
 import io.druid.storage.hdfs.HdfsDataSegmentFinder;
 import io.druid.timeline.DataSegment;
 import io.druid.timeline.partition.NumberedShardSpec;
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -133,7 +134,6 @@ public class HdfsDataSegmentFinderTest
     mapper.registerSubtypes(new NamedType(NumberedShardSpec.class, "numbered"));

     hdfsTmpDir = File.createTempFile("hdfsDataSource", "dir");
-    hdfsTmpDir.deleteOnExit();
     if (!hdfsTmpDir.delete()) {
       throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()));
     }
@@ -145,11 +145,12 @@
   }

   @AfterClass
-  public static void tearDownStatic()
+  public static void tearDownStatic() throws IOException
   {
     if (miniCluster != null) {
       miniCluster.shutdown(true);
     }
+    FileUtils.deleteDirectory(hdfsTmpDir);
   }

   @Before

View File

@@ -24,6 +24,7 @@ import com.google.common.io.ByteStreams;
 import io.druid.java.util.common.CompressionUtils;
 import io.druid.java.util.common.StringUtils;
 import io.druid.storage.hdfs.HdfsDataSegmentPuller;
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -62,7 +63,6 @@ public class HdfsDataSegmentPullerTest
   public static void setupStatic() throws IOException, ClassNotFoundException
   {
     hdfsTmpDir = File.createTempFile("hdfsHandlerTest", "dir");
-    hdfsTmpDir.deleteOnExit();
     if (!hdfsTmpDir.delete()) {
       throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()));
     }
@@ -74,7 +74,6 @@
     final File tmpFile = File.createTempFile("hdfsHandlerTest", ".data");
     tmpFile.delete();
     try {
-      tmpFile.deleteOnExit();
       Files.copy(new ByteArrayInputStream(pathByteContents), tmpFile.toPath());
       try (OutputStream stream = miniCluster.getFileSystem().create(filePath)) {
         Files.copy(tmpFile.toPath(), stream);
@@ -91,6 +90,7 @@
     if (miniCluster != null) {
       miniCluster.shutdown(true);
     }
+    FileUtils.deleteDirectory(hdfsTmpDir);
   }
@@ -112,18 +112,14 @@
   public void testZip() throws IOException, SegmentLoadingException
   {
     final File tmpDir = com.google.common.io.Files.createTempDir();
-    tmpDir.deleteOnExit();
     final File tmpFile = File.createTempFile("zipContents", ".txt", tmpDir);
-    tmpFile.deleteOnExit();

     final Path zipPath = new Path("/tmp/testZip.zip");

     final File outTmpDir = com.google.common.io.Files.createTempDir();
-    outTmpDir.deleteOnExit();

     final URI uri = URI.create(uriBase.toString() + zipPath.toString());

-    tmpFile.deleteOnExit();
     try (final OutputStream stream = new FileOutputStream(tmpFile)) {
       ByteStreams.copy(new ByteArrayInputStream(pathByteContents), stream);
     }
@@ -164,7 +160,6 @@
     final Path zipPath = new Path("/tmp/testZip.gz");

     final File outTmpDir = com.google.common.io.Files.createTempDir();
-    outTmpDir.deleteOnExit();
     final File outFile = new File(outTmpDir, "testZip");
     outFile.delete();
@@ -201,7 +196,6 @@
     final Path zipPath = new Path(perTestPath, "test.txt");

     final File outTmpDir = com.google.common.io.Files.createTempDir();
-    outTmpDir.deleteOnExit();
     final File outFile = new File(outTmpDir, "test.txt");
     outFile.delete();

View File

@@ -23,6 +23,7 @@ import com.google.common.io.ByteStreams;
 import io.druid.java.util.common.StringUtils;
 import io.druid.storage.hdfs.HdfsFileTimestampVersionFinder;
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -58,7 +59,6 @@ public class HdfsFileTimestampVersionFinderTest
   public static void setupStatic() throws IOException, ClassNotFoundException
   {
     hdfsTmpDir = File.createTempFile("hdfsHandlerTest", "dir");
-    hdfsTmpDir.deleteOnExit();
     if (!hdfsTmpDir.delete()) {
       throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()));
     }
@@ -70,7 +70,6 @@
     final File tmpFile = File.createTempFile("hdfsHandlerTest", ".data");
     tmpFile.delete();
     try {
-      tmpFile.deleteOnExit();
       Files.copy(new ByteArrayInputStream(pathByteContents), tmpFile.toPath());
       try (OutputStream stream = miniCluster.getFileSystem().create(filePath)) {
         Files.copy(tmpFile.toPath(), stream);
@@ -87,6 +86,7 @@
     if (miniCluster != null) {
       miniCluster.shutdown(true);
     }
+    FileUtils.deleteDirectory(hdfsTmpDir);
   }

View File

@@ -55,7 +55,7 @@ class HadoopIOPeon implements IOPeon
   }

   @Override
-  public void cleanup() throws IOException
+  public void close() throws IOException
   {
     throw new UnsupportedOperationException();
   }

View File

@@ -33,6 +33,7 @@ import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
 import io.druid.segment.indexing.DataSchema;
 import io.druid.segment.indexing.granularity.UniformGranularitySpec;
+import org.apache.commons.io.FileUtils;
 import org.joda.time.Interval;
 import org.junit.Assert;
 import org.junit.Test;
@@ -95,79 +96,96 @@ public class DetermineHashedPartitionsJobTest
     );
   }

-  public DetermineHashedPartitionsJobTest(String dataFilePath, long targetPartitionSize, String interval, int errorMargin, int expectedNumTimeBuckets, int[] expectedNumOfShards) throws IOException
+  public DetermineHashedPartitionsJobTest(
+      String dataFilePath,
+      long targetPartitionSize,
+      String interval,
+      int errorMargin,
+      int expectedNumTimeBuckets,
+      int[] expectedNumOfShards
+  ) throws IOException
   {
     this.expectedNumOfShards = expectedNumOfShards;
     this.expectedNumTimeBuckets = expectedNumTimeBuckets;
     this.errorMargin = errorMargin;
     File tmpDir = Files.createTempDir();
-    tmpDir.deleteOnExit();

-    HadoopIngestionSpec ingestionSpec = new HadoopIngestionSpec(
-        new DataSchema(
-            "test_schema",
-            HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
-                new StringInputRowParser(
-                    new DelimitedParseSpec(
-                        new TimestampSpec("ts", null, null),
-                        new DimensionsSpec(
-                            DimensionsSpec.getDefaultSchemas(ImmutableList.of("market", "quality", "placement", "placementish")),
-                            null,
-                            null
-                        ),
-                        "\t",
-                        null,
-                        Arrays.asList(
-                            "ts",
-                            "market",
-                            "quality",
-                            "placement",
-                            "placementish",
-                            "index"
-                        )
-                    ),
-                    null
-                ),
-                Map.class
-            ),
-            new AggregatorFactory[]{new DoubleSumAggregatorFactory("index", "index")},
-            new UniformGranularitySpec(
-                Granularity.DAY,
-                QueryGranularities.NONE,
-                ImmutableList.of(new Interval(interval))
-            ),
-            HadoopDruidIndexerConfig.JSON_MAPPER
-        ),
-        new HadoopIOConfig(
-            ImmutableMap.<String, Object>of(
-                "paths",
-                dataFilePath,
-                "type",
-                "static"
-            ), null, tmpDir.getAbsolutePath()
-        ),
-        new HadoopTuningConfig(
-            tmpDir.getAbsolutePath(),
-            null,
-            new HashedPartitionsSpec(targetPartitionSize, null, true, null, null),
-            null,
-            null,
-            null,
-            false,
-            false,
-            false,
-            false,
-            null,
-            false,
-            false,
-            null,
-            null,
-            null,
-            false,
-            false
-        )
-    );
-    this.indexerConfig = new HadoopDruidIndexerConfig(ingestionSpec);
+    try {
+      HadoopIngestionSpec ingestionSpec = new HadoopIngestionSpec(
+          new DataSchema(
+              "test_schema",
+              HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
+                  new StringInputRowParser(
+                      new DelimitedParseSpec(
+                          new TimestampSpec("ts", null, null),
+                          new DimensionsSpec(
+                              DimensionsSpec.getDefaultSchemas(ImmutableList.of(
+                                  "market",
+                                  "quality",
+                                  "placement",
+                                  "placementish"
+                              )),
+                              null,
+                              null
+                          ),
+                          "\t",
+                          null,
+                          Arrays.asList(
+                              "ts",
+                              "market",
+                              "quality",
+                              "placement",
+                              "placementish",
+                              "index"
+                          )
+                      ),
+                      null
+                  ),
+                  Map.class
+              ),
+              new AggregatorFactory[]{new DoubleSumAggregatorFactory("index", "index")},
+              new UniformGranularitySpec(
+                  Granularity.DAY,
+                  QueryGranularities.NONE,
+                  ImmutableList.of(new Interval(interval))
+              ),
+              HadoopDruidIndexerConfig.JSON_MAPPER
+          ),
+          new HadoopIOConfig(
+              ImmutableMap.<String, Object>of(
+                  "paths",
+                  dataFilePath,
+                  "type",
+                  "static"
+              ), null, tmpDir.getAbsolutePath()
+          ),
+          new HadoopTuningConfig(
+              tmpDir.getAbsolutePath(),
+              null,
+              new HashedPartitionsSpec(targetPartitionSize, null, true, null, null),
+              null,
+              null,
+              null,
+              false,
+              false,
+              false,
+              false,
+              null,
+              false,
+              false,
+              null,
+              null,
+              null,
+              false,
+              false
+          )
+      );
+      this.indexerConfig = new HadoopDruidIndexerConfig(ingestionSpec);
+    }
+    finally {
+      FileUtils.deleteDirectory(tmpDir);
+    }
   }

   @Test

View File

@@ -69,8 +69,8 @@ public class HadoopIOPeonTest
     Assert.assertNotNull(ioPeon.makeInputStream(tmpFolder.newFile(TMP_FILE_NAME).getName()));
   }

-  @Test(expected = UnsupportedOperationException.class) public void testCleanup() throws IOException
+  @Test(expected = UnsupportedOperationException.class) public void testClose() throws IOException
   {
-    ioPeon.cleanup();
+    ioPeon.close();
   }
 }

View File

@@ -26,6 +26,7 @@ import com.google.common.util.concurrent.MoreExecutors;
 import io.druid.common.utils.UUIDUtils;
 import io.druid.java.util.common.StringUtils;
 import junit.framework.Assert;
+import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -71,7 +72,6 @@ public class HdfsClasspathSetupTest
   public static void setupStatic() throws IOException, ClassNotFoundException
   {
     hdfsTmpDir = File.createTempFile("hdfsClasspathSetupTest", "dir");
-    hdfsTmpDir.deleteOnExit();
     if (!hdfsTmpDir.delete()) {
       throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()));
     }
@@ -100,6 +100,7 @@
     if (miniCluster != null) {
       miniCluster.shutdown(true);
     }
+    FileUtils.deleteDirectory(hdfsTmpDir);
   }

   @After

View File

@@ -53,8 +53,6 @@ import java.util.zip.GZIPOutputStream;

 public class CompressionUtilsTest
 {
-  @Rule
-  public final TemporaryFolder temporaryFolder = new TemporaryFolder();
   private static final String content;
   private static final byte[] expected;
   private static final byte[] gzBytes;
@@ -85,9 +83,19 @@ public class CompressionUtilsTest
     gzBytes = gzByteStream.toByteArray();
   }

+  @Rule
+  public final TemporaryFolder temporaryFolder = new TemporaryFolder();
+
   private File testDir;
   private File testFile;

+  public static void assertGoodDataStream(InputStream stream) throws IOException
+  {
+    try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length)) {
+      ByteStreams.copy(stream, bos);
+      Assert.assertArrayEquals(expected, bos.toByteArray());
+    }
+  }
+
   @Before
   public void setUp() throws IOException
   {
@@ -99,14 +107,6 @@
     Assert.assertTrue(testFile.getParentFile().equals(testDir));
   }

-  public static void assertGoodDataStream(InputStream stream) throws IOException
-  {
-    try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length)) {
-      ByteStreams.copy(stream, bos);
-      Assert.assertArrayEquals(expected, bos.toByteArray());
-    }
-  }
-
   @Test
   public void testGoodGzNameResolution()
   {
@@ -131,15 +131,24 @@
   {
     final File tmpDir = temporaryFolder.newFolder("testGoodZipCompressUncompress");
     final File zipFile = new File(tmpDir, "compressionUtilTest.zip");
-    zipFile.deleteOnExit();
-    CompressionUtils.zip(testDir, zipFile);
-    final File newDir = new File(tmpDir, "newDir");
-    newDir.mkdir();
-    CompressionUtils.unzip(zipFile, newDir);
-    final Path newPath = Paths.get(newDir.getAbsolutePath(), testFile.getName());
-    Assert.assertTrue(newPath.toFile().exists());
-    try (final FileInputStream inputStream = new FileInputStream(newPath.toFile())) {
-      assertGoodDataStream(inputStream);
+    try {
+      CompressionUtils.zip(testDir, zipFile);
+      final File newDir = new File(tmpDir, "newDir");
+      newDir.mkdir();
+      CompressionUtils.unzip(zipFile, newDir);
+      final Path newPath = Paths.get(newDir.getAbsolutePath(), testFile.getName());
+      Assert.assertTrue(newPath.toFile().exists());
+      try (final FileInputStream inputStream = new FileInputStream(newPath.toFile())) {
+        assertGoodDataStream(inputStream);
+      }
+    }
+    finally {
+      if (zipFile.exists()) {
+        zipFile.delete();
+      }
+      if (tmpDir.exists()) {
+        tmpDir.delete();
+      }
     }
   }
@@ -252,53 +261,6 @@
     }
   }

-  private static class ZeroRemainingInputStream extends FilterInputStream
-  {
-    private final AtomicInteger pos = new AtomicInteger(0);
-
-    protected ZeroRemainingInputStream(InputStream in)
-    {
-      super(in);
-    }
-
-    @Override
-    public synchronized void reset() throws IOException
-    {
-      super.reset();
-      pos.set(0);
-    }
-
-    @Override
-    public int read(byte b[]) throws IOException
-    {
-      final int len = Math.min(b.length, gzBytes.length - pos.get() % gzBytes.length);
-      pos.addAndGet(len);
-      return read(b, 0, len);
-    }
-
-    @Override
-    public int read() throws IOException
-    {
-      pos.incrementAndGet();
-      return super.read();
-    }
-
-    @Override
-    public int read(byte b[], int off, int len) throws IOException
-    {
-      final int l = Math.min(len, gzBytes.length - pos.get() % gzBytes.length);
-      pos.addAndGet(l);
-      return super.read(b, off, l);
-    }
-
-    @Override
-    public int available() throws IOException
-    {
-      return 0;
-    }
-  }
-
   @Test
   // Sanity check to make sure the test class works as expected
   public void testZeroRemainingInputStream() throws IOException
@@ -410,7 +372,6 @@
     }
   }

-
   @Test
   // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144
   public void testGunzipBugStreamWorkarround() throws IOException
@@ -539,7 +500,6 @@
     Assert.assertEquals(4, flushes.get()); // 2 for suppressed closes, 2 for manual calls to shake out errors
   }

-
   @Test(expected = IOException.class)
   public void testStreamErrorGzip() throws Exception
   {
@@ -596,4 +556,50 @@
         )
     );
   }
+
+  private static class ZeroRemainingInputStream extends FilterInputStream
+  {
+    private final AtomicInteger pos = new AtomicInteger(0);
+
+    protected ZeroRemainingInputStream(InputStream in)
+    {
+      super(in);
+    }
+
+    @Override
+    public synchronized void reset() throws IOException
+    {
+      super.reset();
+      pos.set(0);
+    }
+
+    @Override
+    public int read(byte b[]) throws IOException
+    {
+      final int len = Math.min(b.length, gzBytes.length - pos.get() % gzBytes.length);
+      pos.addAndGet(len);
+      return read(b, 0, len);
+    }
+
+    @Override
+    public int read() throws IOException
+    {
+      pos.incrementAndGet();
+      return super.read();
+    }
+
+    @Override
+    public int read(byte b[], int off, int len) throws IOException
+    {
+      final int l = Math.min(len, gzBytes.length - pos.get() % gzBytes.length);
+      pos.addAndGet(l);
+      return super.read(b, off, l);
+    }
+
+    @Override
+    public int available() throws IOException
+    {
+      return 0;
+    }
+  }
 }

View File

@@ -642,7 +642,7 @@ public class IndexMerger
           @Override
           public void close() throws IOException
           {
-            ioPeon.cleanup();
+            ioPeon.close();
           }
         });
     try {

View File

@@ -135,7 +135,7 @@ public class IndexMergerV9 extends IndexMerger
          @Override
          public void close() throws IOException
          {
-            ioPeon.cleanup();
+            ioPeon.close();
          }
        });
    final FileSmoosher v9Smoosher = new FileSmoosher(outDir);

View File

@@ -210,7 +210,7 @@ public class StringDimensionMergerLegacy extends StringDimensionMergerV9 impleme
       spatialWriter.close();
       serializerUtils.writeString(spatialIndexFile, dimensionName);
       ByteStreams.copy(spatialWriter.combineStreams(), spatialIndexFile);
-      spatialIoPeon.cleanup();
+      spatialIoPeon.close();
     }
   }

View File

@@ -19,15 +19,15 @@
 package io.druid.segment.data;

+import java.io.Closeable;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;

 /**
 */
-public interface IOPeon
+public interface IOPeon extends Closeable
 {
   public OutputStream makeOutputStream(String filename) throws IOException;
   public InputStream makeInputStream(String filename) throws IOException;
-
-  public void cleanup() throws IOException;
 }
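Because IOPeon now extends Closeable, call sites can scope its temporary files with try-with-resources instead of remembering a bespoke cleanup() call. A hypothetical caller (not part of this patch) could look like:

// Hypothetical usage; TmpFileIOPeon.close() deletes every temp file it created.
try (IOPeon ioPeon = new TmpFileIOPeon()) {
  try (OutputStream out = ioPeon.makeOutputStream("values.bin")) {
    out.write(42);
  }
  try (InputStream in = ioPeon.makeInputStream("values.bin")) {
    // ... read the intermediate data back ...
  }
}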

View File

@@ -53,7 +53,6 @@ public class TmpFileIOPeon implements IOPeon
     File retFile = createdFiles.get(filename);
     if (retFile == null) {
       retFile = File.createTempFile("filePeon", filename);
-      retFile.deleteOnExit();
       createdFiles.put(filename, retFile);
       return new BufferedOutputStream(new FileOutputStream(retFile));
     } else if (allowOverwrite) {
@@ -72,7 +71,7 @@
   }

   @Override
-  public void cleanup() throws IOException
+  public void close() throws IOException
   {
     for (File file : createdFiles.values()) {
       file.delete();

View File

@ -28,6 +28,7 @@ import io.druid.segment.column.Column;
import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexAdapter; import io.druid.segment.incremental.IncrementalIndexAdapter;
import io.druid.segment.incremental.OnheapIncrementalIndex; import io.druid.segment.incremental.OnheapIncrementalIndex;
import org.apache.commons.io.FileUtils;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -46,32 +47,40 @@ public class EmptyIndexTest
if (!tmpDir.mkdir()) { if (!tmpDir.mkdir()) {
throw new IllegalStateException("tmp mkdir failed"); throw new IllegalStateException("tmp mkdir failed");
} }
tmpDir.deleteOnExit();
IncrementalIndex emptyIndex = new OnheapIncrementalIndex( try {
0, IncrementalIndex emptyIndex = new OnheapIncrementalIndex(
QueryGranularities.NONE, 0,
new AggregatorFactory[0], QueryGranularities.NONE,
1000 new AggregatorFactory[0],
); 1000
IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter( );
new Interval("2012-08-01/P3D"), IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter(
emptyIndex, new Interval("2012-08-01/P3D"),
new ConciseBitmapFactory() emptyIndex,
); new ConciseBitmapFactory()
TestHelper.getTestIndexMerger().merge( );
Lists.<IndexableAdapter>newArrayList(emptyIndexAdapter), TestHelper.getTestIndexMerger().merge(
true, Lists.<IndexableAdapter>newArrayList(emptyIndexAdapter),
new AggregatorFactory[0], true,
tmpDir, new AggregatorFactory[0],
new IndexSpec() tmpDir,
); new IndexSpec()
);
QueryableIndex emptyQueryableIndex = TestHelper.getTestIndexIO().loadIndex(tmpDir); QueryableIndex emptyQueryableIndex = TestHelper.getTestIndexIO().loadIndex(tmpDir);
Assert.assertEquals("getDimensionNames", 0, Iterables.size(emptyQueryableIndex.getAvailableDimensions())); Assert.assertEquals("getDimensionNames", 0, Iterables.size(emptyQueryableIndex.getAvailableDimensions()));
Assert.assertEquals("getMetricNames", 0, Iterables.size(emptyQueryableIndex.getColumnNames())); Assert.assertEquals("getMetricNames", 0, Iterables.size(emptyQueryableIndex.getColumnNames()));
Assert.assertEquals("getDataInterval", new Interval("2012-08-01/P3D"), emptyQueryableIndex.getDataInterval()); Assert.assertEquals("getDataInterval", new Interval("2012-08-01/P3D"), emptyQueryableIndex.getDataInterval());
Assert.assertEquals("getReadOnlyTimestamps", 0, emptyQueryableIndex.getColumn(Column.TIME_COLUMN_NAME).getLength()); Assert.assertEquals(
"getReadOnlyTimestamps",
0,
emptyQueryableIndex.getColumn(Column.TIME_COLUMN_NAME).getLength()
);
}
finally {
FileUtils.deleteDirectory(tmpDir);
}
} }
} }
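
The try/finally plus FileUtils.deleteDirectory pattern guarantees cleanup even when the merge throws. In JUnit tests, the TemporaryFolder rule (already used by LocalDataSegmentPullerTest further down) gives the same guarantee with less ceremony; a minimal, hypothetical sketch:

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;

public class TempDirCleanupExample
{
  @Rule
  public final TemporaryFolder temporaryFolder = new TemporaryFolder();

  @Test
  public void usesRuleManagedDir() throws Exception
  {
    // The rule creates the directory before the test and deletes it afterwards,
    // so neither deleteOnExit() nor an explicit finally block is needed.
    File tmpDir = temporaryFolder.newFolder();
    // ... exercise code that writes into tmpDir ...
  }
}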

View File

@ -46,6 +46,7 @@ import io.druid.query.timeseries.TimeseriesResultValue;
import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.incremental.OnheapIncrementalIndex; import io.druid.segment.incremental.OnheapIncrementalIndex;
import org.apache.commons.io.FileUtils;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.junit.Test; import org.junit.Test;
@ -256,10 +257,14 @@ public class IndexMergerV9WithSpatialIndexTest
File tmpFile = File.createTempFile("billy", "yay"); File tmpFile = File.createTempFile("billy", "yay");
tmpFile.delete(); tmpFile.delete();
tmpFile.mkdirs(); tmpFile.mkdirs();
tmpFile.deleteOnExit();
INDEX_MERGER_V9.persist(theIndex, tmpFile, indexSpec); try {
return INDEX_IO.loadIndex(tmpFile); INDEX_MERGER_V9.persist(theIndex, tmpFile, indexSpec);
return INDEX_IO.loadIndex(tmpFile);
}
finally {
FileUtils.deleteDirectory(tmpFile);
}
} }
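
One subtlety in the pattern above: the finally block deletes tmpFile while the returned QueryableIndex may still be memory-mapping files inside it. On POSIX filesystems an unlinked file remains readable through existing mappings, so this is safe on Linux and macOS, but the same code would fail on Windows, where mapped files cannot be deleted.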
private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec) private static QueryableIndex makeMergedQueryableIndex(IndexSpec indexSpec)
@ -470,33 +475,38 @@ public class IndexMergerV9WithSpatialIndexTest
File mergedFile = new File(tmpFile, "merged"); File mergedFile = new File(tmpFile, "merged");
firstFile.mkdirs(); firstFile.mkdirs();
firstFile.deleteOnExit();
secondFile.mkdirs(); secondFile.mkdirs();
secondFile.deleteOnExit();
thirdFile.mkdirs(); thirdFile.mkdirs();
thirdFile.deleteOnExit();
mergedFile.mkdirs(); mergedFile.mkdirs();
mergedFile.deleteOnExit();
INDEX_MERGER_V9.persist(first, DATA_INTERVAL, firstFile, indexSpec); INDEX_MERGER_V9.persist(first, DATA_INTERVAL, firstFile, indexSpec);
INDEX_MERGER_V9.persist(second, DATA_INTERVAL, secondFile, indexSpec); INDEX_MERGER_V9.persist(second, DATA_INTERVAL, secondFile, indexSpec);
INDEX_MERGER_V9.persist(third, DATA_INTERVAL, thirdFile, indexSpec); INDEX_MERGER_V9.persist(third, DATA_INTERVAL, thirdFile, indexSpec);
QueryableIndex mergedRealtime = INDEX_IO.loadIndex( try {
INDEX_MERGER_V9.mergeQueryableIndex( QueryableIndex mergedRealtime = INDEX_IO.loadIndex(
Arrays.asList( INDEX_MERGER_V9.mergeQueryableIndex(
INDEX_IO.loadIndex(firstFile), Arrays.asList(
INDEX_IO.loadIndex(secondFile), INDEX_IO.loadIndex(firstFile),
INDEX_IO.loadIndex(thirdFile) INDEX_IO.loadIndex(secondFile),
), INDEX_IO.loadIndex(thirdFile)
true, ),
METRIC_AGGS, true,
mergedFile, METRIC_AGGS,
indexSpec mergedFile,
) indexSpec
); )
);
return mergedRealtime;
}
finally {
FileUtils.deleteDirectory(firstFile);
FileUtils.deleteDirectory(secondFile);
FileUtils.deleteDirectory(thirdFile);
FileUtils.deleteDirectory(mergedFile);
}
return mergedRealtime;
} }
catch (IOException e) { catch (IOException e) {
throw Throwables.propagate(e); throw Throwables.propagate(e);

View File

@ -92,7 +92,7 @@ public class CompressedIntsIndexedWriterTest
@After @After
public void tearDown() throws Exception public void tearDown() throws Exception
{ {
ioPeon.cleanup(); ioPeon.close();
} }
private void generateVals(final int totalSize, final int maxValue) throws IOException private void generateVals(final int totalSize, final int maxValue) throws IOException

View File

@ -182,7 +182,7 @@ public class CompressedVSizeIndexedV3WriterTest
@After @After
public void tearDown() throws Exception public void tearDown() throws Exception
{ {
ioPeon.cleanup(); ioPeon.close();
} }
@Test @Test

View File

@ -91,7 +91,7 @@ public class CompressedVSizeIntsIndexedWriterTest
@After @After
public void tearDown() throws Exception public void tearDown() throws Exception
{ {
ioPeon.cleanup(); ioPeon.close();
} }
private void generateVals(final int totalSize, final int maxValue) throws IOException private void generateVals(final int totalSize, final int maxValue) throws IOException

View File

@ -61,7 +61,7 @@ class IOPeonForTesting implements IOPeon
} }
@Override @Override
public void cleanup() throws IOException public void close() throws IOException
{ {
outStreams.clear(); outStreams.clear();
} }

View File

@ -50,7 +50,7 @@ public class VSizeIndexedIntsWriterTest
@After @After
public void tearDown() throws Exception public void tearDown() throws Exception
{ {
ioPeon.cleanup(); ioPeon.close();
} }
private void generateVals(final int totalSize, final int maxValue) throws IOException private void generateVals(final int totalSize, final int maxValue) throws IOException

View File

@ -20,9 +20,9 @@
package io.druid.segment.loading; package io.druid.segment.loading;
import com.google.common.io.Files; import com.google.common.io.Files;
import io.druid.java.util.common.CompressionUtils; import io.druid.java.util.common.CompressionUtils;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Rule; import org.junit.Rule;
@ -49,10 +49,15 @@ public class LocalDataSegmentPullerTest
public void setup() throws IOException public void setup() throws IOException
{ {
tmpDir = temporaryFolder.newFolder(); tmpDir = temporaryFolder.newFolder();
tmpDir.deleteOnExit();
puller = new LocalDataSegmentPuller(); puller = new LocalDataSegmentPuller();
} }
@After
public void after() throws IOException
{
FileUtils.deleteDirectory(tmpDir);
}
@Test @Test
public void simpleZipTest() throws IOException, SegmentLoadingException public void simpleZipTest() throws IOException, SegmentLoadingException
{ {

View File

@ -96,6 +96,7 @@ public class RealtimePlumberSchoolTest
private DataSchema schema; private DataSchema schema;
private DataSchema schema2; private DataSchema schema2;
private FireDepartmentMetrics metrics; private FireDepartmentMetrics metrics;
private File tmpDir;
public RealtimePlumberSchoolTest(RejectionPolicyFactory rejectionPolicy, boolean buildV9Directly) public RealtimePlumberSchoolTest(RejectionPolicyFactory rejectionPolicy, boolean buildV9Directly)
{ {
@ -124,8 +125,7 @@ public class RealtimePlumberSchoolTest
@Before @Before
public void setUp() throws Exception public void setUp() throws Exception
{ {
final File tmpDir = Files.createTempDir(); tmpDir = Files.createTempDir();
tmpDir.deleteOnExit();
ObjectMapper jsonMapper = new DefaultObjectMapper(); ObjectMapper jsonMapper = new DefaultObjectMapper();
@ -237,6 +237,7 @@ public class RealtimePlumberSchoolTest
schema.getDataSource() schema.getDataSource()
) )
); );
FileUtils.deleteDirectory(tmpDir);
} }
@Test(timeout = 60000) @Test(timeout = 60000)