diff --git a/extensions/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java b/extensions/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java index 49ba5c2fdde..e0ade6f6971 100644 --- a/extensions/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java +++ b/extensions/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java @@ -26,7 +26,9 @@ import io.druid.data.input.MapBasedRow; import io.druid.granularity.QueryGranularity; import io.druid.query.aggregation.AggregationTestHelper; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import java.io.File; @@ -36,11 +38,14 @@ public class ApproximateHistogramAggregationTest { private AggregationTestHelper helper; + @Rule + public final TemporaryFolder tempFolder = new TemporaryFolder(); + public ApproximateHistogramAggregationTest() { ApproximateHistogramDruidModule module = new ApproximateHistogramDruidModule(); module.configure(null); - helper = new AggregationTestHelper(Lists.newArrayList(module.getJacksonModules())); + helper = new AggregationTestHelper(Lists.newArrayList(module.getJacksonModules()), tempFolder); } @Test diff --git a/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java b/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java index a26a7519305..f286a3bc792 100644 --- a/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java +++ b/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java @@ -29,7 +29,6 @@ import com.google.common.base.Throwables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Closeables; -import com.google.common.io.Files; import com.google.common.util.concurrent.ListenableFuture; 
import com.metamx.common.guava.CloseQuietly; import com.metamx.common.guava.Sequence; @@ -63,9 +62,9 @@ import io.druid.segment.Segment; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IndexSizeExceededException; import io.druid.segment.incremental.OnheapIncrementalIndex; -import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.LineIterator; +import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.FileInputStream; @@ -89,8 +88,11 @@ public class AggregationTestHelper private final GroupByQueryQueryToolChest toolChest; private final GroupByQueryRunnerFactory factory; - public AggregationTestHelper(List jsonModulesToRegister) + private final TemporaryFolder tempFolder; + + public AggregationTestHelper(List jsonModulesToRegister, TemporaryFolder tempFolder) { + this.tempFolder = tempFolder; mapper = new DefaultObjectMapper(); for(Module mod : jsonModulesToRegister) { @@ -141,13 +143,9 @@ public class AggregationTestHelper String groupByQueryJson ) throws Exception { - File segmentDir = Files.createTempDir(); - try { - createIndex(inputDataFile, parserJson, aggregators, segmentDir, minTimestamp, gran, maxRowCount); - return runQueryOnSegments(Lists.newArrayList(segmentDir), groupByQueryJson); - } finally { - FileUtils.deleteDirectory(segmentDir); - } + File segmentDir = tempFolder.newFolder(); + createIndex(inputDataFile, parserJson, aggregators, segmentDir, minTimestamp, gran, maxRowCount); + return runQueryOnSegments(Lists.newArrayList(segmentDir), groupByQueryJson); } public Sequence createIndexAndRunQueryOnSegment( @@ -160,13 +158,9 @@ public class AggregationTestHelper String groupByQueryJson ) throws Exception { - File segmentDir = Files.createTempDir(); - try { - createIndex(inputDataStream, parserJson, aggregators, segmentDir, minTimestamp, gran, maxRowCount); - return runQueryOnSegments(Lists.newArrayList(segmentDir), groupByQueryJson); - } finally { 
- FileUtils.deleteDirectory(segmentDir); - } + File segmentDir = tempFolder.newFolder(); + createIndex(inputDataStream, parserJson, aggregators, segmentDir, minTimestamp, gran, maxRowCount); + return runQueryOnSegments(Lists.newArrayList(segmentDir), groupByQueryJson); } public void createIndex( @@ -255,7 +249,7 @@ public class AggregationTestHelper } } catch (IndexSizeExceededException ex) { - File tmp = Files.createTempDir(); + File tmp = tempFolder.newFolder(); toMerge.add(tmp); IndexMerger.persist(index, tmp, null, new IndexSpec()); index.close(); @@ -264,7 +258,7 @@ public class AggregationTestHelper } if (toMerge.size() > 0) { - File tmp = Files.createTempDir(); + File tmp = tempFolder.newFolder(); toMerge.add(tmp); IndexMerger.persist(index, tmp, null, new IndexSpec()); @@ -285,10 +279,6 @@ public class AggregationTestHelper if (index != null) { index.close(); } - - for (File file : toMerge) { - FileUtils.deleteDirectory(file); - } } } diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java index e3de00c44e6..dcc613125e4 100644 --- a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java @@ -27,16 +27,21 @@ import io.druid.granularity.QueryGranularity; import io.druid.jackson.AggregatorsModule; import io.druid.query.aggregation.AggregationTestHelper; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import java.io.File; public class HyperUniquesAggregationTest { + @Rule + public final TemporaryFolder tempFolder = new TemporaryFolder(); + @Test public void testIngestAndQuery() throws Exception { - AggregationTestHelper helper = new AggregationTestHelper(Lists.newArrayList(new AggregatorsModule())); + 
AggregationTestHelper helper = new AggregationTestHelper(Lists.newArrayList(new AggregatorsModule()), tempFolder); String metricSpec = "[{" + "\"type\": \"hyperUnique\","