diff --git a/indexing-service/src/main/java/io/druid/indexing/common/TaskToolbox.java b/indexing-service/src/main/java/io/druid/indexing/common/TaskToolbox.java
index 0b19d9fd5e4..5eb50b622c6 100644
--- a/indexing-service/src/main/java/io/druid/indexing/common/TaskToolbox.java
+++ b/indexing-service/src/main/java/io/druid/indexing/common/TaskToolbox.java
@@ -28,7 +28,6 @@ import com.google.common.collect.Multimaps;
 import com.metamx.emitter.service.ServiceEmitter;
 import com.metamx.metrics.MonitorScheduler;
 import io.druid.client.FilteredServerView;
-import io.druid.collections.StupidPool;
 import io.druid.indexing.common.actions.SegmentInsertAction;
 import io.druid.indexing.common.actions.TaskActionClient;
 import io.druid.indexing.common.actions.TaskActionClientFactory;
@@ -47,7 +46,6 @@ import org.joda.time.Interval;
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -212,5 +210,4 @@ public class TaskToolbox
   {
     return taskWorkDir;
   }
-
 }
diff --git a/indexing-service/src/main/java/io/druid/indexing/common/TaskToolboxFactory.java b/indexing-service/src/main/java/io/druid/indexing/common/TaskToolboxFactory.java
index fd9365dc05d..89d275b9a7f 100644
--- a/indexing-service/src/main/java/io/druid/indexing/common/TaskToolboxFactory.java
+++ b/indexing-service/src/main/java/io/druid/indexing/common/TaskToolboxFactory.java
@@ -24,8 +24,6 @@ import com.google.inject.Inject;
 import com.metamx.emitter.service.ServiceEmitter;
 import com.metamx.metrics.MonitorScheduler;
 import io.druid.client.FilteredServerView;
-import io.druid.collections.StupidPool;
-import io.druid.guice.annotations.Global;
 import io.druid.guice.annotations.Processing;
 import io.druid.indexing.common.actions.TaskActionClientFactory;
 import io.druid.indexing.common.config.TaskConfig;
@@ -38,7 +36,6 @@ import io.druid.segment.loading.DataSegmentPusher;
 import io.druid.server.coordination.DataSegmentAnnouncer;
 
 import java.io.File;
-import java.nio.ByteBuffer;
 import java.util.concurrent.ExecutorService;
 
 /**
@@ -76,9 +73,7 @@ public class TaskToolboxFactory
       @Processing ExecutorService queryExecutorService,
       MonitorScheduler monitorScheduler,
       SegmentLoaderFactory segmentLoaderFactory,
-      ObjectMapper objectMapper,
-      //TODO: have a separate index pool
-      @Global StupidPool<ByteBuffer> bufferPool
+      ObjectMapper objectMapper
   )
   {
     this.config = config;
diff --git a/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java b/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java
index a34a24b4550..11f6bb2264d 100644
--- a/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java
+++ b/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java
@@ -31,16 +31,13 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.metamx.common.Granularity;
 import com.metamx.common.logger.Logger;
-import io.druid.collections.StupidPool;
 import io.druid.data.input.InputRow;
-import io.druid.guice.annotations.Global;
 import io.druid.query.Query;
 import io.druid.query.QueryRunner;
 import io.druid.segment.IndexIO;
 import io.druid.segment.IndexMerger;
 import io.druid.segment.QueryableIndex;
 import io.druid.segment.SegmentUtils;
-import io.druid.segment.incremental.IncrementalIndex;
 import io.druid.segment.indexing.DataSchema;
 import io.druid.segment.indexing.RealtimeTuningConfig;
 import io.druid.segment.loading.DataSegmentPusher;
@@ -52,11 +49,9 @@ import io.druid.segment.realtime.plumber.Sink;
 import io.druid.timeline.DataSegment;
 import org.apache.commons.io.FileUtils;
 import org.joda.time.Interval;
-import sun.misc.JavaNioAccess;
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.Set;
 
@@ -79,7 +74,7 @@ public class YeOldePlumberSchool implements PlumberSchool
       @JsonProperty("version") String version,
       @JacksonInject("segmentPusher") DataSegmentPusher dataSegmentPusher,
       @JacksonInject("tmpSegmentDir") File tmpSegmentDir
-      )
+  )
   {
     this.interval = interval;
     this.version = version;
@@ -216,13 +211,14 @@ public class YeOldePlumberSchool implements PlumberSchool
       log.info("Spilling index[%d] with rows[%d] to: %s", indexToPersist.getCount(), rowsToPersist, dirToPersist);
 
       try {
-        final IncrementalIndex index = indexToPersist.getIndex();
+
         IndexMerger.persist(
             indexToPersist.getIndex(),
             dirToPersist
         );
 
         indexToPersist.swapSegment(null);
+
         metrics.incrementRowOutputCount(rowsToPersist);
 
         spilled.add(dirToPersist);
diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java
index 9068535f38a..30325c9d398 100644
--- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java
+++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java
@@ -43,7 +43,6 @@ import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
 import io.druid.data.input.impl.InputRowParser;
 import io.druid.granularity.QueryGranularity;
-import io.druid.offheap.OffheapBufferPool;
 import io.druid.segment.indexing.granularity.UniformGranularitySpec;
 import io.druid.indexing.common.SegmentLoaderFactory;
 import io.druid.indexing.common.TaskLock;
@@ -206,8 +205,7 @@ public class TaskLifecycleTest
               }
             )
         ),
-        new DefaultObjectMapper(),
-        new OffheapBufferPool(1024 * 1024)
+        new DefaultObjectMapper()
     );
     tr = new ThreadPoolTaskRunner(tb);
     tq = new TaskQueue(tqc, ts, tr, tac, tl, emitter);
diff --git a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java
index 4f88488e436..8d4bf32b870 100644
--- a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java
+++ b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java
@@ -38,7 +38,6 @@ import io.druid.indexing.overlord.TestRemoteTaskRunnerConfig;
 import io.druid.indexing.overlord.ThreadPoolTaskRunner;
 import io.druid.indexing.worker.config.WorkerConfig;
 import io.druid.jackson.DefaultObjectMapper;
-import io.druid.offheap.OffheapBufferPool;
 import io.druid.segment.loading.DataSegmentPuller;
 import io.druid.segment.loading.LocalDataSegmentPuller;
 import io.druid.segment.loading.OmniSegmentLoader;
@@ -139,8 +138,7 @@ public class WorkerTaskMonitorTest
                       }
                     }
                 )
-            ), jsonMapper,
-            new OffheapBufferPool(1024 * 1024)
+            ), jsonMapper
         )
     ),
     new WorkerConfig().setCapacity(1)
diff --git a/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java.orig b/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java.orig
deleted file mode 100644
index b2cf4965b22..00000000000
--- a/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java.orig
+++ /dev/null
@@ -1,195 +0,0 @@
-/*
- * Druid - a distributed column store.
- * Copyright (C) 2014  Metamarkets Group Inc.
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version 2
- * of the License, or (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
- */
-
-package io.druid.query;
-
-import com.google.common.base.Function;
-import com.google.common.base.Predicates;
-import com.google.common.base.Supplier;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Ordering;
-import com.google.common.util.concurrent.Futures;
-import com.google.common.util.concurrent.ListenableFuture;
-import com.google.common.util.concurrent.ListeningExecutorService;
-import com.google.common.util.concurrent.MoreExecutors;
-import com.metamx.common.Pair;
-import com.metamx.common.guava.Accumulator;
-import com.metamx.common.guava.ResourceClosingSequence;
-import com.metamx.common.guava.Sequence;
-import com.metamx.common.guava.Sequences;
-import com.metamx.common.logger.Logger;
-import io.druid.collections.StupidPool;
-import io.druid.data.input.Row;
-import io.druid.query.groupby.GroupByQuery;
-import io.druid.query.groupby.GroupByQueryConfig;
-import io.druid.query.groupby.GroupByQueryHelper;
-import io.druid.segment.incremental.IncrementalIndex;
-
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.List;
-import java.util.concurrent.CancellationException;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.TimeoutException;
-
-
-public class GroupByParallelQueryRunner implements QueryRunner<Row>
-{
-  private static final Logger log = new Logger(GroupByParallelQueryRunner.class);
-  private final Iterable<QueryRunner<Row>> queryables;
-  private final ListeningExecutorService exec;
-  private final Ordering<Row> ordering;
-  private final Supplier<GroupByQueryConfig> configSupplier;
-<<<<<<< HEAD
-  private final StupidPool<ByteBuffer> bufferPool;
-=======
-  private final QueryWatcher queryWatcher;
->>>>>>> master
-
-
-  public GroupByParallelQueryRunner(
-      ExecutorService exec,
-      Ordering<Row> ordering,
-      Supplier<GroupByQueryConfig> configSupplier,
-<<<<<<< HEAD
-      StupidPool<ByteBuffer> bufferPool,
-      QueryRunner<Row>... queryables
-  )
-  {
-    this(exec, ordering, configSupplier, bufferPool, Arrays.asList(queryables));
-=======
-      QueryWatcher queryWatcher,
-      QueryRunner<Row>... queryables
-  )
-  {
-    this(exec, ordering, configSupplier, queryWatcher, Arrays.asList(queryables));
->>>>>>> master
-  }
-
-  public GroupByParallelQueryRunner(
-      ExecutorService exec,
-<<<<<<< HEAD
-      Ordering<Row> ordering,
-      Supplier<GroupByQueryConfig> configSupplier,
-      StupidPool<ByteBuffer> bufferPool,
-=======
-      Ordering<Row> ordering, Supplier<GroupByQueryConfig> configSupplier,
-      QueryWatcher queryWatcher,
->>>>>>> master
-      Iterable<QueryRunner<Row>> queryables
-  )
-  {
-    this.exec = MoreExecutors.listeningDecorator(exec);
-    this.ordering = ordering;
-    this.queryWatcher = queryWatcher;
-    this.queryables = Iterables.unmodifiableIterable(Iterables.filter(queryables, Predicates.notNull()));
-    this.configSupplier = configSupplier;
-    this.bufferPool = bufferPool;
-  }
-
-  @Override
-  public Sequence<Row> run(final Query<Row> queryParam)
-  {
-
-    final GroupByQuery query = (GroupByQuery) queryParam;
-    final Pair<IncrementalIndex, Accumulator<IncrementalIndex, Row>> indexAccumulatorPair = GroupByQueryHelper.createIndexAccumulatorPair(
-        query,
-        configSupplier.get(),
-        bufferPool
-    );
-    final int priority = query.getContextPriority(0);
-
-    if (Iterables.isEmpty(queryables)) {
-      log.warn("No queryables found.");
-    }
-    ListenableFuture<List<Boolean>> futures = Futures.allAsList(
-        Lists.newArrayList(
-            Iterables.transform(
-                queryables,
-                new Function<QueryRunner<Row>, ListenableFuture<Boolean>>()
-                {
-                  @Override
-                  public ListenableFuture<Boolean> apply(final QueryRunner<Row> input)
-                  {
-                    return exec.submit(
-                        new AbstractPrioritizedCallable<Boolean>(priority)
-                        {
-                          @Override
-                          public Boolean call() throws Exception
-                          {
-                            try {
-                              input.run(queryParam).accumulate(indexAccumulatorPair.lhs, indexAccumulatorPair.rhs);
-                              return true;
-                            }
-                            catch (QueryInterruptedException e) {
-                              throw Throwables.propagate(e);
-                            }
-                            catch (Exception e) {
-                              log.error(e, "Exception with one of the sequences!");
-                              throw Throwables.propagate(e);
-                            }
-                          }
-                        }
-                    );
-                  }
-                }
-            )
-        )
-    );
-
-    // Let the runners complete
-    try {
-      queryWatcher.registerQuery(query, futures);
-      final Number timeout = query.getContextValue("timeout", (Number) null);
-      if(timeout == null) {
-        futures.get();
-      } else {
-        futures.get(timeout.longValue(), TimeUnit.MILLISECONDS);
-      }
-    }
-    catch (InterruptedException e) {
-      log.warn(e, "Query interrupted, cancelling pending results, query id [%s]", query.getId());
-      futures.cancel(true);
-      throw new QueryInterruptedException("Query interrupted");
-    }
-    catch(CancellationException e) {
-      throw new QueryInterruptedException("Query cancelled");
-    }
-    catch(TimeoutException e) {
-      log.info("Query timeout, cancelling pending results for query id [%s]", query.getId());
-      futures.cancel(true);
-      throw new QueryInterruptedException("Query timeout");
-    }
-    catch (ExecutionException e) {
-      throw Throwables.propagate(e.getCause());
-    }
-
-    return new ResourceClosingSequence<Row>(
-        Sequences.simple(
-            indexAccumulatorPair.lhs
-                .iterableWithPostAggregations(null)
-        ), indexAccumulatorPair.lhs
-    );
-  }
-
-}
diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java
index 6a5845b3ef1..ff000dd0a0a 100644
--- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java
+++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java
@@ -103,9 +103,9 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
   private Sequence<Row> mergeGroupByResults(final GroupByQuery query, QueryRunner<Row> runner)
   {
-    Sequence<Row> result;
 
     // If there's a subquery, merge subquery results and then apply the aggregator
    DataSource dataSource = query.getDataSource();
+    final IncrementalIndex index;
     if (dataSource instanceof QueryDataSource) {
       GroupByQuery subquery;
       try {
@@ -115,13 +115,15 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
       }
       Sequence<Row> subqueryResult = mergeGroupByResults(subquery, runner);
 
-      IncrementalIndexStorageAdapter adapter
-          = new IncrementalIndexStorageAdapter(makeIncrementalIndex(subquery, subqueryResult));
-      result = engine.process(query, adapter);
+      final IncrementalIndex subQueryResultIndex = makeIncrementalIndex(subquery, subqueryResult);
+
+      Sequence<Row> result = engine.process(query, new IncrementalIndexStorageAdapter(subQueryResultIndex));
+      index = makeIncrementalIndex(query, result);
+      subQueryResultIndex.close();
     } else {
-      result = runner.run(query);
+      index = makeIncrementalIndex(query, runner.run(query));
+
     }
-    final IncrementalIndex index = makeIncrementalIndex(query, result);
 
     return new ResourceClosingSequence(postAggregate(query, index), index);
   }
diff --git a/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java b/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java
index ba3f18c9dcd..0c2c9e7810b 100644
--- a/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java
+++ b/processing/src/main/java/io/druid/query/groupby/having/GreaterThanHavingSpec.java
@@ -58,6 +58,7 @@ public class GreaterThanHavingSpec implements HavingSpec
   public boolean eval(Row row)
   {
     float metricValue = row.getFloatMetric(aggregationName);
+
     return Float.compare(metricValue, value.floatValue()) > 0;
   }
 
diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueBufferAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueBufferAggregatorTest.java
deleted file mode 100644
index d2d291ac3b9..00000000000
--- a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueBufferAggregatorTest.java
+++ /dev/null
@@ -1,93 +0,0 @@
-package io.druid.query.aggregation.hyperloglog;
-
-import com.google.common.hash.HashFunction;
-import com.google.common.hash.Hashing;
-import io.druid.segment.ObjectColumnSelector;
-import org.junit.Test;
-
-import java.nio.ByteBuffer;
-
-/**
- * Created with IntelliJ IDEA.
- * User: neo
- * Date: 05/06/14
- * Time: 3:14 PM
- * To change this template use File | Settings | File Templates.
- */
-public class HyperUniqueBufferAggregatorTest
-{
-  private final HashFunction fn = Hashing.murmur3_128();
-  private volatile HyperLogLogCollector collector;
-
-  @Test
-  public void testAggregation()
-  {
-    final HyperUniquesBufferAggregator agg = new HyperUniquesBufferAggregator(
-        new ObjectColumnSelector()
-        {
-          @Override
-          public Class classOfObject()
-          {
-            return HyperLogLogCollector.class;
-          }
-
-          @Override
-          public Object get()
-          {
-            return collector;
-          }
-        }
-    );
-    ByteBuffer byteBuffer = ByteBuffer.allocate(HyperLogLogCollector.getLatestNumBytesForDenseStorage());
-
-    for (int i = 0; i < 1000; i++) {
-      collector = HyperLogLogCollector.makeLatestCollector();
-      collector.add(fn.hashInt(i).asBytes());
-      agg.aggregate(byteBuffer, 0);
-    }
-
-    final HyperLogLogCollector collector = HyperLogLogCollector.makeCollector(
-        ((HyperLogLogCollector) agg.get(
-            byteBuffer,
-            0
-        )).toByteBuffer()
-    );
-    System.out.println(collector.estimateCardinality());
-
-  }
-
-  @Test
-  public void testAggregation2()
-  {
-    final HyperUniquesAggregator agg = new HyperUniquesAggregator(
-        "abc",
-        new ObjectColumnSelector()
-        {
-          @Override
-          public Class classOfObject()
-          {
-            return HyperLogLogCollector.class;
-          }
-
-          @Override
-          public Object get()
-          {
-            return collector;
-          }
-        }
-    );
-
-    for (int i = 0; i < 1000; i++) {
-      collector = HyperLogLogCollector.makeLatestCollector();
-      collector.add(fn.hashInt(i).asBytes());
-      agg.aggregate();
-    }
-
-    final HyperLogLogCollector collector = HyperLogLogCollector.makeCollector(
-        ((HyperLogLogCollector) agg.get(
-        )).toByteBuffer()
-    );
-    System.out.println(collector.estimateCardinality());
-
-  }
-}
diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java
index 9fc23be052f..083c290c4cc 100644
--- a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java
+++ b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java
@@ -25,7 +25,6 @@ import com.metamx.common.Granularity;
 import com.metamx.common.concurrent.ScheduledExecutors;
 import com.metamx.emitter.EmittingLogger;
 import com.metamx.emitter.service.ServiceEmitter;
-import io.druid.collections.StupidPool;
 import io.druid.common.guava.ThreadRenamingCallable;
 import io.druid.query.QueryRunnerFactoryConglomerate;
 import io.druid.segment.indexing.DataSchema;
@@ -35,7 +34,6 @@ import io.druid.server.coordination.DataSegmentAnnouncer;
 import org.joda.time.DateTime;
 import org.joda.time.Duration;
 
-import java.nio.ByteBuffer;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.Callable;
diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumberSchool.java b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumberSchool.java
index e5a57461f97..026af6f43c8 100644
--- a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumberSchool.java
+++ b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumberSchool.java
@@ -25,8 +25,6 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.metamx.common.Granularity;
 import com.metamx.emitter.service.ServiceEmitter;
-import io.druid.collections.StupidPool;
-import io.druid.guice.annotations.Global;
 import io.druid.guice.annotations.Processing;
 import io.druid.query.QueryRunnerFactoryConglomerate;
 import io.druid.segment.indexing.DataSchema;
@@ -37,7 +35,6 @@ import org.joda.time.Duration;
 import org.joda.time.Period;
 
 import java.io.File;
-import java.nio.ByteBuffer;
 import java.util.concurrent.ExecutorService;
 
 /**
diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java
index 0b46de25de2..4a24d0a0e83 100644
--- a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java
+++ b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java
@@ -718,7 +718,6 @@ public class RealtimePlumber implements Plumber
           indexToPersist.getIndex(),
           new File(computePersistDir(schema, interval), String.valueOf(indexToPersist.getCount()))
       );
-      IncrementalIndex index = indexToPersist.getIndex();
       indexToPersist.swapSegment(
           new QueryableIndexSegment(
               indexToPersist.getSegment().getIdentifier(),
diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java
index 11ebf015b11..eb52a30ba31 100644
--- a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java
+++ b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumberSchool.java
@@ -26,6 +26,7 @@ import com.google.common.base.Preconditions;
 import com.metamx.common.Granularity;
 import com.metamx.emitter.service.ServiceEmitter;
 import io.druid.client.FilteredServerView;
+import io.druid.client.ServerView;
 import io.druid.guice.annotations.Processing;
 import io.druid.query.QueryRunnerFactoryConglomerate;
 import io.druid.segment.indexing.DataSchema;
@@ -50,6 +51,7 @@ public class RealtimePlumberSchool implements PlumberSchool
   private final SegmentPublisher segmentPublisher;
   private final FilteredServerView serverView;
   private final ExecutorService queryExecutorService;
+  // Backwards compatible
   private final Period windowPeriod;
   private final File basePersistDirectory;
 
@@ -160,5 +162,4 @@ public class RealtimePlumberSchool implements PlumberSchool
     Preconditions.checkNotNull(serverView, "must specify a serverView to do this action.");
     Preconditions.checkNotNull(emitter, "must specify a serviceEmitter to do this action.");
   }
-
 }
diff --git a/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java b/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java
index 5817ed6a9ac..b4d87260150 100644
--- a/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java
+++ b/server/src/test/java/io/druid/segment/realtime/FireDepartmentTest.java
@@ -27,12 +27,11 @@ import io.druid.data.input.impl.StringInputRowParser;
 import io.druid.data.input.impl.TimestampSpec;
 import io.druid.granularity.QueryGranularity;
 import io.druid.jackson.DefaultObjectMapper;
-import io.druid.query.TestQueryRunners;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.segment.indexing.DataSchema;
-import io.druid.segment.indexing.RealtimeTuningConfig;
 import io.druid.segment.indexing.RealtimeIOConfig;
+import io.druid.segment.indexing.RealtimeTuningConfig;
 import io.druid.segment.indexing.granularity.UniformGranularitySpec;
 import io.druid.segment.realtime.plumber.RealtimePlumberSchool;
 import junit.framework.Assert;
diff --git a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java
index 7abe823594e..39cfc481968 100644
--- a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java
+++ b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java
@@ -31,12 +31,11 @@ import io.druid.data.input.impl.InputRowParser;
 import io.druid.granularity.QueryGranularity;
 import io.druid.query.Query;
 import io.druid.query.QueryRunner;
-import io.druid.query.TestQueryRunners;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.segment.indexing.DataSchema;
-import io.druid.segment.indexing.RealtimeTuningConfig;
 import io.druid.segment.indexing.RealtimeIOConfig;
+import io.druid.segment.indexing.RealtimeTuningConfig;
 import io.druid.segment.indexing.granularity.UniformGranularitySpec;
 import io.druid.segment.realtime.plumber.Plumber;
 import io.druid.segment.realtime.plumber.PlumberSchool;
diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java
index bab7a549b52..eef42cad4b3 100644
--- a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java
+++ b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java
@@ -31,16 +31,15 @@ import com.metamx.emitter.service.ServiceEmitter;
 import io.druid.client.FilteredServerView;
 import io.druid.client.ServerView;
 import io.druid.data.input.InputRow;
-import io.druid.data.input.impl.ParseSpec;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
 import io.druid.data.input.impl.JSONParseSpec;
+import io.druid.data.input.impl.ParseSpec;
 import io.druid.data.input.impl.TimestampSpec;
 import io.druid.granularity.QueryGranularity;
 import io.druid.query.DefaultQueryRunnerFactoryConglomerate;
 import io.druid.query.Query;
 import io.druid.query.QueryRunnerFactory;
-import io.druid.query.TestQueryRunners;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.segment.indexing.DataSchema;
diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java
index 0e84660f682..c6615128410 100644
--- a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java
+++ b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java
@@ -24,7 +24,6 @@ import com.google.common.collect.Lists;
 import com.metamx.common.Granularity;
 import io.druid.data.input.InputRow;
 import io.druid.granularity.QueryGranularity;
-import io.druid.query.TestQueryRunners;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.CountAggregatorFactory;
 import io.druid.segment.indexing.DataSchema;