From 8340a1b0a1eb6d4a95a71397ada7571b3183276a Mon Sep 17 00:00:00 2001
From: fjy
Date: Tue, 3 Jun 2014 10:34:38 -0700
Subject: [PATCH 01/46] Properties module can now take multiple properties
 files

---
 .../druid/initialization/Initialization.java  |  3 +-
 .../initialization/PropertiesModule.java      | 49 ++++++++++---------
 2 files changed, 28 insertions(+), 24 deletions(-)

diff --git a/server/src/main/java/io/druid/initialization/Initialization.java b/server/src/main/java/io/druid/initialization/Initialization.java
index 292aa3c95cf..70b810892f8 100644
--- a/server/src/main/java/io/druid/initialization/Initialization.java
+++ b/server/src/main/java/io/druid/initialization/Initialization.java
@@ -85,6 +85,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLClassLoader;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
@@ -351,7 +352,7 @@ public class Initialization
     return Guice.createInjector(
         new DruidGuiceExtensions(),
         new JacksonModule(),
-        new PropertiesModule("runtime.properties"),
+        new PropertiesModule(Arrays.asList("global.runtime.properties", "runtime.properties")),
         new ConfigModule(),
         new Module()
         {

diff --git a/server/src/main/java/io/druid/server/initialization/PropertiesModule.java b/server/src/main/java/io/druid/server/initialization/PropertiesModule.java
index 3aad005dd68..c609504155f 100644
--- a/server/src/main/java/io/druid/server/initialization/PropertiesModule.java
+++ b/server/src/main/java/io/druid/server/initialization/PropertiesModule.java
@@ -33,6 +33,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.util.List;
 import java.util.Properties;
 
 /**
@@ -41,11 +42,11 @@ public class PropertiesModule implements Module
 {
   private static final Logger log = new Logger(PropertiesModule.class);
 
-  private final String propertiesFile;
+  private final List<String> propertiesFiles;
 
-  public PropertiesModule(String propertiesFile)
+  public PropertiesModule(List<String> propertiesFiles)
   {
-    this.propertiesFile = propertiesFile;
+    this.propertiesFiles = propertiesFiles;
   }
 
   @Override
@@ -57,30 +58,32 @@ public class PropertiesModule implements Module
     Properties props = new Properties(fileProps);
     props.putAll(systemProps);
 
-    InputStream stream = ClassLoader.getSystemResourceAsStream(propertiesFile);
-    try {
-      if (stream == null) {
-        File workingDirectoryFile = new File(systemProps.getProperty("druid.properties.file", propertiesFile));
-        if (workingDirectoryFile.exists()) {
-          stream = new BufferedInputStream(new FileInputStream(workingDirectoryFile));
+    for (String propertiesFile : propertiesFiles) {
+      InputStream stream = ClassLoader.getSystemResourceAsStream(propertiesFile);
+      try {
+        if (stream == null) {
+          File workingDirectoryFile = new File(systemProps.getProperty("druid.properties.file", propertiesFile));
+          if (workingDirectoryFile.exists()) {
+            stream = new BufferedInputStream(new FileInputStream(workingDirectoryFile));
+          }
         }
-      }
 
-      if (stream != null) {
-        log.info("Loading properties from %s", propertiesFile);
-        try {
-          fileProps.load(new InputStreamReader(stream, Charsets.UTF_8));
-        }
-        catch (IOException e) {
-          throw Throwables.propagate(e);
+        if (stream != null) {
+          log.info("Loading properties from %s", propertiesFile);
+          try {
+            fileProps.load(new InputStreamReader(stream, Charsets.UTF_8));
+          }
+          catch (IOException e) {
+            throw Throwables.propagate(e);
+          }
         }
       }
-    }
-    catch (FileNotFoundException e)
{
-      log.wtf(e, "This can only happen if the .exists() call lied. That's f'd up.");
-    }
-    finally {
-      Closeables.closeQuietly(stream);
+      catch (FileNotFoundException e) {
+        log.wtf(e, "This can only happen if the .exists() call lied. That's f'd up.");
+      }
+      finally {
+        Closeables.closeQuietly(stream);
+      }
     }
 
     binder.bind(Properties.class).toInstance(props);

From 7233fce50cc7ad37e52d4626d2f1d97940b6cbd9 Mon Sep 17 00:00:00 2001
From: fjy
Date: Mon, 9 Jun 2014 15:59:44 -0700
Subject: [PATCH 02/46] refactor examples to use global runtime

---
 examples/config/broker/runtime.properties     |  4 +--
 .../config/coordinator/runtime.properties     |  6 ----
 examples/config/global.runtime.properties     | 24 +++++++++++++++
 examples/config/historical/runtime.properties |  2 --
 examples/config/overlord/runtime.properties   | 28 +++++-------------
 examples/config/realtime/runtime.properties   | 11 -------
 pom.xml                                       |  2 +-
 .../server/initialization/EmitterModule.java  |  5 ++--
 .../initialization/NoopEmitterModule.java     | 29 +++++++++++++++++++
 9 files changed, 65 insertions(+), 46 deletions(-)
 create mode 100644 examples/config/global.runtime.properties
 create mode 100644 server/src/main/java/io/druid/server/initialization/NoopEmitterModule.java

diff --git a/examples/config/broker/runtime.properties b/examples/config/broker/runtime.properties
index 8afae982654..31799a7caf9 100644
--- a/examples/config/broker/runtime.properties
+++ b/examples/config/broker/runtime.properties
@@ -2,8 +2,6 @@ druid.host=localhost
 druid.service=broker
 druid.port=8080
 
-druid.zk.service.host=localhost
-
-# Change these to make Druid faster
+# Add more threads or larger buffer for faster groupBys
 druid.processing.buffer.sizeBytes=100000000
 druid.processing.numThreads=1

diff --git a/examples/config/coordinator/runtime.properties b/examples/config/coordinator/runtime.properties
index 3d68fec772e..c9f16857af4 100644
--- a/examples/config/coordinator/runtime.properties
+++ b/examples/config/coordinator/runtime.properties
@@ -2,10 +2,4 @@ druid.host=localhost
 druid.service=coordinator
 druid.port=8082
 
-druid.zk.service.host=localhost
-
-druid.db.connector.connectURI=jdbc\:mysql\://localhost\:3306/druid
-druid.db.connector.user=druid
-druid.db.connector.password=diurd
-
 druid.coordinator.startDelay=PT70s
\ No newline at end of file

diff --git a/examples/config/global.runtime.properties b/examples/config/global.runtime.properties
new file mode 100644
index 00000000000..bc9058849d8
--- /dev/null
+++ b/examples/config/global.runtime.properties
@@ -0,0 +1,24 @@
+# Extensions
+druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.116","io.druid.extensions:druid-kafka-seven:0.6.116","io.druid.extensions:druid-rabbitmq:0.6.116", "io.druid.extensions:druid-s3-extensions:0.6.116"]
+
+# Zookeeper
+druid.zk.service.host=localhost
+
+# Metadata Storage
+druid.db.connector.connectURI=jdbc\:mysql\://localhost\:3306/druid
+druid.db.connector.user=druid
+druid.db.connector.password=diurd
+
+# Deep storage
+druid.storage.type=local
+druid.storage.storage.storageDirectory=/tmp/druid/localStorage
+
+# Indexing service discovery
+druid.selectors.indexing.serviceName=overlord
+
+
+# Monitoring (disabled for examples)
+# druid.monitoring.monitors=["com.metamx.metrics.SysMonitor","com.metamx.metrics.JvmMonitor"]
+
+# Metrics logging
+druid.emitter=noop
\ No newline at end of file

diff --git a/examples/config/historical/runtime.properties b/examples/config/historical/runtime.properties
index 51d19e89af6..dc927f4de1e 100644
---
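The load order in PropertiesModule above matters: every file in the list is read into the same fileProps object, so a key repeated in runtime.properties overwrites the value read from global.runtime.properties, while JVM system properties always win because they are copied into the wrapping Properties as direct entries and fileProps acts only as its default layer. A minimal sketch of that java.util.Properties layering, with an illustrative key:

    import java.util.Properties;

    public class PropertyPrecedenceSketch
    {
      public static void main(String[] args)
      {
        Properties fileProps = new Properties();
        // global.runtime.properties is loaded first...
        fileProps.setProperty("druid.emitter", "noop");
        // ...then runtime.properties overwrites any key it repeats.
        fileProps.setProperty("druid.emitter", "logging");

        // fileProps is only the fallback layer; direct entries shadow it.
        Properties props = new Properties(fileProps);
        props.setProperty("druid.emitter", "http"); // stands in for a -D system property

        System.out.println(props.getProperty("druid.emitter")); // prints "http"
      }
    }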
a/examples/config/historical/runtime.properties +++ b/examples/config/historical/runtime.properties @@ -2,8 +2,6 @@ druid.host=localhost druid.service=historical druid.port=8081 -druid.zk.service.host=localhost - druid.extensions.coordinates=["io.druid.extensions:druid-s3-extensions:0.6.116"] # Dummy read only AWS account (used to download example data) diff --git a/examples/config/overlord/runtime.properties b/examples/config/overlord/runtime.properties index 24745c3147f..dc67d60adaf 100644 --- a/examples/config/overlord/runtime.properties +++ b/examples/config/overlord/runtime.properties @@ -1,22 +1,8 @@ --server --Xmx256m --Duser.timezone=UTC --Dfile.encoding=UTF-8 +druid.host=localhost +druid.port=8083 +druid.service=overlord --Ddruid.host=localhost --Ddruid.port=8080 --Ddruid.service=overlord - --Ddruid.zk.service.host=localhost - --Ddruid.extensions.coordinates=["io.druid.extensions:druid-kafka-seven:0.6.116"] - --Ddruid.db.connector.connectURI=jdbc:mysql://localhost:3306/druid --Ddruid.db.connector.user=druid --Ddruid.db.connector.password=diurd - --Ddruid.selectors.indexing.serviceName=overlord --Ddruid.indexer.queue.startDelay=PT0M --Ddruid.indexer.runner.javaOpts="-server -Xmx256m" --Ddruid.indexer.fork.property.druid.processing.numThreads=1 --Ddruid.indexer.fork.property.druid.computation.buffer.size=100000000 \ No newline at end of file +druid.indexer.queue.startDelay=PT0M +druid.indexer.runner.javaOpts="-server -Xmx256m" +druid.indexer.fork.property.druid.processing.numThreads=1 +druid.indexer.fork.property.druid.computation.buffer.size=100000000 \ No newline at end of file diff --git a/examples/config/realtime/runtime.properties b/examples/config/realtime/runtime.properties index b78547fe8ab..def7470a99b 100644 --- a/examples/config/realtime/runtime.properties +++ b/examples/config/realtime/runtime.properties @@ -2,19 +2,8 @@ druid.host=localhost druid.service=realtime druid.port=8083 -druid.zk.service.host=localhost - -druid.extensions.coordinates=["io.druid.extensions:druid-examples:0.6.116","io.druid.extensions:druid-kafka-seven:0.6.116","io.druid.extensions:druid-rabbitmq:0.6.116"] - # Change this config to db to hand off to the rest of the Druid cluster druid.publish.type=noop -# These configs are only required for real hand off -# druid.db.connector.connectURI=jdbc\:mysql\://localhost\:3306/druid -# druid.db.connector.user=druid -# druid.db.connector.password=diurd - druid.processing.buffer.sizeBytes=100000000 druid.processing.numThreads=1 - -druid.monitoring.monitors=["io.druid.segment.realtime.RealtimeMetricsMonitor"] diff --git a/pom.xml b/pom.xml index fd02b96820e..30832917f0c 100644 --- a/pom.xml +++ b/pom.xml @@ -73,7 +73,7 @@ com.metamx emitter - 0.2.11 + 0.2.12 com.metamx diff --git a/server/src/main/java/io/druid/server/initialization/EmitterModule.java b/server/src/main/java/io/druid/server/initialization/EmitterModule.java index c7b29d3af53..410bd70ac82 100644 --- a/server/src/main/java/io/druid/server/initialization/EmitterModule.java +++ b/server/src/main/java/io/druid/server/initialization/EmitterModule.java @@ -67,6 +67,7 @@ public class EmitterModule implements Module { String emitterType = props.getProperty(EMITTER_PROPERTY, ""); + binder.install(new NoopEmitterModule()); binder.install(new LogEmitterModule()); binder.install(new HttpEmitterModule()); @@ -104,7 +105,7 @@ public class EmitterModule implements Module emitter = findEmitter(emitterType, emitterBindings); if (emitter == null) { - emitter = findEmitter(LogEmitterModule.EMITTER_TYPE, 
emitterBindings); + emitter = findEmitter(NoopEmitterModule.EMITTER_TYPE, emitterBindings); } if (emitter == null) { @@ -115,7 +116,7 @@ public class EmitterModule implements Module knownTypes.add(((Named) annotation).value()); } } - throw new ISE("Uknown emitter type[%s]=[%s], known types[%s]", EMITTER_PROPERTY, emitterType, knownTypes); + throw new ISE("Unknown emitter type[%s]=[%s], known types[%s]", EMITTER_PROPERTY, emitterType, knownTypes); } } diff --git a/server/src/main/java/io/druid/server/initialization/NoopEmitterModule.java b/server/src/main/java/io/druid/server/initialization/NoopEmitterModule.java new file mode 100644 index 00000000000..721e65d10ef --- /dev/null +++ b/server/src/main/java/io/druid/server/initialization/NoopEmitterModule.java @@ -0,0 +1,29 @@ +package io.druid.server.initialization; + +import com.google.inject.Binder; +import com.google.inject.Module; +import com.google.inject.Provides; +import com.google.inject.name.Named; +import com.metamx.emitter.core.Emitter; +import com.metamx.emitter.core.NoopEmitter; +import io.druid.guice.ManageLifecycle; + +/** + */ +public class NoopEmitterModule implements Module +{ + public static final String EMITTER_TYPE = "noop"; + + @Override + public void configure(Binder binder) + { + } + + @Provides + @ManageLifecycle + @Named(EMITTER_TYPE) + public Emitter makeEmitter() + { + return new NoopEmitter(); + } +} From 5380f14905093f1e20e47df741129c342d6f3704 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 9 Jun 2014 16:13:57 -0700 Subject: [PATCH 03/46] add header --- .../initialization/NoopEmitterModule.java | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/server/src/main/java/io/druid/server/initialization/NoopEmitterModule.java b/server/src/main/java/io/druid/server/initialization/NoopEmitterModule.java index 721e65d10ef..f874d27021e 100644 --- a/server/src/main/java/io/druid/server/initialization/NoopEmitterModule.java +++ b/server/src/main/java/io/druid/server/initialization/NoopEmitterModule.java @@ -1,3 +1,22 @@ +/* + * Druid - a distributed column store. + * Copyright (C) 2012, 2013 Metamarkets Group Inc. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
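The emitter selection in patch 02 above rests on Guice named bindings: EmitterModule installs every emitter module, then looks up the Emitter binding whose @Named value matches the druid.emitter property, now falling back to the noop type instead of the logging one. A minimal sketch of that name-based lookup, using a stand-in Emitter interface rather than the real com.metamx.emitter.core.Emitter:

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import com.google.inject.Key;
    import com.google.inject.name.Names;

    public class EmitterSelectionSketch
    {
      // Stand-ins for com.metamx.emitter.core.Emitter and NoopEmitter.
      interface Emitter {}
      static class NoopEmitter implements Emitter {}

      public static void main(String[] args)
      {
        Injector injector = Guice.createInjector(new AbstractModule()
        {
          @Override
          protected void configure()
          {
            // The effect of NoopEmitterModule's @Provides @Named(EMITTER_TYPE) method.
            bind(Emitter.class).annotatedWith(Names.named("noop")).toInstance(new NoopEmitter());
          }
        });

        // EmitterModule reads the druid.emitter property and resolves by name.
        String emitterType = System.getProperty("druid.emitter", "noop");
        Emitter emitter = injector.getInstance(Key.get(Emitter.class, Names.named(emitterType)));
        System.out.println(emitter.getClass().getSimpleName()); // NoopEmitter
      }
    }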
+ */
+
 package io.druid.server.initialization;

From 00856f0feca2a9bc9c00183c83eccac3b1558893 Mon Sep 17 00:00:00 2001
From: jisookim0513
Date: Wed, 18 Jun 2014 16:55:33 -0700
Subject: [PATCH 04/46] add retry feature on broker side; modify QueryRunner
 interface and tests

---
 pom.xml                                       |   2 +-
 .../main/java/io/druid/query/BaseQuery.java   |   8 +-
 .../io/druid/query/BySegmentQueryRunner.java  |   8 +-
 .../query/BySegmentSkippingQueryRunner.java   |  11 +-
 .../query/ChainedExecutionQueryRunner.java    |   5 +-
 .../io/druid/query/ConcatQueryRunner.java     |   7 +-
 .../query/FinalizeResultsQueryRunner.java     |   6 +-
 .../query/GroupByParallelQueryRunner.java     |   5 +-
 .../query/IntervalChunkingQueryRunner.java    |   8 +-
 .../query/MetricsEmittingQueryRunner.java     |   8 +-
 .../java/io/druid/query/NoopQueryRunner.java  |   5 +-
 .../src/main/java/io/druid/query/Query.java   |   4 +-
 .../main/java/io/druid/query/QueryRunner.java |   7 +-
 .../ReferenceCountingSegmentQueryRunner.java  |   6 +-
 .../druid/query/ResultMergeQueryRunner.java   |   7 +-
 .../java/io/druid/query/RetryQueryRunner.java |  66 +++++++++
 .../druid/query/RetryQueryRunnerConfig.java   |  30 ++++
 .../io/druid/query/SubqueryQueryRunner.java   |   9 +-
 .../java/io/druid/query/UnionQueryRunner.java |  10 +-
 .../groupby/GroupByQueryQueryToolChest.java   |  13 +-
 .../groupby/GroupByQueryRunnerFactory.java    |   8 +-
 .../SegmentMetadataQueryRunnerFactory.java    |  10 +-
 .../search/SearchQueryQueryToolChest.java     |   9 +-
 .../druid/query/search/SearchQueryRunner.java |   7 +-
 .../select/SelectQueryRunnerFactory.java      |  10 +-
 .../spec/SpecificSegmentQueryRunner.java      |  15 +-
 .../druid/query/spec/SpecificSegmentSpec.java |   2 +
 .../TimeBoundaryQueryQueryToolChest.java      |   5 +-
 .../TimeBoundaryQueryRunnerFactory.java       |   7 +-
 .../timeseries/TimeseriesQueryEngine.java     |   4 +-
 .../TimeseriesQueryRunnerFactory.java         |   9 +-
 .../io/druid/query/topn/TopNQueryEngine.java  |   6 +-
 .../query/topn/TopNQueryQueryToolChest.java   |   9 +-
 .../query/topn/TopNQueryRunnerFactory.java    |   9 +-
 .../segment/IncrementalIndexSegment.java      |   1 +
 .../segment/NullStorageAdapterException.java  |  27 ++++
 .../druid/segment/QueryableIndexSegment.java  |   1 +
 .../ChainedExecutionQueryRunnerTest.java      |  15 +-
 .../query/groupby/GroupByQueryRunnerTest.java |  65 +++++----
 .../GroupByTimeseriesQueryRunnerTest.java     |   6 +-
 .../query/metadata/SegmentAnalyzerTest.java   |   5 +-
 .../metadata/SegmentMetadataQueryTest.java    |   6 +-
 .../query/search/SearchQueryRunnerTest.java   |   7 +-
 .../query/select/SelectQueryRunnerTest.java   |  20 ++-
 .../TimeBoundaryQueryRunnerTest.java          |   9 +-
 .../TimeSeriesUnionQueryRunnerTest.java       |   5 +-
 .../TimeseriesQueryRunnerBonusTest.java       |   5 +-
 .../timeseries/TimeseriesQueryRunnerTest.java | 125 ++++++++--------
 .../druid/query/topn/TopNQueryRunnerTest.java | 134 +++++++++---------
 .../druid/query/topn/TopNUnionQueryTest.java  |   5 +-
 .../filter/SpatialFilterBonusTest.java        |   9 +-
 .../segment/filter/SpatialFilterTest.java     |   9 +-
 .../druid/client/CachingClusteredClient.java  |   6 +-
 .../io/druid/client/CachingQueryRunner.java   |   7 +-
 .../io/druid/client/DirectDruidClient.java    |  19 ++-
 .../server/ClientQuerySegmentWalker.java      |   4 +-
 .../java/io/druid/server/QueryResource.java   |  22 +--
 .../bridge/BridgeQuerySegmentWalker.java      |   3 +-
 .../client/CachingClusteredClientTest.java    |  45 +++---
 .../druid/client/CachingQueryRunnerTest.java  |  13 +-
 .../druid/client/DirectDruidClientTest.java   |  23 ++-
 .../coordination/ServerManagerTest.java       |  10 +-
 .../src/main/java/io/druid/cli/CliBroker.java |   2 +
 63 files changed, 604 insertions(+), 339
deletions(-) create mode 100644 processing/src/main/java/io/druid/query/RetryQueryRunner.java create mode 100644 processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java create mode 100644 processing/src/main/java/io/druid/segment/NullStorageAdapterException.java diff --git a/pom.xml b/pom.xml index 6717674b368..46e6a3feb98 100644 --- a/pom.xml +++ b/pom.xml @@ -244,7 +244,7 @@ com.fasterxml.jackson.datatype jackson-datatype-joda - 2.2.3 + 2.4.0 com.fasterxml.jackson.dataformat diff --git a/processing/src/main/java/io/druid/query/BaseQuery.java b/processing/src/main/java/io/druid/query/BaseQuery.java index 32d9c3256f4..3dac2fa0cfd 100644 --- a/processing/src/main/java/io/druid/query/BaseQuery.java +++ b/processing/src/main/java/io/druid/query/BaseQuery.java @@ -70,14 +70,14 @@ public abstract class BaseQuery implements Query } @Override - public Sequence run(QuerySegmentWalker walker) + public Sequence run(QuerySegmentWalker walker, Map metadata) { - return run(querySegmentSpec.lookup(this, walker)); + return run(querySegmentSpec.lookup(this, walker), metadata); } - public Sequence run(QueryRunner runner) + public Sequence run(QueryRunner runner, Map metadata) { - return runner.run(this); + return runner.run(this, metadata); } @Override diff --git a/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java b/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java index 44094d0216a..1d41a58b58b 100644 --- a/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java @@ -29,6 +29,7 @@ import com.metamx.common.guava.YieldingAccumulator; import org.joda.time.DateTime; import java.util.List; +import java.util.Map; /** */ @@ -51,10 +52,10 @@ public class BySegmentQueryRunner implements QueryRunner @Override @SuppressWarnings("unchecked") - public Sequence run(final Query query) + public Sequence run(final Query query, Map metadata) { if (query.getContextBySegment(false)) { - final Sequence baseSequence = base.run(query); + final Sequence baseSequence = base.run(query, metadata); return new Sequence() { @Override @@ -96,7 +97,6 @@ public class BySegmentQueryRunner implements QueryRunner } }; } - - return base.run(query); + return base.run(query, metadata); } } diff --git a/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java b/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java index 13ca4dd75df..09cf38ee40b 100644 --- a/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java @@ -21,6 +21,9 @@ package io.druid.query; import com.metamx.common.guava.Sequence; +import java.util.List; +import java.util.Map; + /** */ public abstract class BySegmentSkippingQueryRunner implements QueryRunner @@ -35,14 +38,14 @@ public abstract class BySegmentSkippingQueryRunner implements QueryRunner } @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { if (query.getContextBySegment(false)) { - return baseRunner.run(query); + return baseRunner.run(query, metadata); } - return doRun(baseRunner, query); + return doRun(baseRunner, query, metadata); } - protected abstract Sequence doRun(QueryRunner baseRunner, Query query); + protected abstract Sequence doRun(QueryRunner baseRunner, Query query, Map metadata); } diff --git a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java 
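The Query and QueryRunner changes above are the heart of patch 04: run() now threads one caller-supplied mutable map through the entire runner chain, so a runner deep in the stack can record a failed segment under the "missingSegments" key where the top-level caller (and, later in the patch, RetryQueryRunner) can see it. A minimal sketch of that contract, with a plain List standing in for com.metamx.common.guava.Sequence:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class MetadataContractSketch
    {
      // Stand-in for the revised io.druid.query.QueryRunner interface.
      interface QueryRunner<T>
      {
        List<T> run(String query, Map<String, Object> metadata);
      }

      public static void main(String[] args)
      {
        QueryRunner<String> runner = new QueryRunner<String>()
        {
          @Override
          public List<String> run(String query, Map<String, Object> metadata)
          {
            // A runner that cannot serve its segment reports it instead of failing.
            ((List<String>) metadata.get("missingSegments")).add("segment_2011-04-01");
            return new ArrayList<String>();
          }
        };

        Map<String, Object> metadata = new HashMap<String, Object>();
        metadata.put("missingSegments", new ArrayList<String>());
        runner.run("some-query", metadata);

        System.out.println(metadata.get("missingSegments")); // [segment_2011-04-01]
      }
    }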
b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java index 8a5ed51a4df..45a9f5518d8 100644 --- a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java @@ -39,6 +39,7 @@ import com.metamx.common.logger.Logger; import java.util.Arrays; import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -93,7 +94,7 @@ public class ChainedExecutionQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query) + public Sequence run(final Query query, final Map metadata) { final int priority = query.getContextPriority(0); @@ -124,7 +125,7 @@ public class ChainedExecutionQueryRunner implements QueryRunner throw new ISE("Input is null?! How is this possible?!"); } - Sequence result = input.run(query); + Sequence result = input.run(query, metadata); if (result == null) { throw new ISE("Got a null result! Segments are missing!"); } diff --git a/processing/src/main/java/io/druid/query/ConcatQueryRunner.java b/processing/src/main/java/io/druid/query/ConcatQueryRunner.java index 9735d0f5a94..482216fe818 100644 --- a/processing/src/main/java/io/druid/query/ConcatQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ConcatQueryRunner.java @@ -23,6 +23,9 @@ import com.google.common.base.Function; import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequences; +import java.util.List; +import java.util.Map; + /** */ public class ConcatQueryRunner implements QueryRunner @@ -36,7 +39,7 @@ public class ConcatQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query) + public Sequence run(final Query query, final Map metadata) { return Sequences.concat( Sequences.map( @@ -46,7 +49,7 @@ public class ConcatQueryRunner implements QueryRunner @Override public Sequence apply(final QueryRunner input) { - return input.run(query); + return input.run(query, metadata); } } ) diff --git a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java index eb5bf466daf..565141aad7d 100644 --- a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java +++ b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java @@ -28,6 +28,8 @@ import io.druid.query.aggregation.MetricManipulationFn; import io.druid.query.aggregation.MetricManipulatorFns; import javax.annotation.Nullable; +import java.util.List; +import java.util.Map; /** */ @@ -46,7 +48,7 @@ public class FinalizeResultsQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query) + public Sequence run(final Query query, Map metadata) { final boolean isBySegment = query.getContextBySegment(false); final boolean shouldFinalize = query.getContextFinalize(true); @@ -94,7 +96,7 @@ public class FinalizeResultsQueryRunner implements QueryRunner return Sequences.map( - baseRunner.run(queryToRun), + baseRunner.run(queryToRun, metadata), finalizerFn ); diff --git a/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java b/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java index c9b14b6314b..ef1bd009523 100644 --- a/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java +++ 
b/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java @@ -43,6 +43,7 @@ import io.druid.segment.incremental.IncrementalIndex; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -86,7 +87,7 @@ public class GroupByParallelQueryRunner implements QueryRunner } @Override - public Sequence run(final Query queryParam) + public Sequence run(final Query queryParam, final Map metadata) { final GroupByQuery query = (GroupByQuery) queryParam; @@ -115,7 +116,7 @@ public class GroupByParallelQueryRunner implements QueryRunner public Boolean call() throws Exception { try { - input.run(queryParam).accumulate(indexAccumulatorPair.lhs, indexAccumulatorPair.rhs); + input.run(queryParam, metadata).accumulate(indexAccumulatorPair.lhs, indexAccumulatorPair.rhs); return true; } catch (QueryInterruptedException e) { diff --git a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java index 0bf4c3928b0..42f5093468f 100644 --- a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java @@ -33,6 +33,7 @@ import javax.annotation.Nullable; import java.util.Arrays; import java.util.Iterator; import java.util.List; +import java.util.Map; /** */ @@ -48,10 +49,10 @@ public class IntervalChunkingQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query) + public Sequence run(final Query query, final Map metadata) { if (period.getMillis() == 0) { - return baseRunner.run(query); + return baseRunner.run(query, metadata); } return Sequences.concat( @@ -74,7 +75,8 @@ public class IntervalChunkingQueryRunner implements QueryRunner public Sequence apply(Interval singleInterval) { return baseRunner.run( - query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Arrays.asList(singleInterval))) + query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Arrays.asList(singleInterval))), + metadata ); } } diff --git a/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java b/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java index dbad443cb36..110aadf4ce5 100644 --- a/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java @@ -28,6 +28,8 @@ import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; import java.io.IOException; +import java.util.List; +import java.util.Map; /** */ @@ -66,7 +68,7 @@ public class MetricsEmittingQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query) + public Sequence run(final Query query, final Map metadata) { final ServiceMetricEvent.Builder builder = builderFn.apply(query); String queryId = query.getId(); @@ -84,7 +86,7 @@ public class MetricsEmittingQueryRunner implements QueryRunner long startTime = System.currentTimeMillis(); try { - retVal = queryRunner.run(query).accumulate(outType, accumulator); + retVal = queryRunner.run(query, metadata).accumulate(outType, accumulator); } catch (RuntimeException e) { builder.setUser10("failed"); @@ -114,7 +116,7 @@ public class MetricsEmittingQueryRunner implements QueryRunner long startTime = System.currentTimeMillis(); try { - retVal = 
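IntervalChunkingQueryRunner above slices one long query interval into period-sized chunks and runs each chunk as its own query via MultipleIntervalSegmentSpec. A standalone sketch of just the slicing step, assuming whole-millisecond periods and ignoring time-zone arithmetic:

    import java.util.ArrayList;
    import java.util.List;
    import org.joda.time.Interval;
    import org.joda.time.Period;

    public class IntervalChunkingSketch
    {
      static List<Interval> chunk(Interval interval, Period period)
      {
        List<Interval> chunks = new ArrayList<Interval>();
        long start = interval.getStartMillis();
        long step = period.toStandardDuration().getMillis();
        while (start < interval.getEndMillis()) {
          long end = Math.min(start + step, interval.getEndMillis());
          chunks.add(new Interval(start, end));
          start = end; // next chunk picks up exactly where this one ended
        }
        return chunks;
      }

      public static void main(String[] args)
      {
        // Prints three one-day sub-intervals of 2014-01-01/2014-01-04.
        System.out.println(chunk(new Interval("2014-01-01/2014-01-04"), Period.days(1)));
      }
    }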
queryRunner.run(query).toYielder(initValue, accumulator); + retVal = queryRunner.run(query, metadata).toYielder(initValue, accumulator); } catch (RuntimeException e) { builder.setUser10("failed"); diff --git a/processing/src/main/java/io/druid/query/NoopQueryRunner.java b/processing/src/main/java/io/druid/query/NoopQueryRunner.java index 355ee4f7a20..0f659d01793 100644 --- a/processing/src/main/java/io/druid/query/NoopQueryRunner.java +++ b/processing/src/main/java/io/druid/query/NoopQueryRunner.java @@ -22,12 +22,15 @@ package io.druid.query; import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequences; +import java.util.List; +import java.util.Map; + /** */ public class NoopQueryRunner implements QueryRunner { @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { return Sequences.empty(); } diff --git a/processing/src/main/java/io/druid/query/Query.java b/processing/src/main/java/io/druid/query/Query.java index 04c581152ad..74484b7ab96 100644 --- a/processing/src/main/java/io/druid/query/Query.java +++ b/processing/src/main/java/io/druid/query/Query.java @@ -62,9 +62,9 @@ public interface Query public String getType(); - public Sequence run(QuerySegmentWalker walker); + public Sequence run(QuerySegmentWalker walker, Map metadata); - public Sequence run(QueryRunner runner); + public Sequence run(QueryRunner runner, Map metadata); public List getIntervals(); diff --git a/processing/src/main/java/io/druid/query/QueryRunner.java b/processing/src/main/java/io/druid/query/QueryRunner.java index 62c44ad5163..81b5f4b39ad 100644 --- a/processing/src/main/java/io/druid/query/QueryRunner.java +++ b/processing/src/main/java/io/druid/query/QueryRunner.java @@ -21,9 +21,12 @@ package io.druid.query; import com.metamx.common.guava.Sequence; +import java.util.List; +import java.util.Map; + /** */ public interface QueryRunner { - public Sequence run(Query query); -} + public Sequence run(Query query, Map metadata); +} \ No newline at end of file diff --git a/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java b/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java index c366c287ac5..457b645544b 100644 --- a/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java @@ -25,6 +25,8 @@ import com.metamx.common.guava.Sequence; import io.druid.segment.ReferenceCountingSegment; import java.io.Closeable; +import java.util.List; +import java.util.Map; /** */ @@ -43,11 +45,11 @@ public class ReferenceCountingSegmentQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query) + public Sequence run(final Query query, Map metadata) { final Closeable closeable = adapter.increment(); try { - final Sequence baseSequence = factory.createRunner(adapter).run(query); + final Sequence baseSequence = factory.createRunner(adapter).run(query, metadata); return new ResourceClosingSequence(baseSequence, closeable); } diff --git a/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java b/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java index 94b670444e8..c33cd032161 100644 --- a/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java @@ -24,6 +24,9 @@ import com.metamx.common.guava.Sequence; import com.metamx.common.guava.nary.BinaryFn; import 
io.druid.common.guava.CombiningSequence; +import java.util.List; +import java.util.Map; + /** */ public abstract class ResultMergeQueryRunner extends BySegmentSkippingQueryRunner @@ -36,9 +39,9 @@ public abstract class ResultMergeQueryRunner extends BySegmentSkippingQueryRu } @Override - public Sequence doRun(QueryRunner baseRunner, Query query) + public Sequence doRun(QueryRunner baseRunner, Query query, Map metadata) { - return CombiningSequence.create(baseRunner.run(query), makeOrdering(query), createMergeFn(query)); + return CombiningSequence.create(baseRunner.run(query, metadata), makeOrdering(query), createMergeFn(query)); } protected abstract Ordering makeOrdering(Query query); diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunner.java b/processing/src/main/java/io/druid/query/RetryQueryRunner.java new file mode 100644 index 00000000000..2bd8f34a46a --- /dev/null +++ b/processing/src/main/java/io/druid/query/RetryQueryRunner.java @@ -0,0 +1,66 @@ +/* + * Druid - a distributed column store. + * Copyright (C) 2012, 2013 Metamarkets Group Inc. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ + +package io.druid.query; + +import com.metamx.common.guava.Sequence; +import com.metamx.common.guava.Sequences; +import io.druid.query.spec.QuerySegmentSpec; +import io.druid.query.spec.SpecificSegmentSpec; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +public class RetryQueryRunner implements QueryRunner +{ + private final QueryRunner baseRunner; + private final QueryToolChest> toolChest; + + public RetryQueryRunner(QueryRunner baseRunner, QueryToolChest> toolChest) + { + this.baseRunner = baseRunner; + this.toolChest = toolChest; + } + + @Override + public Sequence run(final Query query, Map metadata) + { + Sequence returningSeq = baseRunner.run(query, metadata); + + for (int i = RetryQueryRunnerConfig.numTries(); i > 0; i--) { + for (int j = metadata.get("missingSegments").size(); j > 0; j--) { + QuerySegmentSpec segmentSpec = new SpecificSegmentSpec((SegmentDescriptor)metadata.get("missingSegments").remove(0)); + returningSeq = toolChest.mergeSequences( + Sequences.simple( + Arrays.asList( + returningSeq, + baseRunner.run( + query.withQuerySegmentSpec(segmentSpec), + metadata + ) + ) + ) + ); + } + } + + return returningSeq; + } +} diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java new file mode 100644 index 00000000000..f917c18c1b5 --- /dev/null +++ b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java @@ -0,0 +1,30 @@ +/* + * Druid - a distributed column store. + * Copyright (C) 2012, 2013 Metamarkets Group Inc. 
+ * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ + +package io.druid.query; + +import com.fasterxml.jackson.annotation.JsonProperty; + +public class RetryQueryRunnerConfig +{ + @JsonProperty + private static int numTries = 1; + + public static int numTries() { return numTries; } +} diff --git a/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java b/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java index 8e13d9219e9..dc7994cd01e 100644 --- a/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java +++ b/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java @@ -23,6 +23,9 @@ package io.druid.query; import com.metamx.common.guava.Sequence; +import java.util.List; +import java.util.Map; + /** * If there's a subquery, run it instead of the outer query */ @@ -36,13 +39,13 @@ public class SubqueryQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query) + public Sequence run(final Query query, Map metadata) { DataSource dataSource = query.getDataSource(); if (dataSource instanceof QueryDataSource) { - return run((Query) ((QueryDataSource) dataSource).getQuery()); + return run((Query) ((QueryDataSource) dataSource).getQuery(), metadata); } else { - return baseRunner.run(query); + return baseRunner.run(query, metadata); } } } diff --git a/processing/src/main/java/io/druid/query/UnionQueryRunner.java b/processing/src/main/java/io/druid/query/UnionQueryRunner.java index 6679e6d7c77..d081d20cafe 100644 --- a/processing/src/main/java/io/druid/query/UnionQueryRunner.java +++ b/processing/src/main/java/io/druid/query/UnionQueryRunner.java @@ -26,7 +26,8 @@ import com.google.common.collect.Lists; import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequences; -import java.util.ArrayList; +import java.util.List; +import java.util.Map; public class UnionQueryRunner implements QueryRunner { @@ -43,7 +44,7 @@ public class UnionQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query) + public Sequence run(final Query query, final Map metadata) { DataSource dataSource = query.getDataSource(); if (dataSource instanceof UnionDataSource) { @@ -57,7 +58,8 @@ public class UnionQueryRunner implements QueryRunner public Sequence apply(DataSource singleSource) { return baseRunner.run( - query.withDataSource(singleSource) + query.withDataSource(singleSource), + metadata ); } } @@ -65,7 +67,7 @@ public class UnionQueryRunner implements QueryRunner ) ); } else { - return baseRunner.run(query); + return baseRunner.run(query, metadata); } } diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java index 0e00ceae46d..6ce42b14b53 100644 --- 
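RetryQueryRunner above re-issues one narrowed query per segment recorded under "missingSegments" and folds each partial result back in with the tool chest's mergeSequences; note that numTries is read from a static field on RetryQueryRunnerConfig rather than an injected instance. The same control flow, condensed, with Lists standing in for Sequences and hypothetical names:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    public class RetrySketch
    {
      // Stand-in runner: a null spec means "run everything", otherwise one segment.
      interface Runner
      {
        List<String> run(Object segmentSpec, Map<String, Object> metadata);
      }

      static List<String> runWithRetry(Runner base, Map<String, Object> metadata, int numTries)
      {
        List<String> merged = new ArrayList<String>(base.run(null, metadata));
        List<Object> missing = (List<Object>) metadata.get("missingSegments");
        for (int i = numTries; i > 0; i--) {
          while (!missing.isEmpty()) {
            // Re-issue the query narrowed to exactly the segment that was reported;
            // a retry that fails again simply re-adds the descriptor to the map.
            Object segmentSpec = missing.remove(0);
            merged.addAll(base.run(segmentSpec, metadata));
          }
        }
        return merged;
      }
    }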
a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java @@ -49,6 +49,7 @@ import io.druid.segment.incremental.IncrementalIndexStorageAdapter; import org.joda.time.Interval; import org.joda.time.Minutes; +import java.util.List; import java.util.Map; /** @@ -79,18 +80,18 @@ public class GroupByQueryQueryToolChest extends QueryToolChest() { @Override - public Sequence run(Query input) + public Sequence run(Query input, Map metadata) { if (Boolean.valueOf((String) input.getContextValue(GROUP_BY_MERGE_KEY, "true"))) { - return mergeGroupByResults(((GroupByQuery) input).withOverriddenContext(NO_MERGE_CONTEXT), runner); + return mergeGroupByResults(((GroupByQuery) input).withOverriddenContext(NO_MERGE_CONTEXT), runner, metadata); } else { - return runner.run(input); + return runner.run(input, metadata); } } }; } - private Sequence mergeGroupByResults(final GroupByQuery query, QueryRunner runner) + private Sequence mergeGroupByResults(final GroupByQuery query, QueryRunner runner, Map metadata) { Sequence result; @@ -104,12 +105,12 @@ public class GroupByQueryQueryToolChest extends QueryToolChest subqueryResult = mergeGroupByResults(subquery, runner); + Sequence subqueryResult = mergeGroupByResults(subquery, runner, metadata); IncrementalIndexStorageAdapter adapter = new IncrementalIndexStorageAdapter(makeIncrementalIndex(subquery, subqueryResult)); result = engine.process(query, adapter); } else { - result = runner.run(query); + result = runner.run(query, metadata); } return postAggregate(query, makeIncrementalIndex(query, result)); diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java index e8634089c2f..f9bd43c20dd 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java @@ -45,6 +45,8 @@ import io.druid.query.QueryWatcher; import io.druid.segment.Segment; import io.druid.segment.StorageAdapter; +import java.util.List; +import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; @@ -100,7 +102,7 @@ public class GroupByQueryRunnerFactory implements QueryRunnerFactory() { @Override - public Sequence run(final Query query) + public Sequence run(final Query query, final Map metadata) { ListenableFuture> future = queryExecutor.submit( @@ -110,7 +112,7 @@ public class GroupByQueryRunnerFactory implements QueryRunnerFactory call() throws Exception { return new ExecutorExecutingSequence( - input.run(query), + input.run(query, metadata), queryExecutor ); } @@ -166,7 +168,7 @@ public class GroupByQueryRunnerFactory implements QueryRunnerFactory run(Query input) + public Sequence run(Query input, Map metadata) { if (!(input instanceof GroupByQuery)) { throw new ISE("Got a [%s] which isn't a %s", input.getClass(), GroupByQuery.class); diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index c8e7208638c..395db8908fb 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -45,6 +45,7 @@ 
import io.druid.segment.QueryableIndex; import io.druid.segment.Segment; import java.util.Arrays; +import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; @@ -74,7 +75,7 @@ public class SegmentMetadataQueryRunnerFactory implements QueryRunnerFactory() { @Override - public Sequence run(Query inQ) + public Sequence run(Query inQ, Map metadata) { SegmentMetadataQuery query = (SegmentMetadataQuery) inQ; @@ -133,7 +134,10 @@ public class SegmentMetadataQueryRunnerFactory implements QueryRunnerFactory() { @Override - public Sequence run(final Query query) + public Sequence run( + final Query query, + final Map metadata + ) { final int priority = query.getContextPriority(0); ListenableFuture> future = queryExecutor.submit( @@ -142,7 +146,7 @@ public class SegmentMetadataQueryRunnerFactory implements QueryRunnerFactory call() throws Exception { - return input.run(query); + return input.run(query, metadata); } } ); diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java index a0a2487cca4..229101adf95 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java @@ -284,7 +284,10 @@ public class SearchQueryQueryToolChest extends QueryToolChest> run(Query> input) + public Sequence> run( + Query> input, + Map metadata + ) { if (!(input instanceof SearchQuery)) { throw new ISE("Can only handle [%s], got [%s]", SearchQuery.class, input.getClass()); @@ -292,13 +295,13 @@ public class SearchQueryQueryToolChest extends QueryToolChest, Result>() { @Override diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java index 53fe74e104f..d95d63887b5 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java @@ -19,7 +19,6 @@ package io.druid.query.search; -import com.google.common.base.Function; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; @@ -52,7 +51,6 @@ import io.druid.segment.filter.Filters; import it.uniroma3.mat.extendedset.intset.ConciseSet; import it.uniroma3.mat.extendedset.intset.ImmutableConciseSet; -import javax.annotation.Nullable; import java.util.List; import java.util.Map; import java.util.TreeSet; @@ -71,7 +69,10 @@ public class SearchQueryRunner implements QueryRunner> } @Override - public Sequence> run(final Query> input) + public Sequence> run( + final Query> input, + Map metadata + ) { if (!(input instanceof SearchQuery)) { throw new ISE("Got a [%s] which isn't a %s", input.getClass(), SearchQuery.class); diff --git a/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java index 72cce700a6d..702d6295c57 100644 --- a/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java @@ -19,14 +19,11 @@ package io.druid.query.select; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; import com.metamx.common.ISE; import 
com.metamx.common.guava.Sequence; import io.druid.query.ChainedExecutionQueryRunner; import io.druid.query.Query; -import io.druid.query.QueryConfig; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryToolChest; @@ -34,6 +31,8 @@ import io.druid.query.QueryWatcher; import io.druid.query.Result; import io.druid.segment.Segment; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutorService; /** @@ -91,7 +90,10 @@ public class SelectQueryRunnerFactory } @Override - public Sequence> run(Query> input) + public Sequence> run( + Query> input, + Map metadata + ) { if (!(input instanceof SelectQuery)) { throw new ISE("Got a [%s] which isn't a %s", input.getClass(), SelectQuery.class); diff --git a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java index e9bcd73139d..cd8134cb2ee 100644 --- a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java @@ -22,12 +22,16 @@ package io.druid.query.spec; import com.google.common.base.Throwables; import com.metamx.common.guava.Accumulator; import com.metamx.common.guava.Sequence; +import com.metamx.common.guava.Sequences; import com.metamx.common.guava.Yielder; import com.metamx.common.guava.YieldingAccumulator; import io.druid.query.Query; import io.druid.query.QueryRunner; +import io.druid.segment.NullStorageAdapterException; import java.io.IOException; +import java.util.List; +import java.util.Map; import java.util.concurrent.Callable; /** @@ -47,7 +51,7 @@ public class SpecificSegmentQueryRunner implements QueryRunner } @Override - public Sequence run(final Query input) + public Sequence run(final Query input, final Map metadata) { final Query query = input.withQuerySegmentSpec(specificSpec); @@ -60,7 +64,14 @@ public class SpecificSegmentQueryRunner implements QueryRunner @Override public Sequence call() throws Exception { - return base.run(query); + Sequence returningSeq; + try { + returningSeq = base.run(query, metadata); + } catch (NullStorageAdapterException e) { + metadata.get("missingSegments").add(((SpecificSegmentSpec) specificSpec).getDescriptor()); + returningSeq = Sequences.empty(); + } + return returningSeq; } }); diff --git a/processing/src/main/java/io/druid/query/spec/SpecificSegmentSpec.java b/processing/src/main/java/io/druid/query/spec/SpecificSegmentSpec.java index d83ef60b894..eba3886aa9e 100644 --- a/processing/src/main/java/io/druid/query/spec/SpecificSegmentSpec.java +++ b/processing/src/main/java/io/druid/query/spec/SpecificSegmentSpec.java @@ -52,6 +52,8 @@ public class SpecificSegmentSpec implements QuerySegmentSpec return walker.getQueryRunnerForSegments(query, Arrays.asList(descriptor)); } + public SegmentDescriptor getDescriptor() { return descriptor; } + @Override public boolean equals(Object o) { diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java index cc4aaa46579..b9a5216bedf 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java @@ -45,6 +45,7 @@ import org.joda.time.DateTime; import javax.annotation.Nullable; import java.nio.ByteBuffer; import 
java.util.List; +import java.util.Map; /** */ @@ -95,13 +96,13 @@ public class TimeBoundaryQueryQueryToolChest { @Override protected Sequence> doRun( - QueryRunner> baseRunner, Query> input + QueryRunner> baseRunner, Query> input, Map metadata ) { TimeBoundaryQuery query = (TimeBoundaryQuery) input; return Sequences.simple( query.mergeResults( - Sequences.toList(baseRunner.run(query), Lists.>newArrayList()) + Sequences.toList(baseRunner.run(query, metadata), Lists.>newArrayList()) ) ); } diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java index 1f78429ead3..3858c0d8cc4 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java @@ -34,6 +34,8 @@ import io.druid.segment.Segment; import io.druid.segment.StorageAdapter; import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutorService; /** @@ -82,7 +84,10 @@ public class TimeBoundaryQueryRunnerFactory } @Override - public Sequence> run(Query> input) + public Sequence> run( + Query> input, + Map metadata + ) { if (!(input instanceof TimeBoundaryQuery)) { throw new ISE("Got a [%s] which isn't a %s", input.getClass(), TimeBoundaryQuery.class); diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java index ad290536b30..ee239c2c572 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java @@ -20,7 +20,6 @@ package io.druid.query.timeseries; import com.google.common.base.Function; -import com.metamx.common.ISE; import com.metamx.common.guava.Sequence; import io.druid.query.QueryRunnerHelper; import io.druid.query.Result; @@ -28,6 +27,7 @@ import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.segment.Cursor; +import io.druid.segment.NullStorageAdapterException; import io.druid.segment.StorageAdapter; import io.druid.segment.filter.Filters; @@ -40,7 +40,7 @@ public class TimeseriesQueryEngine public Sequence> process(final TimeseriesQuery query, final StorageAdapter adapter) { if (adapter == null) { - throw new ISE( + throw new NullStorageAdapterException( "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped." 
); } diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java index 724d4818226..1706378ae38 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java @@ -19,13 +19,11 @@ package io.druid.query.timeseries; -import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Inject; import com.metamx.common.ISE; import com.metamx.common.guava.Sequence; import io.druid.query.ChainedExecutionQueryRunner; import io.druid.query.Query; -import io.druid.query.QueryConfig; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryToolChest; @@ -34,6 +32,8 @@ import io.druid.query.Result; import io.druid.segment.Segment; import io.druid.segment.StorageAdapter; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutorService; /** @@ -91,7 +91,10 @@ public class TimeseriesQueryRunnerFactory } @Override - public Sequence> run(Query> input) + public Sequence> run( + Query> input, + Map metadata + ) { if (!(input instanceof TimeseriesQuery)) { throw new ISE("Got a [%s] which isn't a %s", input.getClass(), TimeseriesQuery.class); diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java index f0716a60407..1faf1fb7699 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java @@ -22,8 +22,6 @@ package io.druid.query.topn; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Predicates; -import com.metamx.common.ISE; -import com.metamx.common.guava.FunctionalIterable; import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequences; import com.metamx.common.logger.Logger; @@ -34,11 +32,11 @@ import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.filter.Filter; import io.druid.segment.Capabilities; import io.druid.segment.Cursor; +import io.druid.segment.NullStorageAdapterException; import io.druid.segment.StorageAdapter; import io.druid.segment.filter.Filters; import org.joda.time.Interval; -import javax.sql.rowset.Predicate; import java.nio.ByteBuffer; import java.util.List; @@ -58,7 +56,7 @@ public class TopNQueryEngine public Sequence> query(final TopNQuery query, final StorageAdapter adapter) { if (adapter == null) { - throw new ISE( + throw new NullStorageAdapterException( "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped." 
); } diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java index f275651383f..43fc6b9f303 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java @@ -410,7 +410,10 @@ public class TopNQueryQueryToolChest extends QueryToolChest> run(Query> input) + public Sequence> run( + Query> input, + Map metadata + ) { if (!(input instanceof TopNQuery)) { throw new ISE("Can only handle [%s], got [%s]", TopNQuery.class, input.getClass()); @@ -418,13 +421,13 @@ public class TopNQueryQueryToolChest extends QueryToolChest minTopNThreshold) { - return runner.run(query); + return runner.run(query, metadata); } final boolean isBySegment = query.getContextBySegment(false); return Sequences.map( - runner.run(query.withThreshold(minTopNThreshold)), + runner.run(query.withThreshold(minTopNThreshold), metadata), new Function, Result>() { @Override diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java index 524f9ace6a5..7b3c5cb07a9 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java @@ -21,7 +21,6 @@ package io.druid.query.topn; import com.google.inject.Inject; import com.metamx.common.ISE; -import com.metamx.common.guava.BaseSequence; import com.metamx.common.guava.Sequence; import io.druid.collections.StupidPool; import io.druid.guice.annotations.Global; @@ -35,7 +34,8 @@ import io.druid.query.Result; import io.druid.segment.Segment; import java.nio.ByteBuffer; -import java.util.Iterator; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutorService; /** @@ -65,7 +65,10 @@ public class TopNQueryRunnerFactory implements QueryRunnerFactory>() { @Override - public Sequence> run(Query> input) + public Sequence> run( + Query> input, + Map metadata + ) { if (!(input instanceof TopNQuery)) { throw new ISE("Got a [%s] which isn't a %s", input.getClass(), TopNQuery.class); diff --git a/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java b/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java index f21f7f1fa09..ab5d7974dcb 100644 --- a/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java +++ b/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java @@ -31,6 +31,7 @@ public class IncrementalIndexSegment implements Segment { private final IncrementalIndex index; private final String segmentIdentifier; + private boolean nullStorage = false; public IncrementalIndexSegment( IncrementalIndex index, diff --git a/processing/src/main/java/io/druid/segment/NullStorageAdapterException.java b/processing/src/main/java/io/druid/segment/NullStorageAdapterException.java new file mode 100644 index 00000000000..8d2b967afff --- /dev/null +++ b/processing/src/main/java/io/druid/segment/NullStorageAdapterException.java @@ -0,0 +1,27 @@ +/* + * Druid - a distributed column store. + * Copyright (C) 2012, 2013 Metamarkets Group Inc. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ + +package io.druid.segment; + +public class NullStorageAdapterException extends IllegalStateException +{ + public NullStorageAdapterException(String formatText, Object... arguments) { + super(String.format(formatText, arguments)); + } +} diff --git a/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java b/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java index c2a7ee18aa6..e5075fc80ab 100644 --- a/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java +++ b/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java @@ -29,6 +29,7 @@ public class QueryableIndexSegment implements Segment { private final QueryableIndex index; private final String identifier; + private boolean nullStorage = false; public QueryableIndexSegment(final String segmentIdentifier, QueryableIndex index) { diff --git a/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java index f2555dd7214..4c1b7351c0d 100644 --- a/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java @@ -35,6 +35,9 @@ import org.easymock.IAnswer; import org.junit.Assert; import org.junit.Test; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; @@ -99,13 +102,14 @@ public class ChainedExecutionQueryRunnerTest runner3 ) ); - + HashMap metadata = new HashMap(); final Sequence seq = chainedRunner.run( Druids.newTimeseriesQueryBuilder() .dataSource("test") .intervals("2014/2015") .aggregators(Lists.newArrayList(new CountAggregatorFactory("count"))) - .build() + .build(), + metadata ); Future resultFuture = Executors.newFixedThreadPool(1).submit( @@ -202,14 +206,15 @@ public class ChainedExecutionQueryRunnerTest runner3 ) ); - + HashMap metadata = new HashMap(); final Sequence seq = chainedRunner.run( Druids.newTimeseriesQueryBuilder() .dataSource("test") .intervals("2014/2015") .aggregators(Lists.newArrayList(new CountAggregatorFactory("count"))) .context(ImmutableMap.of("timeout", (100), "queryId", "test")) - .build() + .build(), + metadata ); Future resultFuture = Executors.newFixedThreadPool(1).submit( @@ -263,7 +268,7 @@ public class ChainedExecutionQueryRunnerTest } @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { hasStarted = true; latch.countDown(); diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index be8fe086d78..ac073b6f723 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -74,6 +74,7 @@ import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collection; 
import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -343,7 +344,7 @@ public class GroupByQueryRunnerTest new QueryRunner() { @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { // simulate two daily segments final Query query1 = query.withQuerySegmentSpec( @@ -352,7 +353,7 @@ public class GroupByQueryRunnerTest final Query query2 = query.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) ); - return Sequences.concat(runner.run(query1), runner.run(query2)); + return Sequences.concat(runner.run(query1, metadata), runner.run(query2, metadata)); } } ); @@ -369,8 +370,9 @@ public class GroupByQueryRunnerTest createExpectedRow("2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L) ); - TestHelper.assertExpectedObjects(expectedResults, runner.run(fullQuery), "direct"); - TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery), "merged"); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, runner.run(fullQuery, metadata), "direct"); + TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, metadata), "merged"); List allGranExpectedResults = Arrays.asList( createExpectedRow("2011-04-02", "alias", "automotive", "rows", 2L, "idx", 269L), @@ -384,8 +386,8 @@ public class GroupByQueryRunnerTest createExpectedRow("2011-04-02", "alias", "travel", "rows", 2L, "idx", 243L) ); - TestHelper.assertExpectedObjects(allGranExpectedResults, runner.run(allGranQuery), "direct"); - TestHelper.assertExpectedObjects(allGranExpectedResults, mergedRunner.run(allGranQuery), "merged"); + TestHelper.assertExpectedObjects(allGranExpectedResults, runner.run(allGranQuery, metadata), "direct"); + TestHelper.assertExpectedObjects(allGranExpectedResults, mergedRunner.run(allGranQuery, metadata), "merged"); } @Test @@ -427,9 +429,9 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery), String.format("limit: %d", limit) + Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, metadata), String.format("limit: %d", limit) ); } @@ -535,7 +537,7 @@ public class GroupByQueryRunnerTest new QueryRunner() { @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { // simulate two daily segments final Query query1 = query.withQuerySegmentSpec( @@ -544,12 +546,12 @@ public class GroupByQueryRunnerTest final Query query2 = query.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) ); - return Sequences.concat(runner.run(query1), runner.run(query2)); + return Sequences.concat(runner.run(query1, metadata), runner.run(query2, metadata)); } } ); - - TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery), "merged"); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, metadata), "merged"); } @Test @@ -585,10 +587,11 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query), "no-limit"); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, 
mergeRunner.run(query, metadata), "no-limit"); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build()), "limited" + Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), metadata), "limited" ); } @@ -625,9 +628,10 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query), "no-limit"); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build()), "limited" + Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), metadata), "limited" ); } @@ -664,9 +668,10 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query), "no-limit"); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build()), "limited" + Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), metadata), "limited" ); } @@ -706,7 +711,7 @@ public class GroupByQueryRunnerTest new QueryRunner() { @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { // simulate two daily segments final Query query1 = query.withQuerySegmentSpec( @@ -715,12 +720,13 @@ public class GroupByQueryRunnerTest final Query query2 = query.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) ); - return Sequences.concat(runner.run(query1), runner.run(query2)); + return Sequences.concat(runner.run(query1, metadata), runner.run(query2, metadata)); } } ); - TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery), "merged"); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, metadata), "merged"); } @Test @@ -759,8 +765,9 @@ public class GroupByQueryRunnerTest ) ); + HashMap metadata = new HashMap(); QueryRunner mergeRunner = new GroupByQueryQueryToolChest(configSupplier, engine).mergeResults(runner); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query), "no-limit"); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); } @Test @@ -792,8 +799,8 @@ public class GroupByQueryRunnerTest createExpectedRow("2011-04-01", "quality", "technology", "rows", 2L), createExpectedRow("2011-04-01", "quality", "travel", "rows", 2L) ); - - TestHelper.assertExpectedObjects(expectedResults, runner.run(query), "normal"); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, runner.run(query, metadata), "normal"); final GroupByQueryEngine engine = new GroupByQueryEngine( configSupplier, new StupidPool( @@ -809,7 +816,7 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = new GroupByQueryQueryToolChest(configSupplier, engine).mergeResults(runner); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query), "no-limit"); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); } 
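The change repeated throughout these tests is the new two-argument form of QueryRunner.run: every call site now passes a mutable map alongside the query, so a runner (and anything it delegates to) can report response metadata, in this patch the segments it failed to serve, without changing its return type. A minimal sketch of the calling convention follows; the type parameters on the map are an assumption added for readability (the patch itself passes raw types), and runAndCollectMissing is a hypothetical helper, not part of the patch.

import com.metamx.common.guava.Sequence;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.SegmentDescriptor;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class ResponseMetadataExample
{
  // runs a query and returns whatever segments the runner reported as missing
  public static <T> List<SegmentDescriptor> runAndCollectMissing(QueryRunner<T> runner, Query<T> query)
  {
    // seed the map with the key the server side (QueryResource, below) expects
    Map<String, List<SegmentDescriptor>> metadata = new HashMap<String, List<SegmentDescriptor>>();
    metadata.put("missingSegments", new LinkedList<SegmentDescriptor>());

    Sequence<T> results = runner.run(query, metadata); // the runner may append as results stream in
    // ... consume the sequence; sequences are lazy, so reports arrive during consumption ...
    return metadata.get("missingSegments");
  }
}

The tests that simply pass new HashMap() only exercise the new signature; the map's contents matter once the broker-side plumbing further down reads them back.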
@Test @@ -842,7 +849,8 @@ public class GroupByQueryRunnerTest createExpectedRow("2011-04-01", "quality", "travel", "rows", 2L) ); - TestHelper.assertExpectedObjects(expectedResults, runner.run(query), "normal"); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, runner.run(query, metadata), "normal"); final GroupByQueryEngine engine = new GroupByQueryEngine( configSupplier, new StupidPool( @@ -858,7 +866,7 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = new GroupByQueryQueryToolChest(configSupplier, engine).mergeResults(runner); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query), "no-limit"); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); } // A subquery identical to the query should yield identical results @@ -1038,7 +1046,8 @@ public class GroupByQueryRunnerTest toolChest ); - Sequence queryResult = theRunner.run(query); + HashMap metadata = new HashMap(); + Sequence queryResult = theRunner.run(query, metadata); return Sequences.toList(queryResult, Lists.newArrayList()); } diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java index a9fb506ca0b..dcb6b76e575 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java @@ -42,6 +42,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collection; +import java.util.Map; /** */ @@ -91,7 +92,7 @@ public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest QueryRunner timeseriesRunner = new QueryRunner() { @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { TimeseriesQuery tsQuery = (TimeseriesQuery) query; @@ -104,7 +105,8 @@ public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest .setDimFilter(tsQuery.getDimensionsFilter()) .setAggregatorSpecs(tsQuery.getAggregatorSpecs()) .setPostAggregatorSpecs(tsQuery.getPostAggregatorSpecs()) - .build() + .build(), + metadata ), new Function>() { diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java index 70c65f8da88..bb2dcbdd85d 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java @@ -25,7 +25,6 @@ import io.druid.query.LegacyDataSource; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryRunnerTestHelper; -import io.druid.query.QueryWatcher; import io.druid.query.metadata.metadata.ColumnAnalysis; import io.druid.query.metadata.metadata.SegmentAnalysis; import io.druid.query.metadata.metadata.SegmentMetadataQuery; @@ -38,6 +37,7 @@ import io.druid.segment.column.ValueType; import junit.framework.Assert; import org.junit.Test; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -102,6 +102,7 @@ public class SegmentAnalyzerTest final SegmentMetadataQuery query = new SegmentMetadataQuery( new LegacyDataSource("test"), QuerySegmentSpecs.create("2011/2012"), null, null, null ); - return Sequences.toList(query.run(runner), Lists.newArrayList()); + HashMap metadata = new HashMap(); + return 
Sequences.toList(query.run(runner, metadata), Lists.newArrayList()); } } diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index ed1740460f8..3e627bdca49 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -40,6 +40,8 @@ import org.junit.Assert; import org.junit.Test; import java.util.Arrays; +import java.util.HashMap; +import java.util.List; public class SegmentMetadataQueryTest { @@ -70,9 +72,9 @@ public class SegmentMetadataQueryTest .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) .merge(true) .build(); - + HashMap metadata = new HashMap(); Iterable results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.newArrayList() ); SegmentAnalysis val = results.iterator().next(); diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java index c69ee1c5a27..1e64c284b84 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java @@ -23,13 +23,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import com.google.common.util.concurrent.ListenableFuture; import com.metamx.common.guava.Sequences; import io.druid.query.Druids; -import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; -import io.druid.query.QueryWatcher; import io.druid.query.Result; import io.druid.query.filter.DimFilter; import io.druid.query.search.search.FragmentSearchQuerySpec; @@ -47,6 +44,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; @@ -371,8 +369,9 @@ public class SearchQueryRunnerTest private void checkSearchQuery(SearchQuery searchQuery, Map> expectedResults) { + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(searchQuery), + runner.run(searchQuery, metadata), Lists.>newArrayList() ); diff --git a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java index 07f99165873..88a983be6d6 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java @@ -22,15 +22,12 @@ package io.druid.query.select; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.common.util.concurrent.ListenableFuture; import com.metamx.common.ISE; import com.metamx.common.guava.Sequences; import io.druid.jackson.DefaultObjectMapper; -import io.druid.query.Query; import io.druid.query.QueryConfig; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; -import io.druid.query.QueryWatcher; import io.druid.query.Result; import io.druid.query.TableDataSource; import io.druid.query.filter.SelectorDimFilter; @@ -45,6 +42,7 @@ import org.junit.runners.Parameterized; import java.io.IOException; import java.util.Arrays; 
import java.util.Collection; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -90,9 +88,9 @@ public class SelectQueryRunnerTest new PagingSpec(null, 3), null ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); @@ -159,9 +157,9 @@ public class SelectQueryRunnerTest new PagingSpec(null, 3), null ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); @@ -219,9 +217,9 @@ public class SelectQueryRunnerTest new PagingSpec(Maps.newLinkedHashMap(ImmutableMap.of(QueryRunnerTestHelper.segmentId, 3)), 3), null ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); @@ -279,9 +277,9 @@ public class SelectQueryRunnerTest new PagingSpec(Maps.newLinkedHashMap(ImmutableMap.of(QueryRunnerTestHelper.segmentId, 3)), 3), null ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index 7bc499dca80..28b378e08c9 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -20,13 +20,10 @@ package io.druid.query.timeboundary; import com.google.common.collect.Lists; -import com.google.common.util.concurrent.ListenableFuture; import com.metamx.common.guava.Sequences; import io.druid.query.Druids; -import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; -import io.druid.query.QueryWatcher; import io.druid.query.Result; import org.joda.time.DateTime; import org.junit.Assert; @@ -36,6 +33,8 @@ import org.junit.runners.Parameterized; import java.io.IOException; import java.util.Collection; +import java.util.HashMap; +import java.util.List; /** */ @@ -66,9 +65,9 @@ public class TimeBoundaryQueryRunnerTest TimeBoundaryQuery timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder() .dataSource("testing") .build(); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(timeBoundaryQuery), + runner.run(timeBoundaryQuery, metadata), Lists.>newArrayList() ); TimeBoundaryResultValue val = results.iterator().next().getValue(); diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java index 17d61908c3c..bedc433cc69 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java @@ -38,6 +38,7 @@ import org.junit.runners.Parameterized; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; @RunWith(Parameterized.class) @@ -97,9 +98,9 @@ public class TimeSeriesUnionQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); diff --git 
a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java index 67c91b4be40..92821c4260c 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java @@ -43,6 +43,7 @@ import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Test; +import java.util.HashMap; import java.util.List; public class TimeseriesQueryRunnerBonusTest @@ -110,9 +111,9 @@ public class TimeseriesQueryRunnerBonusTest ) ) .build(); - + HashMap metadata = new HashMap(); return Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); } diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 708a7de1054..17ee38449dd 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -52,6 +52,7 @@ import org.junit.runners.Parameterized; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; /** @@ -100,9 +101,10 @@ public class TimeseriesQueryRunnerTest DateTime expectedEarliest = new DateTime("2011-01-12"); DateTime expectedLast = new DateTime("2011-04-15"); + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); @@ -166,8 +168,9 @@ public class TimeseriesQueryRunnerTest DateTime expectedEarliest = new DateTime("2011-01-12"); DateTime expectedLast = new DateTime("2011-04-15"); + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); Result result = results.iterator().next(); @@ -212,9 +215,9 @@ public class TimeseriesQueryRunnerTest DateTime expectedEarliest = new DateTime("2011-01-12"); DateTime expectedLast = new DateTime("2011-04-15"); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); @@ -278,9 +281,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); @@ -325,9 +328,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); @@ -367,9 +370,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results1 = Sequences.toList( - runner.run(query1), + runner.run(query1, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -406,7 +409,7 @@ public class TimeseriesQueryRunnerTest ); Iterable> results2 = Sequences.toList( - runner.run(query2), + runner.run(query2, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults2, results2); @@ -457,9 +460,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results1 = Sequences.toList( - runner.run(query1), + 
runner.run(query1, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -499,9 +502,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results1 = Sequences.toList( - runner.run(query1), + runner.run(query1, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -539,7 +542,7 @@ public class TimeseriesQueryRunnerTest ); Iterable> results2 = Sequences.toList( - runner.run(query2), + runner.run(query2, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults2, results2); @@ -572,9 +575,9 @@ public class TimeseriesQueryRunnerTest .build(); List> expectedResults = Arrays.asList(); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -622,9 +625,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -672,9 +675,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -722,9 +725,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -772,9 +775,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -822,9 +825,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -880,9 +883,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -938,9 +941,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -996,9 +999,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1052,9 +1055,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1114,9 +1117,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + 
HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1158,9 +1161,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1202,9 +1205,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1260,9 +1263,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - + HashMap metadata = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1300,8 +1303,9 @@ public class TimeseriesQueryRunnerTest ) ) ); + HashMap metadata = new HashMap(); Iterable> actualResults = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1340,8 +1344,9 @@ public class TimeseriesQueryRunnerTest ) ) ); + HashMap metadata = new HashMap(); Iterable> actualResults = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1358,7 +1363,7 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - + HashMap metadata = new HashMap(); Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1367,12 +1372,13 @@ public class TimeseriesQueryRunnerTest .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) - .build() + .build(), + metadata ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1389,7 +1395,7 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - + HashMap metadata = new HashMap(); Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1399,12 +1405,13 @@ public class TimeseriesQueryRunnerTest .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) - .build() + .build(), + metadata ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1450,7 +1457,7 @@ public class TimeseriesQueryRunnerTest ) ) .build(); - + HashMap metadata = new HashMap(); Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1460,12 +1467,13 @@ public class TimeseriesQueryRunnerTest 
.intervals(QueryRunnerTestHelper.firstToThird) .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) - .build() + .build(), + metadata ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1513,7 +1521,7 @@ public class TimeseriesQueryRunnerTest ) ) .build(); - + HashMap metadata = new HashMap(); Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1523,12 +1531,13 @@ public class TimeseriesQueryRunnerTest .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) - .build() + .build(), + metadata ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query), + runner.run(query, metadata), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index 09d383168cf..44a166aa503 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -52,6 +52,7 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -166,8 +167,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -230,8 +231,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @@ -295,8 +296,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @@ -345,8 +346,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -394,8 +395,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -443,8 +444,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -485,8 +486,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -520,8 +521,8 @@ public class TopNQueryRunnerTest ) ) ); - - 
TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -569,8 +570,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -622,8 +623,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -664,8 +665,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -682,7 +683,7 @@ public class TopNQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedResults( Lists.>newArrayList( new Result( @@ -690,7 +691,7 @@ public class TopNQueryRunnerTest new TopNResultValue(Lists.>newArrayList()) ) ), - runner.run(query) + runner.run(query, metadata) ); } @@ -721,7 +722,7 @@ public class TopNQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedResults( Lists.>newArrayList( new Result( @@ -729,7 +730,7 @@ public class TopNQueryRunnerTest new TopNResultValue(Lists.>newArrayList()) ) ), - runner.run(query) + runner.run(query, metadata) ); } @@ -747,7 +748,7 @@ public class TopNQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedResults( Sequences.toList( runner.run( @@ -761,9 +762,10 @@ public class TopNQueryRunnerTest .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) - .build() + .build(), + metadata ), Lists.>newArrayList() - ), runner.run(query) + ), runner.run(query, metadata) ); } @@ -781,7 +783,7 @@ public class TopNQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedResults( Sequences.toList( runner.run( @@ -795,10 +797,11 @@ public class TopNQueryRunnerTest .intervals(QueryRunnerTestHelper.firstToThird) .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) - .build() + .build(), + metadata ), Lists.>newArrayList() ) - , runner.run(query) + , runner.run(query, metadata) ); } @@ -840,8 +843,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -889,8 +892,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, 
runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -945,8 +948,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -993,8 +996,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1034,8 +1037,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1075,8 +1078,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1116,8 +1119,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1157,8 +1160,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1209,8 +1212,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1261,8 +1264,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1313,8 +1316,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1358,8 +1361,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @@ -1404,8 +1407,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1449,8 +1452,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1498,8 +1501,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1583,8 
+1586,8 @@ public class TopNQueryRunnerTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } @Test @@ -1666,6 +1669,7 @@ public class TopNQueryRunnerTest ) ) ); - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } } diff --git a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java index 7dc7b645cad..4df1aa50818 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java @@ -42,6 +42,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -173,8 +174,8 @@ public class TopNUnionQueryTest ) ) ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java index c8155526a89..7b76e2d0f7d 100644 --- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java @@ -61,6 +61,7 @@ import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Random; @@ -448,8 +449,8 @@ public class SpatialFilterBonusTest factory.createRunner(segment), factory.getToolchest() ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } catch (Exception e) { throw Throwables.propagate(e); @@ -535,8 +536,8 @@ public class SpatialFilterBonusTest factory.createRunner(segment), factory.getToolchest() ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } catch (Exception e) { throw Throwables.propagate(e); diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java index 84df58a260d..631d06c94d2 100644 --- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java @@ -61,6 +61,7 @@ import java.io.File; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Random; @@ -478,8 +479,8 @@ public class SpatialFilterTest factory.createRunner(segment), factory.getToolchest() ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } catch (Exception e) { throw Throwables.propagate(e); @@ -565,8 +566,8 @@ public class SpatialFilterTest 
factory.createRunner(segment), factory.getToolchest() ); - - TestHelper.assertExpectedResults(expectedResults, runner.run(query)); + HashMap metadata = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); } catch (Exception e) { throw Throwables.propagate(e); diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index cf5f09228f6..339849347ad 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -115,7 +115,7 @@ public class CachingClusteredClient implements QueryRunner } @Override - public Sequence run(final Query query) + public Sequence run(final Query query, final Map metadata) { final QueryToolChest> toolChest = warehouse.getToolChest(query); final CacheStrategy> strategy = toolChest.getCacheStrategy(query); @@ -327,11 +327,11 @@ public class CachingClusteredClient implements QueryRunner List intervals = segmentSpec.getIntervals(); if (!server.isAssignable() || !populateCache || isBySegment) { - resultSeqToAdd = clientQueryable.run(query.withQuerySegmentSpec(segmentSpec)); + resultSeqToAdd = clientQueryable.run(query.withQuerySegmentSpec(segmentSpec), metadata); } else { resultSeqToAdd = toolChest.mergeSequences( Sequences.map( - clientQueryable.run(rewrittenQuery.withQuerySegmentSpec(segmentSpec)), + clientQueryable.run(rewrittenQuery.withQuerySegmentSpec(segmentSpec), metadata), new Function>() { private final Function cacheFn = strategy.prepareForCache(); diff --git a/server/src/main/java/io/druid/client/CachingQueryRunner.java b/server/src/main/java/io/druid/client/CachingQueryRunner.java index ad2718e6a45..2a5c932e73c 100644 --- a/server/src/main/java/io/druid/client/CachingQueryRunner.java +++ b/server/src/main/java/io/druid/client/CachingQueryRunner.java @@ -40,6 +40,7 @@ import io.druid.query.SegmentDescriptor; import java.io.IOException; import java.util.Iterator; import java.util.List; +import java.util.Map; public class CachingQueryRunner implements QueryRunner { @@ -72,7 +73,7 @@ public class CachingQueryRunner implements QueryRunner } @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { final CacheStrategy strategy = toolChest.getCacheStrategy(query); @@ -140,7 +141,7 @@ public class CachingQueryRunner implements QueryRunner return Sequences.withEffect( Sequences.map( - base.run(query), + base.run(query, metadata), new Function() { @Override @@ -162,7 +163,7 @@ public class CachingQueryRunner implements QueryRunner MoreExecutors.sameThreadExecutor() ); } else { - return base.run(query); + return base.run(query, metadata); } } diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index b6030f9755b..5bf0747a0cf 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -54,6 +54,7 @@ import io.druid.query.QueryToolChest; import io.druid.query.QueryToolChestWarehouse; import io.druid.query.QueryWatcher; import io.druid.query.Result; +import io.druid.query.SegmentDescriptor; import io.druid.query.aggregation.MetricManipulatorFns; import org.jboss.netty.handler.codec.http.HttpChunk; import org.jboss.netty.handler.codec.http.HttpHeaders; @@ -64,6 +65,8 @@ import java.io.IOException; import java.io.InputStream; import java.net.URL; import 
java.util.Iterator; +import java.util.LinkedList; +import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; @@ -111,7 +114,7 @@ public class DirectDruidClient implements QueryRunner } @Override - public Sequence run(final Query query) + public Sequence run(final Query query, final Map metadata) { QueryToolChest> toolChest = warehouse.getToolChest(query); boolean isBySegment = query.getContextBySegment(false); @@ -156,6 +159,23 @@ public class DirectDruidClient implements QueryRunner log.debug("Initial response from url[%s]", url); startTime = System.currentTimeMillis(); byteCount += response.getContent().readableBytes(); + + // the queried node reports segments it could not serve in the Missing-Segments response header + if (response.getHeader("Missing-Segments") != null && !response.getHeader("Missing-Segments").equals("")) { + LinkedList missingSegments = new LinkedList(); + try { + missingSegments = objectMapper.readValue(response.getHeader("Missing-Segments"), LinkedList.class); + // the header deserializes as a list of raw maps; convert each entry in place to a SegmentDescriptor + for (int i = missingSegments.size(); i > 0; i--) { + missingSegments.add(objectMapper.convertValue(missingSegments.remove(0), SegmentDescriptor.class)); + } + } + catch (IOException e) { + // a malformed header is treated as no missing segments + } + metadata.get("missingSegments").addAll(missingSegments); + } + return super.handleResponse(response); } diff --git a/server/src/main/java/io/druid/server/ClientQuerySegmentWalker.java b/server/src/main/java/io/druid/server/ClientQuerySegmentWalker.java index f0e8aa01153..4a5e6ef40ac 100644 --- a/server/src/main/java/io/druid/server/ClientQuerySegmentWalker.java +++ b/server/src/main/java/io/druid/server/ClientQuerySegmentWalker.java @@ -31,6 +31,7 @@ import io.druid.query.QueryRunner; import io.druid.query.QuerySegmentWalker; import io.druid.query.QueryToolChest; import io.druid.query.QueryToolChestWarehouse; +import io.druid.query.RetryQueryRunner; import io.druid.query.SegmentDescriptor; import io.druid.query.UnionQueryRunner; import org.joda.time.Interval; @@ -86,7 +87,8 @@ public class ClientQuerySegmentWalker implements QuerySegmentWalker return toolChest.makeMetricBuilder(query); } }, - toolChest.preMergeQueryDecoration(baseClient) + toolChest.preMergeQueryDecoration(new RetryQueryRunner(baseClient, toolChest) + ) ).withWaitMeasuredFromNow(), toolChest ) diff --git a/server/src/main/java/io/druid/server/QueryResource.java b/server/src/main/java/io/druid/server/QueryResource.java index 33bdd519c83..2d13ae3cfa9 100644 --- a/server/src/main/java/io/druid/server/QueryResource.java +++ b/server/src/main/java/io/druid/server/QueryResource.java @@ -22,20 +22,15 @@ package io.druid.server; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; -import com.google.api.client.repackaged.com.google.common.base.Throwables; import com.google.common.base.Charsets; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableMap; import com.google.common.io.ByteStreams; -import com.google.common.io.Closeables; import com.google.inject.Inject; -import com.metamx.common.guava.Accumulator; -import com.metamx.common.guava.Accumulators; import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequences; import com.metamx.common.guava.Yielder; import com.metamx.common.guava.YieldingAccumulator; -import com.metamx.common.guava.YieldingAccumulators; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; @@ -51,7 +46,6 @@ import org.joda.time.DateTime; import
javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.POST; import javax.ws.rs.Path; @@ -59,12 +53,13 @@ import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; -import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.StreamingOutput; import java.io.IOException; import java.io.OutputStream; -import java.nio.charset.Charset; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; import java.util.UUID; /** @@ -147,7 +142,9 @@ public class QueryResource log.debug("Got query [%s]", query); } - Sequence results = query.run(texasRanger); + HashMap metadata = new HashMap(); + metadata.put("missingSegments", new LinkedList()); + Sequence results = query.run(texasRanger, metadata); if (results == null) { results = Sequences.empty(); @@ -167,6 +164,12 @@ public class QueryResource } ) ) { + + String missingSegments = ""; + if (!metadata.get("missingSegments").isEmpty()) { + missingSegments = jsonMapper.writeValueAsString(metadata.get("missingSegments")); + } + long requestTime = System.currentTimeMillis() - start; emitter.emit( @@ -209,6 +212,7 @@ public class QueryResource isSmile ? APPLICATION_JSON : APPLICATION_SMILE ) .header("X-Druid-Query-Id", queryId) + .header("Missing-Segments", missingSegments) .build(); } } diff --git a/server/src/main/java/io/druid/server/bridge/BridgeQuerySegmentWalker.java b/server/src/main/java/io/druid/server/bridge/BridgeQuerySegmentWalker.java index d7e4674fab2..5590fd420b8 100644 --- a/server/src/main/java/io/druid/server/bridge/BridgeQuerySegmentWalker.java +++ b/server/src/main/java/io/druid/server/bridge/BridgeQuerySegmentWalker.java @@ -40,6 +40,7 @@ import org.joda.time.Interval; import java.net.URL; import java.util.List; +import java.util.Map; /** */ @@ -86,7 +87,7 @@ public class BridgeQuerySegmentWalker implements QuerySegmentWalker return new QueryRunner() { @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { try { Server instance = brokerSelector.pick(); diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index 7e151f3818d..9c75067f678 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -106,6 +106,7 @@ import javax.annotation.Nullable; import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -262,7 +263,7 @@ public class CachingClusteredClientTest new DateTime("2011-01-09T01"), 181, 52 ) ); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedResults( makeRenamedTimeResults( new DateTime("2011-01-01"), 50, 5000, @@ -283,7 +284,8 @@ public class CachingClusteredClientTest builder.intervals("2011-01-01/2011-01-10") .aggregators(RENAMED_AGGS) .postAggregators(RENAMED_POST_AGGS) - .build() + .build(), + metadata ) ); } @@ -314,7 +316,7 @@ public class CachingClusteredClientTest new DateTime("2011-11-07", TIMEZONE), 85, 102 ) ); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedResults( makeRenamedTimeResults( new DateTime("2011-11-04", TIMEZONE), 50, 5000, @@ -326,7 +328,8 @@ 
public class CachingClusteredClientTest builder.intervals("2011-11-04/2011-11-08") .aggregators(RENAMED_AGGS) .postAggregators(RENAMED_POST_AGGS) - .build() + .build(), + metadata ) ); } @@ -442,7 +445,7 @@ public class CachingClusteredClientTest new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedResults( makeRenamedTopNResults( new DateTime("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998, @@ -463,7 +466,8 @@ public class CachingClusteredClientTest .metric("imps") .aggregators(RENAMED_AGGS) .postAggregators(RENAMED_POST_AGGS) - .build() + .build(), + metadata ) ); } @@ -497,7 +501,7 @@ public class CachingClusteredClientTest new DateTime("2011-11-07", TIMEZONE), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986 ) ); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedResults( makeRenamedTopNResults( @@ -511,7 +515,8 @@ public class CachingClusteredClientTest .metric("imps") .aggregators(RENAMED_AGGS) .postAggregators(RENAMED_POST_AGGS) - .build() + .build(), + metadata ) ); } @@ -561,7 +566,7 @@ public class CachingClusteredClientTest ) ); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedResults( makeRenamedTopNResults( new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, @@ -580,7 +585,8 @@ public class CachingClusteredClientTest .metric("imps") .aggregators(RENAMED_AGGS) .postAggregators(RENAMED_POST_AGGS) - .build() + .build(), + metadata ) ); } @@ -629,7 +635,7 @@ public class CachingClusteredClientTest ) ); - + HashMap metadata = new HashMap(); TestHelper.assertExpectedResults( makeTopNResults( new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, @@ -648,7 +654,8 @@ public class CachingClusteredClientTest .metric("avg_imps_per_row_double") .aggregators(AGGS) .postAggregators(POST_AGGS) - .build() + .build(), + metadata ) ); } @@ -756,6 +763,7 @@ public class CachingClusteredClientTest .once(); final Capture capture = new Capture(); + final Capture metadata = new Capture(); queryCaptures.add(capture); QueryRunner queryable = expectations.getQueryRunner(); @@ -768,8 +776,7 @@ public class CachingClusteredClientTest intervals.add(expectation.getInterval()); results.add(expectation.getResults()); } - - EasyMock.expect(queryable.run(EasyMock.capture(capture))) + EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(metadata))) .andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results)) .once(); @@ -782,7 +789,7 @@ public class CachingClusteredClientTest intervals.add(expectation.getInterval()); results.add(expectation.getResults()); } - EasyMock.expect(queryable.run(EasyMock.capture(capture))) + EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(metadata))) .andReturn(toQueryableTopNResults(segmentIds, intervals, results)) .once(); } else if (query instanceof SearchQuery) { @@ -794,7 +801,7 @@ public class CachingClusteredClientTest intervals.add(expectation.getInterval()); results.add(expectation.getResults()); } - EasyMock.expect(queryable.run(EasyMock.capture(capture))) + EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(metadata))) .andReturn(toQueryableSearchResults(segmentIds, intervals, results)) .once(); } else if (query instanceof TimeBoundaryQuery) { @@ -806,7 +813,7 @@ public class CachingClusteredClientTest intervals.add(expectation.getInterval()); results.add(expectation.getResults()); } - 
EasyMock.expect(queryable.run(EasyMock.capture(capture))) + EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(metadata))) .andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results)) .once(); } else { @@ -830,6 +837,7 @@ public class CachingClusteredClientTest @Override public void run() { + HashMap metadata = new HashMap(); for (int i = 0; i < numTimesToQuery; ++i) { TestHelper.assertExpectedResults( new MergeIterable<>( @@ -863,7 +871,8 @@ public class CachingClusteredClientTest actualQueryInterval ) ) - ) + ), + metadata ) ); } diff --git a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java index d72da61e751..e669d847f9d 100644 --- a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java +++ b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java @@ -57,6 +57,7 @@ import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; @@ -123,7 +124,7 @@ public class CachingQueryRunnerTest new QueryRunner() { @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { return resultSeq; } @@ -140,8 +141,8 @@ public class CachingQueryRunnerTest cacheStrategy.computeCacheKey(query) ); - - Sequence res = runner.run(query); + HashMap metadata = new HashMap(); + Sequence res = runner.run(query, metadata); // base sequence is not closed yet Assert.assertFalse("sequence must not be closed", closable.isClosed()); Assert.assertNull("cache must be empty", cache.get(cacheKey)); @@ -213,7 +214,7 @@ public class CachingQueryRunnerTest new QueryRunner() { @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { return Sequences.empty(); } @@ -221,8 +222,8 @@ public class CachingQueryRunnerTest new CacheConfig() ); - - List results = Sequences.toList(runner.run(query), new ArrayList()); + HashMap metadata = new HashMap(); + List results = Sequences.toList(runner.run(query, metadata), new ArrayList()); Assert.assertEquals(expectedResults, results); } diff --git a/server/src/test/java/io/druid/client/DirectDruidClientTest.java b/server/src/test/java/io/druid/client/DirectDruidClientTest.java index 4ad8ca5cd51..d6fbb1b7c9d 100644 --- a/server/src/test/java/io/druid/client/DirectDruidClientTest.java +++ b/server/src/test/java/io/druid/client/DirectDruidClientTest.java @@ -36,10 +36,8 @@ import io.druid.client.selector.QueryableDruidServer; import io.druid.client.selector.ServerSelector; import io.druid.jackson.DefaultObjectMapper; import io.druid.query.Druids; -import io.druid.query.Query; import io.druid.query.QueryInterruptedException; import io.druid.query.QueryRunnerTestHelper; -import io.druid.query.QueryWatcher; import io.druid.query.ReflectionQueryToolChestWarehouse; import io.druid.query.Result; import io.druid.query.timeboundary.TimeBoundaryQuery; @@ -57,6 +55,7 @@ import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.net.URL; +import java.util.HashMap; import java.util.List; public class DirectDruidClientTest @@ -118,20 +117,20 @@ public class DirectDruidClientTest serverSelector.addServer(queryableDruidServer2); TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build(); - - Sequence s1 = client1.run(query); + HashMap metadata = new HashMap(); + Sequence 
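Each EasyMock expectation above gains a second Capture so the tests compile against the two-argument run() and can still inspect what was passed. The idiom in isolation, as a sketch in the same EasyMock 3.x style as the tests (queryable is the mocked runner; raw types mirror the surrounding test code):

    final Capture capture = new Capture();   // captures the Query argument
    final Capture metadata = new Capture();  // captures the context Map argument
    EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(metadata)))
            .andReturn(Sequences.empty())
            .once();
    EasyMock.replay(queryable);
    // After the code under test executes, metadata.getValue() is the map the
    // client actually passed, e.g. for asserting on its "missingSegments" entry.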
s1 = client1.run(query, metadata); Assert.assertEquals(1, client1.getNumOpenConnections()); // simulate read timeout - Sequence s2 = client1.run(query); + Sequence s2 = client1.run(query, metadata); Assert.assertEquals(2, client1.getNumOpenConnections()); futureException.setException(new ReadTimeoutException()); Assert.assertEquals(1, client1.getNumOpenConnections()); // subsequent connections should work - Sequence s3 = client1.run(query); - Sequence s4 = client1.run(query); - Sequence s5 = client1.run(query); + Sequence s3 = client1.run(query, metadata); + Sequence s4 = client1.run(query, metadata); + Sequence s5 = client1.run(query, metadata); Assert.assertTrue(client1.getNumOpenConnections() == 4); @@ -142,8 +141,8 @@ public class DirectDruidClientTest Assert.assertEquals(new DateTime("2014-01-01T01:02:03Z"), results.get(0).getTimestamp()); Assert.assertEquals(3, client1.getNumOpenConnections()); - client2.run(query); - client2.run(query); + client2.run(query, metadata); + client2.run(query, metadata); Assert.assertTrue(client2.getNumOpenConnections() == 2); @@ -201,9 +200,9 @@ public class DirectDruidClientTest serverSelector.addServer(queryableDruidServer1); TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build(); - + HashMap metadata = new HashMap(); cancellationFuture.set(new StatusResponseHolder(HttpResponseStatus.OK, new StringBuilder("cancelled"))); - Sequence results = client1.run(query); + Sequence results = client1.run(query, metadata); Assert.assertEquals(0, client1.getNumOpenConnections()); diff --git a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java index 431fdce8318..371bdcc3671 100644 --- a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java @@ -71,8 +71,10 @@ import org.junit.Test; import java.io.File; import java.io.IOException; import java.util.Arrays; +import java.util.HashMap; import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -421,14 +423,14 @@ public class ServerManagerTest query, intervals ); - return serverManagerExec.submit( new Runnable() { @Override public void run() { - Sequence> seq = runner.run(query); + Map metadata = new HashMap(); + Sequence> seq = runner.run(query, metadata); Sequences.toList(seq, Lists.>newArrayList()); Iterator adaptersIter = factory.getAdapters().iterator(); @@ -677,9 +679,9 @@ public class ServerManagerTest } @Override - public Sequence run(Query query) + public Sequence run(Query query, Map metadata) { - return new BlockingSequence(runner.run(query), waitLatch, waitYieldLatch, notifyLatch); + return new BlockingSequence(runner.run(query, metadata), waitLatch, waitYieldLatch, notifyLatch); } } diff --git a/services/src/main/java/io/druid/cli/CliBroker.java b/services/src/main/java/io/druid/cli/CliBroker.java index 2381c13d282..d89b4fc0ba8 100644 --- a/services/src/main/java/io/druid/cli/CliBroker.java +++ b/services/src/main/java/io/druid/cli/CliBroker.java @@ -44,6 +44,7 @@ import io.druid.guice.annotations.Self; import io.druid.query.MapQueryToolChestWarehouse; import io.druid.query.QuerySegmentWalker; import io.druid.query.QueryToolChestWarehouse; +import io.druid.query.RetryQueryRunnerConfig; import io.druid.server.ClientInfoResource; import 
io.druid.server.ClientQuerySegmentWalker; import io.druid.server.QueryResource; @@ -88,6 +89,7 @@ public class CliBroker extends ServerRunnable JsonConfigProvider.bind(binder, "druid.broker.select.tier", TierSelectorStrategy.class); JsonConfigProvider.bind(binder, "druid.broker.select.tier.custom", CustomTierSelectorStrategyConfig.class); JsonConfigProvider.bind(binder, "druid.broker.balancer", ServerSelectorStrategy.class); + JsonConfigProvider.bind(binder, "druid.broker.retryPolicy", RetryQueryRunnerConfig.class); binder.bind(QuerySegmentWalker.class).to(ClientQuerySegmentWalker.class).in(LazySingleton.class); From d8430b854ded1b736276431261833112801fb0a8 Mon Sep 17 00:00:00 2001 From: jisookim0513 Date: Wed, 18 Jun 2014 17:01:08 -0700 Subject: [PATCH 05/46] revert changes in pom file --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 46e6a3feb98..6717674b368 100644 --- a/pom.xml +++ b/pom.xml @@ -244,7 +244,7 @@ com.fasterxml.jackson.datatype jackson-datatype-joda - 2.4.0 + 2.2.3 com.fasterxml.jackson.dataformat From f4b1dc032b0be5012eade18b817662db73eeb388 Mon Sep 17 00:00:00 2001 From: jisookim0513 Date: Wed, 18 Jun 2014 17:15:04 -0700 Subject: [PATCH 06/46] change the way to use RetryQueryRunnerConfig --- .../src/main/java/io/druid/query/RetryQueryRunner.java | 6 ++++-- .../main/java/io/druid/query/RetryQueryRunnerConfig.java | 4 ++-- .../java/io/druid/server/ClientQuerySegmentWalker.java | 8 ++++++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunner.java b/processing/src/main/java/io/druid/query/RetryQueryRunner.java index 2bd8f34a46a..054ac19313d 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunner.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunner.java @@ -32,11 +32,13 @@ public class RetryQueryRunner implements QueryRunner { private final QueryRunner baseRunner; private final QueryToolChest> toolChest; + private final RetryQueryRunnerConfig config; - public RetryQueryRunner(QueryRunner baseRunner, QueryToolChest> toolChest) + public RetryQueryRunner(QueryRunner baseRunner, QueryToolChest> toolChest, RetryQueryRunnerConfig config) { this.baseRunner = baseRunner; this.toolChest = toolChest; + this.config = config; } @Override @@ -44,7 +46,7 @@ public class RetryQueryRunner implements QueryRunner { Sequence returningSeq = baseRunner.run(query, metadata); - for (int i = RetryQueryRunnerConfig.numTries(); i > 0; i--) { + for (int i = config.numTries(); i > 0; i--) { for (int j = metadata.get("missingSegments").size(); j > 0; j--) { QuerySegmentSpec segmentSpec = new SpecificSegmentSpec((SegmentDescriptor)metadata.get("missingSegments").remove(0)); returningSeq = toolChest.mergeSequences( diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java index f917c18c1b5..5b5ed2639b5 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class RetryQueryRunnerConfig { @JsonProperty - private static int numTries = 1; + private int numTries = 1; - public static int numTries() { return numTries; } + public int numTries() { return numTries; } } diff --git a/server/src/main/java/io/druid/server/ClientQuerySegmentWalker.java 
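Two small changes above make the retry count configurable per process: numTries is now an instance @JsonProperty rather than a static, and CliBroker binds RetryQueryRunnerConfig under druid.broker.retryPolicy. Assuming the usual JsonConfigProvider convention of prefix plus field name, a broker's runtime.properties entry would look like this sketch:

    # Retry each query's missing segments for up to three passes (default is 1).
    druid.broker.retryPolicy.numTries=3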
b/server/src/main/java/io/druid/server/ClientQuerySegmentWalker.java index 4a5e6ef40ac..6c6a2d801f0 100644 --- a/server/src/main/java/io/druid/server/ClientQuerySegmentWalker.java +++ b/server/src/main/java/io/druid/server/ClientQuerySegmentWalker.java @@ -32,6 +32,7 @@ import io.druid.query.QuerySegmentWalker; import io.druid.query.QueryToolChest; import io.druid.query.QueryToolChestWarehouse; import io.druid.query.RetryQueryRunner; +import io.druid.query.RetryQueryRunnerConfig; import io.druid.query.SegmentDescriptor; import io.druid.query.UnionQueryRunner; import org.joda.time.Interval; @@ -45,17 +46,20 @@ public class ClientQuerySegmentWalker implements QuerySegmentWalker private final ServiceEmitter emitter; private final CachingClusteredClient baseClient; private final QueryToolChestWarehouse warehouse; + private final RetryQueryRunnerConfig retryConfig; @Inject public ClientQuerySegmentWalker( ServiceEmitter emitter, CachingClusteredClient baseClient, - QueryToolChestWarehouse warehouse + QueryToolChestWarehouse warehouse, + RetryQueryRunnerConfig retryConfig ) { this.emitter = emitter; this.baseClient = baseClient; this.warehouse = warehouse; + this.retryConfig = retryConfig; } @Override @@ -87,7 +91,7 @@ public class ClientQuerySegmentWalker implements QuerySegmentWalker return toolChest.makeMetricBuilder(query); } }, - toolChest.preMergeQueryDecoration(new RetryQueryRunner(baseClient, toolChest) + toolChest.preMergeQueryDecoration(new RetryQueryRunner(baseClient, toolChest, retryConfig) ) ).withWaitMeasuredFromNow(), toolChest From 8515a117874415354f9b55aa40daa3635fae07bf Mon Sep 17 00:00:00 2001 From: jisookim0513 Date: Wed, 18 Jun 2014 18:05:09 -0700 Subject: [PATCH 07/46] change the signature of run() --- .../main/java/io/druid/query/BaseQuery.java | 8 +- .../io/druid/query/BySegmentQueryRunner.java | 6 +- .../query/BySegmentSkippingQueryRunner.java | 9 +- .../query/ChainedExecutionQueryRunner.java | 4 +- .../io/druid/query/ConcatQueryRunner.java | 4 +- .../query/FinalizeResultsQueryRunner.java | 4 +- .../query/GroupByParallelQueryRunner.java | 4 +- .../query/IntervalChunkingQueryRunner.java | 6 +- .../query/MetricsEmittingQueryRunner.java | 6 +- .../java/io/druid/query/NoopQueryRunner.java | 2 +- .../src/main/java/io/druid/query/Query.java | 4 +- .../main/java/io/druid/query/QueryRunner.java | 3 +- .../ReferenceCountingSegmentQueryRunner.java | 4 +- .../druid/query/ResultMergeQueryRunner.java | 4 +- .../java/io/druid/query/RetryQueryRunner.java | 10 +- .../io/druid/query/SubqueryQueryRunner.java | 6 +- .../java/io/druid/query/UnionQueryRunner.java | 6 +- .../groupby/GroupByQueryQueryToolChest.java | 12 +- .../groupby/GroupByQueryRunnerFactory.java | 6 +- .../SegmentMetadataQueryRunnerFactory.java | 7 +- .../search/SearchQueryQueryToolChest.java | 6 +- .../druid/query/search/SearchQueryRunner.java | 2 +- .../select/SelectQueryRunnerFactory.java | 2 +- .../spec/SpecificSegmentQueryRunner.java | 6 +- .../TimeBoundaryQueryQueryToolChest.java | 4 +- .../TimeBoundaryQueryRunnerFactory.java | 2 +- .../TimeseriesQueryRunnerFactory.java | 2 +- .../query/topn/TopNQueryQueryToolChest.java | 6 +- .../query/topn/TopNQueryRunnerFactory.java | 2 +- .../ChainedExecutionQueryRunnerTest.java | 11 +- .../query/groupby/GroupByQueryRunnerTest.java | 72 +++++----- .../query/metadata/SegmentAnalyzerTest.java | 4 +- .../metadata/SegmentMetadataQueryTest.java | 4 +- .../query/search/SearchQueryRunnerTest.java | 4 +- .../query/select/SelectQueryRunnerTest.java | 16 +-- 
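Outside of Guice, the ClientQuerySegmentWalker change above amounts to one extra constructor argument. A hand-wired sketch of the same composition; baseClient and toolChest stand in for the injected CachingClusteredClient and the query's tool chest, and are not defined in the patch:

    RetryQueryRunnerConfig retryConfig = new RetryQueryRunnerConfig(); // numTries defaults to 1
    QueryRunner retrying = new RetryQueryRunner(baseClient, toolChest, retryConfig);
    QueryRunner decorated = toolChest.preMergeQueryDecoration(retrying);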
.../TimeBoundaryQueryRunnerTest.java | 5 +- .../TimeSeriesUnionQueryRunnerTest.java | 4 +- .../TimeseriesQueryRunnerBonusTest.java | 4 +- .../timeseries/TimeseriesQueryRunnerTest.java | 124 ++++++++-------- .../druid/query/topn/TopNQueryRunnerTest.java | 132 +++++++++--------- .../druid/query/topn/TopNUnionQueryTest.java | 4 +- .../filter/SpatialFilterBonusTest.java | 8 +- .../segment/filter/SpatialFilterTest.java | 8 +- .../druid/client/CachingClusteredClient.java | 6 +- .../io/druid/client/CachingQueryRunner.java | 6 +- .../io/druid/client/DirectDruidClient.java | 4 +- .../java/io/druid/server/QueryResource.java | 10 +- .../bridge/BridgeQuerySegmentWalker.java | 2 +- .../client/CachingClusteredClientTest.java | 38 ++--- .../druid/client/CachingQueryRunnerTest.java | 12 +- .../druid/client/DirectDruidClientTest.java | 20 +-- .../coordination/ServerManagerTest.java | 8 +- 52 files changed, 324 insertions(+), 329 deletions(-) diff --git a/processing/src/main/java/io/druid/query/BaseQuery.java b/processing/src/main/java/io/druid/query/BaseQuery.java index 3dac2fa0cfd..ad1c96a23d6 100644 --- a/processing/src/main/java/io/druid/query/BaseQuery.java +++ b/processing/src/main/java/io/druid/query/BaseQuery.java @@ -70,14 +70,14 @@ public abstract class BaseQuery implements Query } @Override - public Sequence run(QuerySegmentWalker walker, Map metadata) + public Sequence run(QuerySegmentWalker walker, Map context) { - return run(querySegmentSpec.lookup(this, walker), metadata); + return run(querySegmentSpec.lookup(this, walker), context); } - public Sequence run(QueryRunner runner, Map metadata) + public Sequence run(QueryRunner runner, Map context) { - return runner.run(this, metadata); + return runner.run(this, context); } @Override diff --git a/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java b/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java index 1d41a58b58b..a537c7d48f5 100644 --- a/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/BySegmentQueryRunner.java @@ -52,10 +52,10 @@ public class BySegmentQueryRunner implements QueryRunner @Override @SuppressWarnings("unchecked") - public Sequence run(final Query query, Map metadata) + public Sequence run(final Query query, Map context) { if (query.getContextBySegment(false)) { - final Sequence baseSequence = base.run(query, metadata); + final Sequence baseSequence = base.run(query, context); return new Sequence() { @Override @@ -97,6 +97,6 @@ public class BySegmentQueryRunner implements QueryRunner } }; } - return base.run(query, metadata); + return base.run(query, context); } } diff --git a/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java b/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java index 09cf38ee40b..5f9651f5222 100644 --- a/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/BySegmentSkippingQueryRunner.java @@ -21,7 +21,6 @@ package io.druid.query; import com.metamx.common.guava.Sequence; -import java.util.List; import java.util.Map; /** @@ -38,14 +37,14 @@ public abstract class BySegmentSkippingQueryRunner implements QueryRunner } @Override - public Sequence run(Query query, Map metadata) + public Sequence run(Query query, Map context) { if (query.getContextBySegment(false)) { - return baseRunner.run(query, metadata); + return baseRunner.run(query, context); } - return doRun(baseRunner, query, metadata); + return 
doRun(baseRunner, query, context); } - protected abstract Sequence doRun(QueryRunner baseRunner, Query query, Map metadata); + protected abstract Sequence doRun(QueryRunner baseRunner, Query query, Map context); } diff --git a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java index 45a9f5518d8..81340e16236 100644 --- a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java @@ -94,7 +94,7 @@ public class ChainedExecutionQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query, final Map metadata) + public Sequence run(final Query query, final Map context) { final int priority = query.getContextPriority(0); @@ -125,7 +125,7 @@ public class ChainedExecutionQueryRunner implements QueryRunner throw new ISE("Input is null?! How is this possible?!"); } - Sequence result = input.run(query, metadata); + Sequence result = input.run(query, context); if (result == null) { throw new ISE("Got a null result! Segments are missing!"); } diff --git a/processing/src/main/java/io/druid/query/ConcatQueryRunner.java b/processing/src/main/java/io/druid/query/ConcatQueryRunner.java index 482216fe818..74c4a6481f5 100644 --- a/processing/src/main/java/io/druid/query/ConcatQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ConcatQueryRunner.java @@ -39,7 +39,7 @@ public class ConcatQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query, final Map metadata) + public Sequence run(final Query query, final Map context) { return Sequences.concat( Sequences.map( @@ -49,7 +49,7 @@ public class ConcatQueryRunner implements QueryRunner @Override public Sequence apply(final QueryRunner input) { - return input.run(query, metadata); + return input.run(query, context); } } ) diff --git a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java index 565141aad7d..8fb5efeb4d2 100644 --- a/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java +++ b/processing/src/main/java/io/druid/query/FinalizeResultsQueryRunner.java @@ -48,7 +48,7 @@ public class FinalizeResultsQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query, Map metadata) + public Sequence run(final Query query, Map context) { final boolean isBySegment = query.getContextBySegment(false); final boolean shouldFinalize = query.getContextFinalize(true); @@ -96,7 +96,7 @@ public class FinalizeResultsQueryRunner implements QueryRunner return Sequences.map( - baseRunner.run(queryToRun, metadata), + baseRunner.run(queryToRun, context), finalizerFn ); diff --git a/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java b/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java index ef1bd009523..48db79f1ec3 100644 --- a/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java +++ b/processing/src/main/java/io/druid/query/GroupByParallelQueryRunner.java @@ -87,7 +87,7 @@ public class GroupByParallelQueryRunner implements QueryRunner } @Override - public Sequence run(final Query queryParam, final Map metadata) + public Sequence run(final Query queryParam, final Map context) { final GroupByQuery query = (GroupByQuery) queryParam; @@ -116,7 +116,7 @@ public class GroupByParallelQueryRunner implements QueryRunner public Boolean call() 
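Most hunks in this commit follow one mechanical pattern, visible above and below: decorator runners accept the new context argument and forward it untouched to their delegate. The shape in miniature, as a sketch (raw types as in the patch):

    public class PassThroughQueryRunner implements QueryRunner
    {
      private final QueryRunner baseRunner;

      public PassThroughQueryRunner(QueryRunner baseRunner)
      {
        this.baseRunner = baseRunner;
      }

      @Override
      public Sequence run(Query query, Map context)
      {
        // Decorators may add behavior around the call, but they hand the same
        // mutable map to the delegate so missing-segment reports propagate up.
        return baseRunner.run(query, context);
      }
    }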
throws Exception { try { - input.run(queryParam, metadata).accumulate(indexAccumulatorPair.lhs, indexAccumulatorPair.rhs); + input.run(queryParam, context).accumulate(indexAccumulatorPair.lhs, indexAccumulatorPair.rhs); return true; } catch (QueryInterruptedException e) { diff --git a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java index 42f5093468f..f36a7f4edfa 100644 --- a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java @@ -49,10 +49,10 @@ public class IntervalChunkingQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query, final Map metadata) + public Sequence run(final Query query, final Map context) { if (period.getMillis() == 0) { - return baseRunner.run(query, metadata); + return baseRunner.run(query, context); } return Sequences.concat( @@ -76,7 +76,7 @@ public class IntervalChunkingQueryRunner implements QueryRunner { return baseRunner.run( query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Arrays.asList(singleInterval))), - metadata + context ); } } diff --git a/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java b/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java index 110aadf4ce5..355916f4b74 100644 --- a/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/MetricsEmittingQueryRunner.java @@ -68,7 +68,7 @@ public class MetricsEmittingQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query, final Map metadata) + public Sequence run(final Query query, final Map context) { final ServiceMetricEvent.Builder builder = builderFn.apply(query); String queryId = query.getId(); @@ -86,7 +86,7 @@ public class MetricsEmittingQueryRunner implements QueryRunner long startTime = System.currentTimeMillis(); try { - retVal = queryRunner.run(query, metadata).accumulate(outType, accumulator); + retVal = queryRunner.run(query, context).accumulate(outType, accumulator); } catch (RuntimeException e) { builder.setUser10("failed"); @@ -116,7 +116,7 @@ public class MetricsEmittingQueryRunner implements QueryRunner long startTime = System.currentTimeMillis(); try { - retVal = queryRunner.run(query, metadata).toYielder(initValue, accumulator); + retVal = queryRunner.run(query, context).toYielder(initValue, accumulator); } catch (RuntimeException e) { builder.setUser10("failed"); diff --git a/processing/src/main/java/io/druid/query/NoopQueryRunner.java b/processing/src/main/java/io/druid/query/NoopQueryRunner.java index 0f659d01793..d2f3863ab62 100644 --- a/processing/src/main/java/io/druid/query/NoopQueryRunner.java +++ b/processing/src/main/java/io/druid/query/NoopQueryRunner.java @@ -30,7 +30,7 @@ import java.util.Map; public class NoopQueryRunner implements QueryRunner { @Override - public Sequence run(Query query, Map metadata) + public Sequence run(Query query, Map context) { return Sequences.empty(); } diff --git a/processing/src/main/java/io/druid/query/Query.java b/processing/src/main/java/io/druid/query/Query.java index 74484b7ab96..3a6c38dc028 100644 --- a/processing/src/main/java/io/druid/query/Query.java +++ b/processing/src/main/java/io/druid/query/Query.java @@ -62,9 +62,9 @@ public interface Query public String getType(); - public Sequence run(QuerySegmentWalker walker, Map metadata); + public Sequence 
run(QuerySegmentWalker walker, Map context);
 
-  public Sequence run(QueryRunner runner, Map metadata);
+  public Sequence run(QueryRunner runner, Map context);
 
   public List getIntervals();
diff --git a/processing/src/main/java/io/druid/query/QueryRunner.java b/processing/src/main/java/io/druid/query/QueryRunner.java
index 81b5f4b39ad..d7a3f8af36f 100644
--- a/processing/src/main/java/io/druid/query/QueryRunner.java
+++ b/processing/src/main/java/io/druid/query/QueryRunner.java
@@ -21,12 +21,11 @@ package io.druid.query;
 
 import com.metamx.common.guava.Sequence;
 
-import java.util.List;
 import java.util.Map;
 
 /**
 */
 public interface QueryRunner
 {
-  public Sequence run(Query query, Map metadata);
+  public Sequence run(Query query, Map context);
 }
\ No newline at end of file
diff --git a/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java b/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java
index 457b645544b..f104d8db026 100644
--- a/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/ReferenceCountingSegmentQueryRunner.java
@@ -45,11 +45,11 @@ public class ReferenceCountingSegmentQueryRunner implements QueryRunner
   }
 
   @Override
-  public Sequence run(final Query query, Map metadata)
+  public Sequence run(final Query query, Map context)
   {
     final Closeable closeable = adapter.increment();
     try {
-      final Sequence baseSequence = factory.createRunner(adapter).run(query, metadata);
+      final Sequence baseSequence = factory.createRunner(adapter).run(query, context);
 
       return new ResourceClosingSequence(baseSequence, closeable);
     }
diff --git a/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java b/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java
index c33cd032161..f5378cfc4ed 100644
--- a/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java
@@ -39,9 +39,9 @@ public abstract class ResultMergeQueryRunner extends BySegmentSkippingQueryRu
   }
 
   @Override
-  public Sequence doRun(QueryRunner baseRunner, Query query, Map metadata)
+  public Sequence doRun(QueryRunner baseRunner, Query query, Map context)
   {
-    return CombiningSequence.create(baseRunner.run(query, metadata), makeOrdering(query), createMergeFn(query));
+    return CombiningSequence.create(baseRunner.run(query, context), makeOrdering(query), createMergeFn(query));
   }
 
   protected abstract Ordering makeOrdering(Query query);
diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunner.java b/processing/src/main/java/io/druid/query/RetryQueryRunner.java
index 054ac19313d..34781cfbb07 100644
--- a/processing/src/main/java/io/druid/query/RetryQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/RetryQueryRunner.java
@@ -42,20 +42,20 @@ public class RetryQueryRunner implements QueryRunner
   }
 
   @Override
-  public Sequence run(final Query query, Map metadata)
+  public Sequence run(final Query query, Map context)
   {
-    Sequence returningSeq = baseRunner.run(query, metadata);
+    Sequence returningSeq = baseRunner.run(query, context);
 
     for (int i = config.numTries(); i > 0; i--) {
-      for (int j = metadata.get("missingSegments").size(); j > 0; j--) {
-        QuerySegmentSpec segmentSpec = new SpecificSegmentSpec((SegmentDescriptor)metadata.get("missingSegments").remove(0));
+      for (int j = ((List)context.get("missingSegments")).size(); j > 0; j--) {
+        QuerySegmentSpec segmentSpec = new SpecificSegmentSpec((SegmentDescriptor)((List) context.get("missingSegments")).remove(0));
         returningSeq = toolChest.mergeSequences(
             Sequences.simple(
                 Arrays.asList(
                     returningSeq,
                     baseRunner.run(
                         query.withQuerySegmentSpec(segmentSpec),
-                        metadata
+                        context
                     )
                 )
            )
diff --git a/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java b/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java
index dc7994cd01e..d16a660e25a 100644
--- a/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/SubqueryQueryRunner.java
@@ -39,13 +39,13 @@ public class SubqueryQueryRunner implements QueryRunner
   }
 
   @Override
-  public Sequence run(final Query query, Map metadata)
+  public Sequence run(final Query query, Map context)
   {
     DataSource dataSource = query.getDataSource();
     if (dataSource instanceof QueryDataSource) {
-      return run((Query) ((QueryDataSource) dataSource).getQuery(), metadata);
+      return run((Query) ((QueryDataSource) dataSource).getQuery(), context);
     } else {
-      return baseRunner.run(query, metadata);
+      return baseRunner.run(query, context);
     }
   }
 }
diff --git a/processing/src/main/java/io/druid/query/UnionQueryRunner.java b/processing/src/main/java/io/druid/query/UnionQueryRunner.java
index d081d20cafe..dcaab858b9f 100644
--- a/processing/src/main/java/io/druid/query/UnionQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/UnionQueryRunner.java
@@ -44,7 +44,7 @@ public class UnionQueryRunner implements QueryRunner
   }
 
   @Override
-  public Sequence run(final Query query, final Map metadata)
+  public Sequence run(final Query query, final Map context)
   {
     DataSource dataSource = query.getDataSource();
     if (dataSource instanceof UnionDataSource) {
@@ -59,7 +59,7 @@ public class UnionQueryRunner
                   {
                     return baseRunner.run(
                         query.withDataSource(singleSource),
-                        metadata
+                        context
                     );
                   }
                 }
@@ -67,7 +67,7 @@ public class UnionQueryRunner
               )
       );
     } else {
-      return baseRunner.run(query, metadata);
+      return baseRunner.run(query, context);
     }
   }
diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java
index 6ce42b14b53..d6ce385ed21 100644
--- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java
+++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryQueryToolChest.java
@@ -80,18 +80,18 @@ public class GroupByQueryQueryToolChest extends QueryToolChest
     ()
     {
       @Override
-      public Sequence run(Query input, Map metadata)
+      public Sequence run(Query input, Map context)
       {
         if (Boolean.valueOf((String) input.getContextValue(GROUP_BY_MERGE_KEY, "true"))) {
-          return mergeGroupByResults(((GroupByQuery) input).withOverriddenContext(NO_MERGE_CONTEXT), runner, metadata);
+          return mergeGroupByResults(((GroupByQuery) input).withOverriddenContext(NO_MERGE_CONTEXT), runner, context);
         } else {
-          return runner.run(input, metadata);
+          return runner.run(input, context);
         }
       }
     };
   }
 
-  private Sequence mergeGroupByResults(final GroupByQuery query, QueryRunner runner, Map metadata)
+  private Sequence mergeGroupByResults(final GroupByQuery query, QueryRunner runner, Map context)
   {
     Sequence result;
 
@@ -105,12 +105,12 @@ public class GroupByQueryQueryToolChest extends QueryToolChest
-      Sequence subqueryResult = mergeGroupByResults(subquery, runner, metadata);
+      Sequence subqueryResult = mergeGroupByResults(subquery, runner, context);
       IncrementalIndexStorageAdapter adapter = new
IncrementalIndexStorageAdapter(makeIncrementalIndex(subquery, subqueryResult)); result = engine.process(query, adapter); } else { - result = runner.run(query, metadata); + result = runner.run(query, context); } return postAggregate(query, makeIncrementalIndex(query, result)); diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java index f9bd43c20dd..95fdea5d158 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryRunnerFactory.java @@ -102,7 +102,7 @@ public class GroupByQueryRunnerFactory implements QueryRunnerFactory() { @Override - public Sequence run(final Query query, final Map metadata) + public Sequence run(final Query query, final Map context) { ListenableFuture> future = queryExecutor.submit( @@ -112,7 +112,7 @@ public class GroupByQueryRunnerFactory implements QueryRunnerFactory call() throws Exception { return new ExecutorExecutingSequence( - input.run(query, metadata), + input.run(query, context), queryExecutor ); } @@ -168,7 +168,7 @@ public class GroupByQueryRunnerFactory implements QueryRunnerFactory run(Query input, Map metadata) + public Sequence run(Query input, Map context) { if (!(input instanceof GroupByQuery)) { throw new ISE("Got a [%s] which isn't a %s", input.getClass(), GroupByQuery.class); diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index 395db8908fb..5c527d20e78 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -45,7 +45,6 @@ import io.druid.segment.QueryableIndex; import io.druid.segment.Segment; import java.util.Arrays; -import java.util.List; import java.util.Map; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; @@ -75,7 +74,7 @@ public class SegmentMetadataQueryRunnerFactory implements QueryRunnerFactory() { @Override - public Sequence run(Query inQ, Map metadata) + public Sequence run(Query inQ, Map context) { SegmentMetadataQuery query = (SegmentMetadataQuery) inQ; @@ -136,7 +135,7 @@ public class SegmentMetadataQueryRunnerFactory implements QueryRunnerFactory run( final Query query, - final Map metadata + final Map context ) { final int priority = query.getContextPriority(0); @@ -146,7 +145,7 @@ public class SegmentMetadataQueryRunnerFactory implements QueryRunnerFactory call() throws Exception { - return input.run(query, metadata); + return input.run(query, context); } } ); diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java index 229101adf95..1c02924cc84 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java @@ -286,7 +286,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest> run( Query> input, - Map metadata + Map context ) { if (!(input instanceof SearchQuery)) { @@ -295,13 +295,13 @@ public class SearchQueryQueryToolChest extends QueryToolChest, Result>() { @Override diff --git 
a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
index d95d63887b5..12ef77064b8 100644
--- a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
@@ -71,7 +71,7 @@ public class SearchQueryRunner implements QueryRunner>
   @Override
   public Sequence> run(
       final Query> input,
-      Map metadata
+      Map context
   )
   {
     if (!(input instanceof SearchQuery)) {
diff --git a/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java
index 702d6295c57..5210a56ae6a 100644
--- a/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java
+++ b/processing/src/main/java/io/druid/query/select/SelectQueryRunnerFactory.java
@@ -92,7 +92,7 @@ public class SelectQueryRunnerFactory
   @Override
   public Sequence> run(
       Query> input,
-      Map metadata
+      Map context
   )
   {
     if (!(input instanceof SelectQuery)) {
diff --git a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java
index cd8134cb2ee..80eaa28fa33 100644
--- a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java
@@ -51,7 +51,7 @@ public class SpecificSegmentQueryRunner implements QueryRunner
   }
 
   @Override
-  public Sequence run(final Query input, final Map metadata)
+  public Sequence run(final Query input, final Map context)
   {
     final Query query = input.withQuerySegmentSpec(specificSpec);
 
@@ -66,9 +66,9 @@ public class SpecificSegmentQueryRunner
     {
       Sequence returningSeq;
       try {
-        returningSeq = base.run(query, metadata);
+        returningSeq = base.run(query, context);
       } catch (NullStorageAdapterException e) {
-        metadata.get("missingSegments").add(((SpecificSegmentSpec) specificSpec).getDescriptor());
+        ((List)context.get("missingSegments")).add(((SpecificSegmentSpec) specificSpec).getDescriptor());
         returningSeq = Sequences.empty();
       }
       return returningSeq;
diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java
index b9a5216bedf..5700aa68185 100644
--- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java
+++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java
@@ -96,13 +96,13 @@ public class TimeBoundaryQueryQueryToolChest
     {
       @Override
       protected Sequence> doRun(
-          QueryRunner> baseRunner, Query> input, Map metadata
+          QueryRunner> baseRunner, Query> input, Map context
       )
       {
         TimeBoundaryQuery query = (TimeBoundaryQuery) input;
         return Sequences.simple(
             query.mergeResults(
-                Sequences.toList(baseRunner.run(query, metadata), Lists.>newArrayList())
+                Sequences.toList(baseRunner.run(query, context), Lists.>newArrayList())
             )
         );
       }
diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java
index 3858c0d8cc4..57f15221791 100644
--- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java
+++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java
@@ -86,7 +86,7 @@
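SpecificSegmentQueryRunner above is the detection half of the retry protocol: when a segment's storage adapter is gone, it appends that segment's descriptor to the context's "missingSegments" list instead of failing, and RetryQueryRunner later re-issues the query for exactly those descriptors. The whole round trip condensed into a caller-side sketch (retryRunner and query are assumed to exist; types stay raw as in the patch):

    // Callers seed the shared, mutable context before running the query.
    Map context = new HashMap();
    context.put("missingSegments", new LinkedList());

    // run() performs the initial pass plus up to numTries retry passes;
    // SpecificSegmentQueryRunner fills the list, RetryQueryRunner drains it.
    Sequence results = retryRunner.run(query, context);

    // Anything still listed here was never successfully re-fetched.
    List stillMissing = (List) context.get("missingSegments");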
public class TimeBoundaryQueryRunnerFactory @Override public Sequence> run( Query> input, - Map metadata + Map context ) { if (!(input instanceof TimeBoundaryQuery)) { diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java index 1706378ae38..3da84f7ff24 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryRunnerFactory.java @@ -93,7 +93,7 @@ public class TimeseriesQueryRunnerFactory @Override public Sequence> run( Query> input, - Map metadata + Map context ) { if (!(input instanceof TimeseriesQuery)) { diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java index 43fc6b9f303..d574e2ffc83 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryQueryToolChest.java @@ -412,7 +412,7 @@ public class TopNQueryQueryToolChest extends QueryToolChest> run( Query> input, - Map metadata + Map context ) { if (!(input instanceof TopNQuery)) { @@ -421,13 +421,13 @@ public class TopNQueryQueryToolChest extends QueryToolChest minTopNThreshold) { - return runner.run(query, metadata); + return runner.run(query, context); } final boolean isBySegment = query.getContextBySegment(false); return Sequences.map( - runner.run(query.withThreshold(minTopNThreshold), metadata), + runner.run(query.withThreshold(minTopNThreshold), context), new Function, Result>() { @Override diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java index 7b3c5cb07a9..54d5286254b 100644 --- a/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/topn/TopNQueryRunnerFactory.java @@ -67,7 +67,7 @@ public class TopNQueryRunnerFactory implements QueryRunnerFactory> run( Query> input, - Map metadata + Map context ) { if (!(input instanceof TopNQuery)) { diff --git a/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java index 4c1b7351c0d..9166b7f5b09 100644 --- a/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/ChainedExecutionQueryRunnerTest.java @@ -36,7 +36,6 @@ import org.junit.Assert; import org.junit.Test; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -102,14 +101,14 @@ public class ChainedExecutionQueryRunnerTest runner3 ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); final Sequence seq = chainedRunner.run( Druids.newTimeseriesQueryBuilder() .dataSource("test") .intervals("2014/2015") .aggregators(Lists.newArrayList(new CountAggregatorFactory("count"))) .build(), - metadata + context ); Future resultFuture = Executors.newFixedThreadPool(1).submit( @@ -206,7 +205,7 @@ public class ChainedExecutionQueryRunnerTest runner3 ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); final Sequence seq = chainedRunner.run( Druids.newTimeseriesQueryBuilder() .dataSource("test") @@ -214,7 +213,7 @@ public 
class ChainedExecutionQueryRunnerTest .aggregators(Lists.newArrayList(new CountAggregatorFactory("count"))) .context(ImmutableMap.of("timeout", (100), "queryId", "test")) .build(), - metadata + context ); Future resultFuture = Executors.newFixedThreadPool(1).submit( @@ -268,7 +267,7 @@ public class ChainedExecutionQueryRunnerTest } @Override - public Sequence run(Query query, Map metadata) + public Sequence run(Query query, Map context) { hasStarted = true; latch.countDown(); diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index ac073b6f723..d754cbebc62 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -344,7 +344,7 @@ public class GroupByQueryRunnerTest new QueryRunner() { @Override - public Sequence run(Query query, Map metadata) + public Sequence run(Query query, Map context) { // simulate two daily segments final Query query1 = query.withQuerySegmentSpec( @@ -353,7 +353,7 @@ public class GroupByQueryRunnerTest final Query query2 = query.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) ); - return Sequences.concat(runner.run(query1, metadata), runner.run(query2, metadata)); + return Sequences.concat(runner.run(query1, context), runner.run(query2, context)); } } ); @@ -370,9 +370,9 @@ public class GroupByQueryRunnerTest createExpectedRow("2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedObjects(expectedResults, runner.run(fullQuery, metadata), "direct"); - TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, metadata), "merged"); + HashMap context = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, runner.run(fullQuery, context), "direct"); + TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, context), "merged"); List allGranExpectedResults = Arrays.asList( createExpectedRow("2011-04-02", "alias", "automotive", "rows", 2L, "idx", 269L), @@ -386,8 +386,8 @@ public class GroupByQueryRunnerTest createExpectedRow("2011-04-02", "alias", "travel", "rows", 2L, "idx", 243L) ); - TestHelper.assertExpectedObjects(allGranExpectedResults, runner.run(allGranQuery, metadata), "direct"); - TestHelper.assertExpectedObjects(allGranExpectedResults, mergedRunner.run(allGranQuery, metadata), "merged"); + TestHelper.assertExpectedObjects(allGranExpectedResults, runner.run(allGranQuery, context), "direct"); + TestHelper.assertExpectedObjects(allGranExpectedResults, mergedRunner.run(allGranQuery, context), "merged"); } @Test @@ -429,9 +429,9 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, metadata), String.format("limit: %d", limit) + Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), String.format("limit: %d", limit) ); } @@ -537,7 +537,7 @@ public class GroupByQueryRunnerTest new QueryRunner() { @Override - public Sequence run(Query query, Map metadata) + public Sequence run(Query query, Map context) { // simulate two daily segments final Query query1 = query.withQuerySegmentSpec( @@ -546,12 +546,12 @@ 
public class GroupByQueryRunnerTest final Query query2 = query.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) ); - return Sequences.concat(runner.run(query1, metadata), runner.run(query2, metadata)); + return Sequences.concat(runner.run(query1, context), runner.run(query2, context)); } } ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, metadata), "merged"); + HashMap context = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, context), "merged"); } @Test @@ -587,11 +587,11 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); + HashMap context = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, context), "no-limit"); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), metadata), "limited" + Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited" ); } @@ -628,10 +628,10 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); + HashMap context = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, context), "no-limit"); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), metadata), "limited" + Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited" ); } @@ -668,10 +668,10 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); + HashMap context = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, context), "no-limit"); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), metadata), "limited" + Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited" ); } @@ -711,7 +711,7 @@ public class GroupByQueryRunnerTest new QueryRunner() { @Override - public Sequence run(Query query, Map metadata) + public Sequence run(Query query, Map context) { // simulate two daily segments final Query query1 = query.withQuerySegmentSpec( @@ -720,13 +720,13 @@ public class GroupByQueryRunnerTest final Query query2 = query.withQuerySegmentSpec( new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) ); - return Sequences.concat(runner.run(query1, metadata), runner.run(query2, metadata)); + return Sequences.concat(runner.run(query1, context), runner.run(query2, context)); } } ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, metadata), "merged"); + HashMap context = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(fullQuery, context), "merged"); } @Test @@ -765,9 +765,9 @@ public class GroupByQueryRunnerTest ) ); 
- HashMap metadata = new HashMap(); + HashMap context = new HashMap(); QueryRunner mergeRunner = new GroupByQueryQueryToolChest(configSupplier, engine).mergeResults(runner); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, context), "no-limit"); } @Test @@ -799,8 +799,8 @@ public class GroupByQueryRunnerTest createExpectedRow("2011-04-01", "quality", "technology", "rows", 2L), createExpectedRow("2011-04-01", "quality", "travel", "rows", 2L) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedObjects(expectedResults, runner.run(query, metadata), "normal"); + HashMap context = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, runner.run(query, context), "normal"); final GroupByQueryEngine engine = new GroupByQueryEngine( configSupplier, new StupidPool( @@ -816,7 +816,7 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = new GroupByQueryQueryToolChest(configSupplier, engine).mergeResults(runner); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, context), "no-limit"); } @Test @@ -849,8 +849,8 @@ public class GroupByQueryRunnerTest createExpectedRow("2011-04-01", "quality", "travel", "rows", 2L) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedObjects(expectedResults, runner.run(query, metadata), "normal"); + HashMap context = new HashMap(); + TestHelper.assertExpectedObjects(expectedResults, runner.run(query, context), "normal"); final GroupByQueryEngine engine = new GroupByQueryEngine( configSupplier, new StupidPool( @@ -866,7 +866,7 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = new GroupByQueryQueryToolChest(configSupplier, engine).mergeResults(runner); - TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, metadata), "no-limit"); + TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(query, context), "no-limit"); } // A subquery identical to the query should yield identical results @@ -1046,8 +1046,8 @@ public class GroupByQueryRunnerTest toolChest ); - HashMap metadata = new HashMap(); - Sequence queryResult = theRunner.run(query, metadata); + HashMap context = new HashMap(); + Sequence queryResult = theRunner.run(query, context); return Sequences.toList(queryResult, Lists.newArrayList()); } diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java index bb2dcbdd85d..e6b50c2178f 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentAnalyzerTest.java @@ -102,7 +102,7 @@ public class SegmentAnalyzerTest final SegmentMetadataQuery query = new SegmentMetadataQuery( new LegacyDataSource("test"), QuerySegmentSpecs.create("2011/2012"), null, null, null ); - HashMap metadata = new HashMap(); - return Sequences.toList(query.run(runner, metadata), Lists.newArrayList()); + HashMap context = new HashMap(); + return Sequences.toList(query.run(runner, context), Lists.newArrayList()); } } diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index 3e627bdca49..091d1e2b1b8 100644 --- 
a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -72,9 +72,9 @@ public class SegmentMetadataQueryTest .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) .merge(true) .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.newArrayList() ); SegmentAnalysis val = results.iterator().next(); diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java index 1e64c284b84..fb14a4e8a0a 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java @@ -369,9 +369,9 @@ public class SearchQueryRunnerTest private void checkSearchQuery(SearchQuery searchQuery, Map> expectedResults) { - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(searchQuery, metadata), + runner.run(searchQuery, context), Lists.>newArrayList() ); diff --git a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java index 88a983be6d6..ba8bc3c6320 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java @@ -88,9 +88,9 @@ public class SelectQueryRunnerTest new PagingSpec(null, 3), null ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); @@ -157,9 +157,9 @@ public class SelectQueryRunnerTest new PagingSpec(null, 3), null ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); @@ -217,9 +217,9 @@ public class SelectQueryRunnerTest new PagingSpec(Maps.newLinkedHashMap(ImmutableMap.of(QueryRunnerTestHelper.segmentId, 3)), 3), null ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); @@ -277,9 +277,9 @@ public class SelectQueryRunnerTest new PagingSpec(Maps.newLinkedHashMap(ImmutableMap.of(QueryRunnerTestHelper.segmentId, 3)), 3), null ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index 28b378e08c9..3e7b919c5f5 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -34,7 +34,6 @@ import org.junit.runners.Parameterized; import java.io.IOException; import java.util.Collection; import java.util.HashMap; -import java.util.List; /** */ @@ -65,9 +64,9 @@ public class TimeBoundaryQueryRunnerTest TimeBoundaryQuery 
timeBoundaryQuery = Druids.newTimeBoundaryQueryBuilder() .dataSource("testing") .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(timeBoundaryQuery, metadata), + runner.run(timeBoundaryQuery, context), Lists.>newArrayList() ); TimeBoundaryResultValue val = results.iterator().next().getValue(); diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java index bedc433cc69..8ad6c40dbec 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java @@ -98,9 +98,9 @@ public class TimeSeriesUnionQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java index 92821c4260c..056fdef948d 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java @@ -111,9 +111,9 @@ public class TimeseriesQueryRunnerBonusTest ) ) .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); return Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); } diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 17ee38449dd..1d1fcb21b00 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -101,10 +101,10 @@ public class TimeseriesQueryRunnerTest DateTime expectedEarliest = new DateTime("2011-01-12"); DateTime expectedLast = new DateTime("2011-04-15"); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); @@ -168,9 +168,9 @@ public class TimeseriesQueryRunnerTest DateTime expectedEarliest = new DateTime("2011-01-12"); DateTime expectedLast = new DateTime("2011-04-15"); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); Result result = results.iterator().next(); @@ -215,9 +215,9 @@ public class TimeseriesQueryRunnerTest DateTime expectedEarliest = new DateTime("2011-01-12"); DateTime expectedLast = new DateTime("2011-04-15"); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); @@ -281,9 +281,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); @@ -328,9 +328,9 @@ public class 
TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); @@ -370,9 +370,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results1 = Sequences.toList( - runner.run(query1, metadata), + runner.run(query1, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -409,7 +409,7 @@ public class TimeseriesQueryRunnerTest ); Iterable> results2 = Sequences.toList( - runner.run(query2, metadata), + runner.run(query2, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults2, results2); @@ -460,9 +460,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results1 = Sequences.toList( - runner.run(query1, metadata), + runner.run(query1, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -502,9 +502,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results1 = Sequences.toList( - runner.run(query1, metadata), + runner.run(query1, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -542,7 +542,7 @@ public class TimeseriesQueryRunnerTest ); Iterable> results2 = Sequences.toList( - runner.run(query2, metadata), + runner.run(query2, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults2, results2); @@ -575,9 +575,9 @@ public class TimeseriesQueryRunnerTest .build(); List> expectedResults = Arrays.asList(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -625,9 +625,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -675,9 +675,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -725,9 +725,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -775,9 +775,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -825,9 +825,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, 
metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -883,9 +883,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -941,9 +941,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -999,9 +999,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1055,9 +1055,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1117,9 +1117,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1161,9 +1161,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1205,9 +1205,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1263,9 +1263,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1303,9 +1303,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> actualResults = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1344,9 +1344,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> actualResults = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1363,7 +1363,7 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) 
.postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1373,12 +1373,12 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(), - metadata + context ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1395,7 +1395,7 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1406,12 +1406,12 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(), - metadata + context ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1457,7 +1457,7 @@ public class TimeseriesQueryRunnerTest ) ) .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1468,12 +1468,12 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(), - metadata + context ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1521,7 +1521,7 @@ public class TimeseriesQueryRunnerTest ) ) .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1532,12 +1532,12 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(), - metadata + context ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query, metadata), + runner.run(query, context), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index 44a166aa503..fd3522028ef 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -167,8 +167,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -231,8 +231,8 @@ 
public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @@ -296,8 +296,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @@ -346,8 +346,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -395,8 +395,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -444,8 +444,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -486,8 +486,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -521,8 +521,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -570,8 +570,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -623,8 +623,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -665,8 +665,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -683,7 +683,7 @@ public class TopNQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedResults( Lists.>newArrayList( new Result( @@ -691,7 +691,7 @@ public class TopNQueryRunnerTest new TopNResultValue(Lists.>newArrayList()) ) ), - runner.run(query, metadata) + runner.run(query, context) ); } @@ -722,7 +722,7 @@ public class TopNQueryRunnerTest 
.aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedResults( Lists.>newArrayList( new Result( @@ -730,7 +730,7 @@ public class TopNQueryRunnerTest new TopNResultValue(Lists.>newArrayList()) ) ), - runner.run(query, metadata) + runner.run(query, context) ); } @@ -748,7 +748,7 @@ public class TopNQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedResults( Sequences.toList( runner.run( @@ -763,9 +763,9 @@ public class TopNQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(), - metadata + context ), Lists.>newArrayList() - ), runner.run(query, metadata) + ), runner.run(query, context) ); } @@ -783,7 +783,7 @@ public class TopNQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedResults( Sequences.toList( runner.run( @@ -798,10 +798,10 @@ public class TopNQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(), - metadata + context ), Lists.>newArrayList() ) - , runner.run(query, metadata) + , runner.run(query, context) ); } @@ -843,8 +843,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -892,8 +892,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -948,8 +948,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -996,8 +996,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1037,8 +1037,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1078,8 +1078,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1119,8 +1119,8 
@@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1160,8 +1160,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1212,8 +1212,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1264,8 +1264,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1316,8 +1316,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1361,8 +1361,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @@ -1407,8 +1407,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1452,8 +1452,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1501,8 +1501,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1586,8 +1586,8 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } @Test @@ -1669,7 +1669,7 @@ public class TopNQueryRunnerTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } } diff --git a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java index 4df1aa50818..35f80552127 100644 --- 
a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java @@ -174,8 +174,8 @@ public class TopNUnionQueryTest ) ) ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java index 7b76e2d0f7d..7c28ac39612 100644 --- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java @@ -449,8 +449,8 @@ public class SpatialFilterBonusTest factory.createRunner(segment), factory.getToolchest() ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } catch (Exception e) { throw Throwables.propagate(e); @@ -536,8 +536,8 @@ public class SpatialFilterBonusTest factory.createRunner(segment), factory.getToolchest() ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } catch (Exception e) { throw Throwables.propagate(e); diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java index 631d06c94d2..ed46d81b0c9 100644 --- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java @@ -479,8 +479,8 @@ public class SpatialFilterTest factory.createRunner(segment), factory.getToolchest() ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } catch (Exception e) { throw Throwables.propagate(e); @@ -566,8 +566,8 @@ public class SpatialFilterTest factory.createRunner(segment), factory.getToolchest() ); - HashMap metadata = new HashMap(); - TestHelper.assertExpectedResults(expectedResults, runner.run(query, metadata)); + HashMap context = new HashMap(); + TestHelper.assertExpectedResults(expectedResults, runner.run(query, context)); } catch (Exception e) { throw Throwables.propagate(e); diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index 339849347ad..4983c7273dc 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -115,7 +115,7 @@ public class CachingClusteredClient implements QueryRunner } @Override - public Sequence run(final Query query, final Map metadata) + public Sequence run(final Query query, final Map context) { final QueryToolChest> toolChest = warehouse.getToolChest(query); final CacheStrategy> strategy = toolChest.getCacheStrategy(query); @@ -327,11 +327,11 @@ public class CachingClusteredClient implements QueryRunner List intervals = 
segmentSpec.getIntervals(); if (!server.isAssignable() || !populateCache || isBySegment) { - resultSeqToAdd = clientQueryable.run(query.withQuerySegmentSpec(segmentSpec), metadata); + resultSeqToAdd = clientQueryable.run(query.withQuerySegmentSpec(segmentSpec), context); } else { resultSeqToAdd = toolChest.mergeSequences( Sequences.map( - clientQueryable.run(rewrittenQuery.withQuerySegmentSpec(segmentSpec), metadata), + clientQueryable.run(rewrittenQuery.withQuerySegmentSpec(segmentSpec), context), new Function>() { private final Function cacheFn = strategy.prepareForCache(); diff --git a/server/src/main/java/io/druid/client/CachingQueryRunner.java b/server/src/main/java/io/druid/client/CachingQueryRunner.java index 2a5c932e73c..47f2ae8facf 100644 --- a/server/src/main/java/io/druid/client/CachingQueryRunner.java +++ b/server/src/main/java/io/druid/client/CachingQueryRunner.java @@ -73,7 +73,7 @@ public class CachingQueryRunner implements QueryRunner } @Override - public Sequence run(Query query, Map metadata) + public Sequence run(Query query, Map context) { final CacheStrategy strategy = toolChest.getCacheStrategy(query); @@ -141,7 +141,7 @@ public class CachingQueryRunner implements QueryRunner return Sequences.withEffect( Sequences.map( - base.run(query, metadata), + base.run(query, context), new Function() { @Override @@ -163,7 +163,7 @@ public class CachingQueryRunner implements QueryRunner MoreExecutors.sameThreadExecutor() ); } else { - return base.run(query, metadata); + return base.run(query, context); } } diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index 5bf0747a0cf..541ae1df62e 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -114,7 +114,7 @@ public class DirectDruidClient implements QueryRunner } @Override - public Sequence run(final Query query, final Map metadata) + public Sequence run(final Query query, final Map context) { QueryToolChest> toolChest = warehouse.getToolChest(query); boolean isBySegment = query.getContextBySegment(false); @@ -170,7 +170,7 @@ public class DirectDruidClient implements QueryRunner } catch (IOException e) { } - metadata.get("missingSegments").addAll(missingSegments); + ((List) context.get("missingSegments")).addAll(missingSegments); } return super.handleResponse(response); diff --git a/server/src/main/java/io/druid/server/QueryResource.java b/server/src/main/java/io/druid/server/QueryResource.java index 2d13ae3cfa9..ebc33c9670f 100644 --- a/server/src/main/java/io/druid/server/QueryResource.java +++ b/server/src/main/java/io/druid/server/QueryResource.java @@ -142,9 +142,9 @@ public class QueryResource log.debug("Got query [%s]", query); } - HashMap metadata = new HashMap(); - metadata.put("missingSegments", new LinkedList()); - Sequence results = query.run(texasRanger, metadata); + HashMap context = new HashMap(); + context.put("missingSegments", new LinkedList()); + Sequence results = query.run(texasRanger, context); if (results == null) { results = Sequences.empty(); @@ -166,8 +166,8 @@ public class QueryResource ) { String missingSegments = ""; - if (!metadata.get("missingSegments").isEmpty()) { - missingSegments = jsonMapper.writeValueAsString(metadata.get("missingSegments")); + if (!((List)context.get("missingSegments")).isEmpty()) { + missingSegments = jsonMapper.writeValueAsString(context.get("missingSegments")); } long requestTime = 
System.currentTimeMillis() - start; diff --git a/server/src/main/java/io/druid/server/bridge/BridgeQuerySegmentWalker.java b/server/src/main/java/io/druid/server/bridge/BridgeQuerySegmentWalker.java index 5590fd420b8..4f260002ccb 100644 --- a/server/src/main/java/io/druid/server/bridge/BridgeQuerySegmentWalker.java +++ b/server/src/main/java/io/druid/server/bridge/BridgeQuerySegmentWalker.java @@ -87,7 +87,7 @@ public class BridgeQuerySegmentWalker implements QuerySegmentWalker return new QueryRunner() { @Override - public Sequence run(Query query, Map metadata) + public Sequence run(Query query, Map context) { try { Server instance = brokerSelector.pick(); diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index 9c75067f678..45a3f544900 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -263,7 +263,7 @@ public class CachingClusteredClientTest new DateTime("2011-01-09T01"), 181, 52 ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedResults( makeRenamedTimeResults( new DateTime("2011-01-01"), 50, 5000, @@ -285,7 +285,7 @@ public class CachingClusteredClientTest .aggregators(RENAMED_AGGS) .postAggregators(RENAMED_POST_AGGS) .build(), - metadata + context ) ); } @@ -316,7 +316,7 @@ public class CachingClusteredClientTest new DateTime("2011-11-07", TIMEZONE), 85, 102 ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedResults( makeRenamedTimeResults( new DateTime("2011-11-04", TIMEZONE), 50, 5000, @@ -329,7 +329,7 @@ public class CachingClusteredClientTest .aggregators(RENAMED_AGGS) .postAggregators(RENAMED_POST_AGGS) .build(), - metadata + context ) ); } @@ -445,7 +445,7 @@ public class CachingClusteredClientTest new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedResults( makeRenamedTopNResults( new DateTime("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998, @@ -467,7 +467,7 @@ public class CachingClusteredClientTest .aggregators(RENAMED_AGGS) .postAggregators(RENAMED_POST_AGGS) .build(), - metadata + context ) ); } @@ -501,7 +501,7 @@ public class CachingClusteredClientTest new DateTime("2011-11-07", TIMEZONE), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986 ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedResults( makeRenamedTopNResults( @@ -516,7 +516,7 @@ public class CachingClusteredClientTest .aggregators(RENAMED_AGGS) .postAggregators(RENAMED_POST_AGGS) .build(), - metadata + context ) ); } @@ -566,7 +566,7 @@ public class CachingClusteredClientTest ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedResults( makeRenamedTopNResults( new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, @@ -586,7 +586,7 @@ public class CachingClusteredClientTest .aggregators(RENAMED_AGGS) .postAggregators(RENAMED_POST_AGGS) .build(), - metadata + context ) ); } @@ -635,7 +635,7 @@ public class CachingClusteredClientTest ) ); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); TestHelper.assertExpectedResults( makeTopNResults( new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, @@ -655,7 +655,7 @@ public class 
CachingClusteredClientTest .aggregators(AGGS) .postAggregators(POST_AGGS) .build(), - metadata + context ) ); } @@ -763,7 +763,7 @@ public class CachingClusteredClientTest .once(); final Capture capture = new Capture(); - final Capture metadata = new Capture(); + final Capture context = new Capture(); queryCaptures.add(capture); QueryRunner queryable = expectations.getQueryRunner(); @@ -776,7 +776,7 @@ public class CachingClusteredClientTest intervals.add(expectation.getInterval()); results.add(expectation.getResults()); } - EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(metadata))) + EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))) .andReturn(toQueryableTimeseriesResults(expectBySegment, segmentIds, intervals, results)) .once(); @@ -789,7 +789,7 @@ public class CachingClusteredClientTest intervals.add(expectation.getInterval()); results.add(expectation.getResults()); } - EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(metadata))) + EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))) .andReturn(toQueryableTopNResults(segmentIds, intervals, results)) .once(); } else if (query instanceof SearchQuery) { @@ -801,7 +801,7 @@ public class CachingClusteredClientTest intervals.add(expectation.getInterval()); results.add(expectation.getResults()); } - EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(metadata))) + EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))) .andReturn(toQueryableSearchResults(segmentIds, intervals, results)) .once(); } else if (query instanceof TimeBoundaryQuery) { @@ -813,7 +813,7 @@ public class CachingClusteredClientTest intervals.add(expectation.getInterval()); results.add(expectation.getResults()); } - EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(metadata))) + EasyMock.expect(queryable.run(EasyMock.capture(capture), EasyMock.capture(context))) .andReturn(toQueryableTimeBoundaryResults(segmentIds, intervals, results)) .once(); } else { @@ -837,7 +837,7 @@ public class CachingClusteredClientTest @Override public void run() { - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); for (int i = 0; i < numTimesToQuery; ++i) { TestHelper.assertExpectedResults( new MergeIterable<>( @@ -872,7 +872,7 @@ public class CachingClusteredClientTest ) ) ), - metadata + context ) ); } diff --git a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java index e669d847f9d..a358ec44798 100644 --- a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java +++ b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java @@ -124,7 +124,7 @@ public class CachingQueryRunnerTest new QueryRunner() { @Override - public Sequence run(Query query, Map metadata) + public Sequence run(Query query, Map context) { return resultSeq; } @@ -141,8 +141,8 @@ public class CachingQueryRunnerTest cacheStrategy.computeCacheKey(query) ); - HashMap metadata = new HashMap(); - Sequence res = runner.run(query, metadata); + HashMap context = new HashMap(); + Sequence res = runner.run(query, context); // base sequence is not closed yet Assert.assertFalse("sequence must not be closed", closable.isClosed()); Assert.assertNull("cache must be empty", cache.get(cacheKey)); @@ -214,7 +214,7 @@ public class CachingQueryRunnerTest new QueryRunner() { @Override - public Sequence run(Query query, Map metadata) + public 
Sequence run(Query query, Map context) { return Sequences.empty(); } @@ -222,8 +222,8 @@ public class CachingQueryRunnerTest new CacheConfig() ); - HashMap metadata = new HashMap(); - List results = Sequences.toList(runner.run(query, metadata), new ArrayList()); + HashMap context = new HashMap(); + List results = Sequences.toList(runner.run(query, context), new ArrayList()); Assert.assertEquals(expectedResults, results); } diff --git a/server/src/test/java/io/druid/client/DirectDruidClientTest.java b/server/src/test/java/io/druid/client/DirectDruidClientTest.java index d6fbb1b7c9d..8d2ea4daf0d 100644 --- a/server/src/test/java/io/druid/client/DirectDruidClientTest.java +++ b/server/src/test/java/io/druid/client/DirectDruidClientTest.java @@ -117,20 +117,20 @@ public class DirectDruidClientTest serverSelector.addServer(queryableDruidServer2); TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build(); - HashMap metadata = new HashMap(); - Sequence s1 = client1.run(query, metadata); + HashMap context = new HashMap(); + Sequence s1 = client1.run(query, context); Assert.assertEquals(1, client1.getNumOpenConnections()); // simulate read timeout - Sequence s2 = client1.run(query, metadata); + Sequence s2 = client1.run(query, context); Assert.assertEquals(2, client1.getNumOpenConnections()); futureException.setException(new ReadTimeoutException()); Assert.assertEquals(1, client1.getNumOpenConnections()); // subsequent connections should work - Sequence s3 = client1.run(query, metadata); - Sequence s4 = client1.run(query, metadata); - Sequence s5 = client1.run(query, metadata); + Sequence s3 = client1.run(query, context); + Sequence s4 = client1.run(query, context); + Sequence s5 = client1.run(query, context); Assert.assertTrue(client1.getNumOpenConnections() == 4); @@ -141,8 +141,8 @@ public class DirectDruidClientTest Assert.assertEquals(new DateTime("2014-01-01T01:02:03Z"), results.get(0).getTimestamp()); Assert.assertEquals(3, client1.getNumOpenConnections()); - client2.run(query, metadata); - client2.run(query, metadata); + client2.run(query, context); + client2.run(query, context); Assert.assertTrue(client2.getNumOpenConnections() == 2); @@ -200,9 +200,9 @@ public class DirectDruidClientTest serverSelector.addServer(queryableDruidServer1); TimeBoundaryQuery query = Druids.newTimeBoundaryQueryBuilder().dataSource("test").build(); - HashMap metadata = new HashMap(); + HashMap context = new HashMap(); cancellationFuture.set(new StatusResponseHolder(HttpResponseStatus.OK, new StringBuilder("cancelled"))); - Sequence results = client1.run(query, metadata); + Sequence results = client1.run(query, context); Assert.assertEquals(0, client1.getNumOpenConnections()); diff --git a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java index 371bdcc3671..ed06679858b 100644 --- a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java @@ -429,8 +429,8 @@ public class ServerManagerTest @Override public void run() { - Map metadata = new HashMap(); - Sequence> seq = runner.run(query, metadata); + Map context = new HashMap(); + Sequence> seq = runner.run(query, context); Sequences.toList(seq, Lists.>newArrayList()); Iterator adaptersIter = factory.getAdapters().iterator(); @@ -679,9 +679,9 @@ public class ServerManagerTest } @Override - public Sequence run(Query query, Map metadata) + public 
Sequence run(Query query, Map context) { - return new BlockingSequence(runner.run(query, metadata), waitLatch, waitYieldLatch, notifyLatch); + return new BlockingSequence(runner.run(query, context), waitLatch, waitYieldLatch, notifyLatch); } }

From 35e080bbc14a0853f13dc3e29425c8719894d329 Mon Sep 17 00:00:00 2001
From: jisookim0513
Date: Thu, 19 Jun 2014 14:14:54 -0700
Subject: [PATCH 08/46] fix concurrency issue with the map; introduce new exception; add incomplete retry query runner test

---
 .../java/io/druid/query/RetryQueryRunner.java | 22 +++-
 .../druid/query/RetryQueryRunnerConfig.java   |  2 +
 .../spec/SpecificSegmentQueryRunner.java      |  4 +-
 .../timeseries/TimeseriesQueryEngine.java     |  4 +-
 .../io/druid/query/topn/TopNQueryEngine.java  |  4 +-
 ...tion.java => SegmentMissingException.java} |  6 +-
 .../io/druid/query/RetryQueryRunnerTest.java  | 116 ++++++++++++++++++
 .../java/io/druid/server/QueryResource.java   |   9 +-
 8 files changed, 149 insertions(+), 18 deletions(-)
 rename processing/src/main/java/io/druid/segment/{NullStorageAdapterException.java => SegmentMissingException.java} (85%)
 create mode 100644 processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java

diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunner.java b/processing/src/main/java/io/druid/query/RetryQueryRunner.java
index 34781cfbb07..0c60d630433 100644
--- a/processing/src/main/java/io/druid/query/RetryQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/RetryQueryRunner.java
@@ -21,8 +21,8 @@ package io.druid.query;
 
 import com.metamx.common.guava.Sequence;
 import com.metamx.common.guava.Sequences;
-import io.druid.query.spec.QuerySegmentSpec;
-import io.druid.query.spec.SpecificSegmentSpec;
+import io.druid.query.spec.MultipleSpecificSegmentSpec;
+import io.druid.segment.SegmentMissingException;
 
 import java.util.Arrays;
 import java.util.List;
@@ -30,6 +30,7 @@ import java.util.Map;
 
 public class RetryQueryRunner<T> implements QueryRunner<T>
 {
+  public static String missingSegments = "missingSegments";
   private final QueryRunner<T> baseRunner;
   private final QueryToolChest<T, Query<T>> toolChest;
   private final RetryQueryRunnerConfig config;
@@ -46,23 +47,32 @@ public class RetryQueryRunner<T> implements QueryRunner<T>
   {
     Sequence<T> returningSeq = baseRunner.run(query, context);
 
-    for (int i = config.numTries(); i > 0; i--) {
-      for (int j = ((List) context.get("missingSegments")).size(); j > 0; j--) {
-        QuerySegmentSpec segmentSpec = new SpecificSegmentSpec((SegmentDescriptor) ((List) context.get("missingSegments")).remove(0));
+
+    for (int i = config.numTries(); i > 0 && !((List) context.get(missingSegments)).isEmpty(); i--) {
+      List segList = (List) context.get(missingSegments);
+      ((List) context.get(missingSegments)).clear();
       returningSeq = toolChest.mergeSequences(
           Sequences.simple(
               Arrays.asList(
                   returningSeq,
                   baseRunner.run(
-                      query.withQuerySegmentSpec(segmentSpec),
+                      query.withQuerySegmentSpec(new MultipleSpecificSegmentSpec(segList)),
                       context
                   )
               )
          )
      );
+    }
+
+    if (!config.returnPartialResults() && !((List) context.get(missingSegments)).isEmpty()) {
+      String failedSegments = "";
+      for (SegmentDescriptor segment : (List) context.get("missingSegments")) {
+        failedSegments = failedSegments + segment.toString() + " ";
       }
+      throw new SegmentMissingException("The following segments are missing: " + failedSegments);
     }
 
     return returningSeq;
   }
 }
+
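The retry loop above communicates with downstream runners through a shared context map: runners append SegmentDescriptor entries under the missingSegments key, and the retry loop drains that list between attempts. One subtlety: the cast in the patch returns the live list, so clearing it also empties segList, which was assigned from the same object. A minimal sketch of a safe drain, assuming a Map<String, Object> context as used throughout these patches (the helper itself is illustrative, not part of the patch):

    // Illustrative helper: snapshot the missing-segment list, then clear the
    // live copy so the next attempt starts from a clean slate.
    static List<SegmentDescriptor> snapshotMissing(Map<String, Object> context)
    {
      List<SegmentDescriptor> live = (List<SegmentDescriptor>) context.get(RetryQueryRunner.missingSegments);
      List<SegmentDescriptor> copy = new ArrayList<SegmentDescriptor>(live);
      live.clear();  // empties the same object the get() above returned
      return copy;
    }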
diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java
index 5b5ed2639b5..5759b2794bb 100644
--- a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java
+++ b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java
@@ -25,6 +25,9 @@ public class RetryQueryRunnerConfig
 {
   @JsonProperty
   private int numTries = 1;
+  @JsonProperty
+  private boolean returnPartialResults = false;
 
   public int numTries() { return numTries; }
+  public boolean returnPartialResults() { return returnPartialResults; }
 }

diff --git a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java
index 80eaa28fa33..9a457b96e04 100644
--- a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java
@@ -27,7 +27,7 @@
 import com.metamx.common.guava.Yielder;
 import com.metamx.common.guava.YieldingAccumulator;
 import io.druid.query.Query;
 import io.druid.query.QueryRunner;
-import io.druid.segment.NullStorageAdapterException;
+import io.druid.segment.SegmentMissingException;
 
 import java.io.IOException;
 import java.util.List;
@@ -67,7 +67,7 @@ public class SpecificSegmentQueryRunner implements QueryRunner
     Sequence returningSeq;
     try {
       returningSeq = base.run(query, context);
-    } catch (NullStorageAdapterException e) {
+    } catch (SegmentMissingException e) {
       ((List) context.get("missingSegments")).add(((SpecificSegmentSpec) specificSpec).getDescriptor());
       returningSeq = Sequences.empty();
     }
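RetryQueryRunnerConfig is materialized from JSON by Jackson, so a field is only settable from configuration when it carries @JsonProperty (added to returnPartialResults above for that reason; without it, Jackson cannot see the private field). A small sketch of the binding, with an illustrative JSON snippet standing in for Druid's real property source:

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class RetryConfigBinding
    {
      public static void main(String[] args) throws Exception
      {
        ObjectMapper mapper = new ObjectMapper();
        // Hypothetical input; in Druid the values come from runtime configuration.
        RetryQueryRunnerConfig config = mapper.readValue(
            "{\"numTries\": 2, \"returnPartialResults\": true}",
            RetryQueryRunnerConfig.class
        );
        System.out.println(config.numTries());              // 2
        System.out.println(config.returnPartialResults());  // true
      }
    }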
diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java
index ee239c2c572..bcc3e13512f 100644
--- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java
+++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java
@@ -27,7 +27,7 @@
 import io.druid.query.aggregation.Aggregator;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.PostAggregator;
 import io.druid.segment.Cursor;
-import io.druid.segment.NullStorageAdapterException;
+import io.druid.segment.SegmentMissingException;
 import io.druid.segment.StorageAdapter;
 import io.druid.segment.filter.Filters;
 
@@ -40,7 +40,7 @@ public class TimeseriesQueryEngine
   public Sequence<Result<TimeseriesResultValue>> process(final TimeseriesQuery query, final StorageAdapter adapter)
   {
     if (adapter == null) {
-      throw new NullStorageAdapterException(
+      throw new SegmentMissingException(
          "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
       );
     }

diff --git a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
index 1faf1fb7699..65f2be86580 100644
--- a/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
+++ b/processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java
@@ -32,7 +32,7 @@
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.filter.Filter;
 import io.druid.segment.Capabilities;
 import io.druid.segment.Cursor;
-import io.druid.segment.NullStorageAdapterException;
+import io.druid.segment.SegmentMissingException;
 import io.druid.segment.StorageAdapter;
 import io.druid.segment.filter.Filters;
 import org.joda.time.Interval;
@@ -56,7 +56,7 @@ public class TopNQueryEngine
   public Sequence<Result<TopNResultValue>> query(final TopNQuery query, final StorageAdapter adapter)
   {
     if (adapter == null) {
-      throw new NullStorageAdapterException(
+      throw new SegmentMissingException(
          "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
       );
     }
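Both engines now raise the same exception when a segment's storage adapter has been unmapped, which is what lets SpecificSegmentQueryRunner (above) convert the failure into a missing-segment record instead of failing the whole query. The shared guard, condensed into a hypothetical helper:

    // Hypothetical helper equivalent to the guard both engines now apply.
    static StorageAdapter requireAdapter(StorageAdapter adapter)
    {
      if (adapter == null) {
        // Thrown while a segment is being memory unmapped; recorded upstream
        // as a missing segment rather than surfacing as a query error.
        throw new SegmentMissingException(
            "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
        );
      }
      return adapter;
    }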
diff --git a/processing/src/main/java/io/druid/segment/NullStorageAdapterException.java b/processing/src/main/java/io/druid/segment/SegmentMissingException.java
similarity index 85%
rename from processing/src/main/java/io/druid/segment/NullStorageAdapterException.java
rename to processing/src/main/java/io/druid/segment/SegmentMissingException.java
index 8d2b967afff..aade5e560ca 100644
--- a/processing/src/main/java/io/druid/segment/NullStorageAdapterException.java
+++ b/processing/src/main/java/io/druid/segment/SegmentMissingException.java
@@ -19,9 +19,11 @@
 
 package io.druid.segment;
 
-public class NullStorageAdapterException extends IllegalStateException
+import com.metamx.common.ISE;
+
+public class SegmentMissingException extends ISE
 {
-  public NullStorageAdapterException(String formatText, Object... arguments) {
+  public SegmentMissingException(String formatText, Object... arguments) {
     super(String.format(formatText, arguments));
   }
 }

diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java
new file mode 100644
index 00000000000..45da8bcad0e
--- /dev/null
+++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java
@@ -0,0 +1,116 @@
+package io.druid.query;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.google.common.base.Function;
+import com.google.common.collect.Lists;
+import com.google.common.collect.MapMaker;
+import com.google.common.collect.Ordering;
+import com.metamx.common.guava.Sequence;
+import com.metamx.common.guava.Sequences;
+import com.metamx.emitter.service.ServiceMetricEvent;
+import io.druid.collections.OrderedMergeSequence;
+import io.druid.query.aggregation.AggregatorFactory;
+import io.druid.query.aggregation.LongSumAggregatorFactory;
+import io.druid.query.aggregation.MetricManipulationFn;
+import io.druid.query.timeseries.TimeseriesQuery;
+import io.druid.query.timeseries.TimeseriesResultValue;
+import org.joda.time.Interval;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
+public class RetryQueryRunnerTest
+{
+
+  final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
+      .dataSource(QueryRunnerTestHelper.dataSource)
+      .granularity(QueryRunnerTestHelper.dayGran)
+      .intervals(QueryRunnerTestHelper.firstToThird)
+      .aggregators(
+          Arrays.asList(
+              QueryRunnerTestHelper.rowsCount,
+              new LongSumAggregatorFactory(
+                  "idx",
+                  "index"
+              ),
+              QueryRunnerTestHelper.qualityUniques
+          )
+      )
+      .build();
+
+
+  @Test
+  public void testRunWithMissingSegments() throws Exception
+  {
+    Map context = new MapMaker().makeMap();
+    context.put("missingSegments", Lists.newArrayList());
+    RetryQueryRunner runner = new RetryQueryRunner(
+        new QueryRunner()
+        {
+          @Override
+          public Sequence run(Query query, Map context)
+          {
+            ((List) context.get(RetryQueryRunner.missingSegments)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1));
+            return Sequences.empty();
+          }
+        },
+        new QueryToolChest()
+        {
+          @Override
+          public QueryRunner mergeResults(QueryRunner runner)
+          {
+            return null;
+          }
+
+          @Override
+          public Sequence mergeSequences(Sequence seqOfSequences)
+          {
+            return new OrderedMergeSequence<Result<TimeseriesResultValue>>(getOrdering(), seqOfSequences);
+          }
+
+          @Override
+          public ServiceMetricEvent.Builder makeMetricBuilder(Query query)
+          {
+            return null;
+          }
+
+          @Override
+          public Function makePreComputeManipulatorFn(
+              Query query, MetricManipulationFn fn
+          )
+          {
+            return null;
+          }
+
+          @Override
+          public TypeReference getResultTypeReference()
+          {
+            return null;
+          }
+
+          public Ordering<Result<TimeseriesResultValue>> getOrdering()
+          {
+            return Ordering.natural();
+          }
+        },
+        new RetryQueryRunnerConfig()
+        {
+          private int numTries = 1;
+          private boolean returnPartialResults = true;
+
+          public int numTries() { return numTries; }
+          public boolean returnPartialResults() { return returnPartialResults; }
+        }
+    );
+
+    Iterable<Result<TimeseriesResultValue>> actualResults = Sequences.toList(
+        runner.run(query, context),
+        Lists.<Result<TimeseriesResultValue>>newArrayList()
+    );
+
+    Assert.assertTrue("Should return an empty sequence as a result", ((List) actualResults).size() == 0);
+  }
+}
\ No newline at end of file
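The new test covers only the lenient path (returnPartialResults = true). A companion check for the strict path would pin down the exception contract as well; a sketch under the same JUnit 4 conventions, where strictRunner is a hypothetical RetryQueryRunner built like the one above but whose config returns false from returnPartialResults():

    @Test(expected = SegmentMissingException.class)
    public void testRunWithMissingSegmentsStrict() throws Exception
    {
      Map context = new MapMaker().makeMap();
      context.put(RetryQueryRunner.missingSegments, Lists.newArrayList());
      // With partial results disabled, exhausting the retries while segments
      // are still missing should surface SegmentMissingException.
      Sequences.toList(strictRunner.run(query, context), Lists.newArrayList());
    }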
diff --git a/server/src/main/java/io/druid/server/QueryResource.java b/server/src/main/java/io/druid/server/QueryResource.java
index ebc33c9670f..353e7014e06 100644
--- a/server/src/main/java/io/druid/server/QueryResource.java
+++ b/server/src/main/java/io/druid/server/QueryResource.java
@@ -22,9 +22,11 @@ package io.druid.server;
 
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.google.common.collect.MapMaker;
 import com.google.common.io.ByteStreams;
 import com.google.inject.Inject;
 import com.metamx.common.guava.Sequence;
@@ -57,9 +59,8 @@
 import javax.ws.rs.core.Response;
 import javax.ws.rs.core.StreamingOutput;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.util.HashMap;
-import java.util.LinkedList;
 import java.util.List;
+import java.util.Map;
 import java.util.UUID;
 
 /**
@@ -142,8 +143,8 @@ public class QueryResource
       log.debug("Got query [%s]", query);
     }
 
-    HashMap context = new HashMap();
-    context.put("missingSegments", new LinkedList());
+    Map context = new MapMaker().makeMap();
+    context.put("missingSegments", Lists.newArrayList());
     Sequence results = query.run(texasRanger, context);
 
     if (results == null) {

From 8e85097999f5b35a480d99d288490f6fb6578f2e Mon Sep 17 00:00:00 2001
From: jisookim0513
Date: Thu, 19 Jun 2014 14:38:11 -0700
Subject: [PATCH 09/46] fix header so it passes the entire context

---
 .../io/druid/client/DirectDruidClient.java    | 29 ++++++++++++-------
 .../java/io/druid/server/QueryResource.java   |  6 ++--
 2 files changed, 21 insertions(+), 14 deletions(-)

diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java
index 541ae1df62e..e69cea4e417 100644
--- a/server/src/main/java/io/druid/client/DirectDruidClient.java
+++ b/server/src/main/java/io/druid/client/DirectDruidClient.java
@@ -54,6 +54,7 @@
 import io.druid.query.QueryToolChest;
 import io.druid.query.QueryToolChestWarehouse;
 import io.druid.query.QueryWatcher;
 import io.druid.query.Result;
+import io.druid.query.RetryQueryRunner;
 import io.druid.query.SegmentDescriptor;
 import io.druid.query.aggregation.MetricManipulatorFns;
 import org.jboss.netty.handler.codec.http.HttpChunk;
@@ -64,8 +65,8 @@
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.URL;
+import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CancellationException;
@@ -160,18 +161,24 @@ public class DirectDruidClient implements QueryRunner
           startTime = System.currentTimeMillis();
           byteCount += response.getContent().readableBytes();
 
-          if (!response.getHeader("Missing-Segments").equals("")) {
-            LinkedList missingSegments = new LinkedList();
-            try {
-              missingSegments = objectMapper.readValue(response.getHeader("Missing-Segments"), LinkedList.class);
-              for (int i = missingSegments.size(); i > 0; i--) {
-                missingSegments.add(objectMapper.convertValue(missingSegments.remove(0), SegmentDescriptor.class));
-              }
+
+          List missingSegments = new ArrayList();
+          try {
+            Map headerContext = objectMapper.readValue(response.getHeader("Context"), Map.class);
+            missingSegments = (List) headerContext.get(RetryQueryRunner.missingSegments);
+            for (int i = missingSegments.size(); i > 0; i--) {
+              missingSegments.add(
+                  objectMapper.convertValue(
+                      missingSegments.remove(0),
+                      SegmentDescriptor.class
+                  )
+              );
             }
-            catch (IOException e) {
-            }
-            ((List) context.get("missingSegments")).addAll(missingSegments);
+          }
+          catch (IOException e) {
+            e.printStackTrace();
+          }
+          ((List) context.get("missingSegments")).addAll(missingSegments);
 
           return super.handleResponse(response);
         }
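The hand-off in patch 09 works by JSON-encoding the whole context map into a Context response header on the server and decoding it again in the client; the segment descriptors come back as generic maps and are re-materialized with convertValue. A stripped-down round trip using only Jackson (header transport elided; names follow the patch):

    ObjectMapper mapper = new ObjectMapper();

    // Server side (QueryResource): serialize the context once results are known.
    String headerValue = mapper.writeValueAsString(context);

    // Client side (DirectDruidClient): parse the header, then rebuild the
    // SegmentDescriptor instances that Jackson deserialized as plain maps.
    Map headerContext = mapper.readValue(headerValue, Map.class);
    List missing = (List) headerContext.get(RetryQueryRunner.missingSegments);
    for (int i = missing.size(); i > 0; i--) {
      missing.add(mapper.convertValue(missing.remove(0), SegmentDescriptor.class));
    }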
diff --git a/server/src/main/java/io/druid/server/QueryResource.java b/server/src/main/java/io/druid/server/QueryResource.java
index 353e7014e06..ce6e255cd0d 100644
--- a/server/src/main/java/io/druid/server/QueryResource.java
+++ b/server/src/main/java/io/druid/server/QueryResource.java
@@ -166,9 +166,9 @@ public class QueryResource
         )
     )
     {
-      String missingSegments = "";
-      if (!((List) context.get("missingSegments")).isEmpty()) {
-        missingSegments = jsonMapper.writeValueAsString(context.get("missingSegments"));
+      String headerContext = "";
+      if (!((List) context.get("missingSegments")).isEmpty()) {
+        headerContext = jsonMapper.writeValueAsString(context);
       }
 
       long requestTime = System.currentTimeMillis() - start;
@@ -213,7 +213,7 @@ public class QueryResource
               isSmile ? APPLICATION_JSON : APPLICATION_SMILE
           )
           .header("X-Druid-Query-Id", queryId)
-          .header("Missing-Segments", missingSegments)
+          .header("Context", headerContext)
           .build();
     }
 }

From c4048d06dd84d4f1bb4075a55fe02cb9f98e56b9 Mon Sep 17 00:00:00 2001
From: jisookim0513
Date: Thu, 19 Jun 2014 14:42:04 -0700
Subject: [PATCH 10/46] delete an unused flag in QueryableIndexSegment

---
 .../src/main/java/io/druid/segment/QueryableIndexSegment.java | 1 -
 1 file changed, 1 deletion(-)

diff --git a/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java b/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java
index e5075fc80ab..c2a7ee18aa6 100644
--- a/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java
+++ b/processing/src/main/java/io/druid/segment/QueryableIndexSegment.java
@@ -29,7 +29,6 @@ public class QueryableIndexSegment implements Segment
 {
   private final QueryableIndex index;
   private final String identifier;
-  private boolean nullStorage = false;
 
   public QueryableIndexSegment(final String segmentIdentifier, QueryableIndex index)
   {

From ff980091c5af58cd317ea94c3c0277fc6a3e98c3 Mon Sep 17 00:00:00 2001
From: jisookim0513
Date: Thu, 19 Jun 2014 14:43:58 -0700
Subject: [PATCH 11/46] delete an unused flag in IncrementalIndexSegment

---
 .../src/main/java/io/druid/segment/IncrementalIndexSegment.java | 1 -
 1 file changed, 1 deletion(-)

diff --git a/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java b/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java
index ab5d7974dcb..f21f7f1fa09 100644
--- a/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java
+++ b/processing/src/main/java/io/druid/segment/IncrementalIndexSegment.java
@@ -31,7 +31,6 @@ public class IncrementalIndexSegment implements Segment
 {
   private final IncrementalIndex index;
   private final String segmentIdentifier;
-  private boolean nullStorage = false;
 
   public IncrementalIndexSegment(
       IncrementalIndex index,

From bdb35e2d7e64b957450e06db73e02856e0d09644 Mon Sep 17 00:00:00 2001
From: jisookim0513
Date: Thu, 19 Jun 2014 17:50:50 -0700
Subject: [PATCH 12/46] fix retry logic and change the default value of retry to 0

---
 .../java/io/druid/query/RetryQueryRunner.java | 68 +++++++++++--------
 .../druid/query/RetryQueryRunnerConfig.java   |  2 +-
 .../druid/client/CachingClusteredClient.java  |  1 +
 3 files changed, 41 insertions(+), 30 deletions(-)

diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunner.java b/processing/src/main/java/io/druid/query/RetryQueryRunner.java
index 0c60d630433..a6f267c371a 100644
--- a/processing/src/main/java/io/druid/query/RetryQueryRunner.java
a/processing/src/main/java/io/druid/query/RetryQueryRunner.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunner.java @@ -20,11 +20,11 @@ package io.druid.query; import com.metamx.common.guava.Sequence; -import com.metamx.common.guava.Sequences; -import io.druid.query.spec.MultipleSpecificSegmentSpec; +import com.metamx.common.guava.Yielder; +import com.metamx.common.guava.YieldingAccumulator; +import com.metamx.common.guava.YieldingSequenceBase; import io.druid.segment.SegmentMissingException; -import java.util.Arrays; import java.util.List; import java.util.Map; @@ -35,7 +35,11 @@ public class RetryQueryRunner implements QueryRunner private final QueryToolChest> toolChest; private final RetryQueryRunnerConfig config; - public RetryQueryRunner(QueryRunner baseRunner, QueryToolChest> toolChest, RetryQueryRunnerConfig config) + public RetryQueryRunner( + QueryRunner baseRunner, + QueryToolChest> toolChest, + RetryQueryRunnerConfig config + ) { this.baseRunner = baseRunner; this.toolChest = toolChest; @@ -43,36 +47,42 @@ public class RetryQueryRunner implements QueryRunner } @Override - public Sequence run(final Query query, Map context) + public Sequence run(final Query query, final Map context) { - Sequence returningSeq = baseRunner.run(query, context); + final Sequence returningSeq = baseRunner.run(query, context); + return new YieldingSequenceBase() + { + @Override + public Yielder toYielder( + OutType initValue, YieldingAccumulator accumulator + ) + { + Yielder yielder = returningSeq.toYielder(initValue, accumulator); - for (int i = config.numTries(); i > 0 && !((List)context.get(missingSegments)).isEmpty(); i--) { - List segList= (List)context.get(missingSegments); - ((List)context.get(missingSegments)).clear(); - returningSeq = toolChest.mergeSequences( - Sequences.simple( - Arrays.asList( - returningSeq, - baseRunner.run( - query.withQuerySegmentSpec(new MultipleSpecificSegmentSpec(segList)), - context - ) - ) - ) - ); - } + if (((List) context.get(missingSegments)).isEmpty()) { + return yielder; + } - if (!config.returnPartialResults() && !((List)context.get(missingSegments)).isEmpty()) { - String failedSegments = ""; - for (SegmentDescriptor segment : (List) context.get("missingSegments")) { - failedSegments = failedSegments + segment.toString() + " "; + for (int i = config.numTries(); i > 0 && !((List) context.get(missingSegments)).isEmpty(); i--) { + ((List) context.get(missingSegments)).clear(); + yielder = baseRunner.run(query, context).toYielder(initValue, accumulator); + if (((List) context.get(missingSegments)).isEmpty()) { + break; + } + } + + if (!config.returnPartialResults() && !((List) context.get(missingSegments)).isEmpty()) { + String failedSegments = ""; + for (SegmentDescriptor segment : (List) context.get("missingSegments")) { + failedSegments = failedSegments + segment.toString() + " "; + } + throw new SegmentMissingException("The following segments are missing: " + failedSegments); + } + + return yielder; } - throw new SegmentMissingException("The following segments are missing: " + failedSegments); - } - - return returningSeq; + }; } } diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java index 5759b2794bb..cbbb4a4d49d 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; public 
class RetryQueryRunnerConfig { @JsonProperty - private int numTries = 1; + private int numTries = 0; private boolean returnPartialResults = false; public int numTries() { return numTries; } diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index 4983c7273dc..3d893469cd3 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -253,6 +253,7 @@ public class CachingClusteredClient implements QueryRunner addSequencesFromServer(listOfSequences); addSequencesFromCache(listOfSequences); + Collections.sort( listOfSequences, Ordering.natural().onResultOf(Pair.>lhsFn()) From 4e2b5b743f7f449abd89053b534af50d7346c9e4 Mon Sep 17 00:00:00 2001 From: jisookim0513 Date: Thu, 19 Jun 2014 18:18:58 -0700 Subject: [PATCH 13/46] add more unit tests --- .../io/druid/query/RetryQueryRunnerTest.java | 171 +++++++++++++++++- .../timeseries/TimeseriesQueryRunnerTest.java | 28 +-- .../druid/client/CachingClusteredClient.java | 1 - 3 files changed, 184 insertions(+), 16 deletions(-) diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index 45da8bcad0e..8391d36544a 100644 --- a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -14,10 +14,12 @@ import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.MetricManipulationFn; import io.druid.query.timeseries.TimeseriesQuery; import io.druid.query.timeseries.TimeseriesResultValue; +import io.druid.segment.SegmentMissingException; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -97,6 +99,94 @@ public class RetryQueryRunnerTest } }, new RetryQueryRunnerConfig() + { + private int numTries = 0; + private boolean returnPartialResults = true; + + public int numTries() { return numTries; } + public boolean returnPartialResults() { return returnPartialResults; } + } + ); + + Iterable> actualResults = Sequences.toList( + runner.run(query, context), + Lists.>newArrayList() + ); + + Assert.assertTrue("Should have one entry in the list of missing segments", ((List) context.get(RetryQueryRunner.missingSegments)).size() == 1); + Assert.assertTrue("Should return an empty sequence as a result", ((List) actualResults).size() == 0); + } + + @Test + public void testRetry() throws Exception + { + Map context = new MapMaker().makeMap(); + context.put("count", 0); + context.put("missingSegments", Lists.newArrayList()); + RetryQueryRunner runner = new RetryQueryRunner( + new QueryRunner() + { + @Override + public Sequence run(Query query, Map context) + { + if (context.get("count") == 0) { + ((List) context.get(RetryQueryRunner.missingSegments)).add( + new SegmentDescriptor( + new Interval( + 178888, + 1999999 + ), "test", 1 + ) + ); + context.put("count", 1); + return Sequences.empty(); + } else { + ArrayList lst = new ArrayList(); + lst.add("hello world"); + return Sequences.simple(lst); + } + } + }, + new QueryToolChest() + { + @Override + public QueryRunner mergeResults(QueryRunner runner) + { + return null; + } + + @Override + public Sequence mergeSequences(Sequence seqOfSequences) + { + return new OrderedMergeSequence>(getOrdering(), seqOfSequences); + } 
+ + @Override + public ServiceMetricEvent.Builder makeMetricBuilder(Query query) + { + return null; + } + + @Override + public Function makePreComputeManipulatorFn( + Query query, MetricManipulationFn fn + ) + { + return null; + } + + @Override + public TypeReference getResultTypeReference() + { + return null; + } + + public Ordering> getOrdering() + { + return Ordering.natural(); + } + }, + new RetryQueryRunnerConfig() { private int numTries = 1; private boolean returnPartialResults = true; @@ -111,6 +201,85 @@ public class RetryQueryRunnerTest Lists.>newArrayList() ); - Assert.assertTrue("Should return an empty sequence as a result", ((List) actualResults).size() == 0); + actualResults = Sequences.toList( + runner.run(query, context), + Lists.>newArrayList() + ); + + Assert.assertTrue("Should return a list with one element", ((List) actualResults).size() == 1); + Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(RetryQueryRunner.missingSegments)).size() == 0); } + + @Test(expected= SegmentMissingException.class) + public void testException() throws Exception + { + Map context = new MapMaker().makeMap(); + context.put("missingSegments", Lists.newArrayList()); + RetryQueryRunner runner = new RetryQueryRunner( + new QueryRunner() + { + @Override + public Sequence run(Query query, Map context) + { + ((List)context.get(RetryQueryRunner.missingSegments)).add(new SegmentDescriptor(new Interval(178888, 1999999), "test", 1)); + return Sequences.empty(); + } + }, + new QueryToolChest() + { + @Override + public QueryRunner mergeResults(QueryRunner runner) + { + return null; + } + + @Override + public Sequence mergeSequences(Sequence seqOfSequences) + { + return new OrderedMergeSequence>(getOrdering(), seqOfSequences); + } + + @Override + public ServiceMetricEvent.Builder makeMetricBuilder(Query query) + { + return null; + } + + @Override + public Function makePreComputeManipulatorFn( + Query query, MetricManipulationFn fn + ) + { + return null; + } + + @Override + public TypeReference getResultTypeReference() + { + return null; + } + + public Ordering> getOrdering() + { + return Ordering.natural(); + } + }, + new RetryQueryRunnerConfig() + { + private int numTries = 1; + private boolean returnPartialResults = false; + + public int numTries() { return numTries; } + public boolean returnPartialResults() { return returnPartialResults; } + } + ); + + Iterable> actualResults = Sequences.toList( + runner.run(query, context), + Lists.>newArrayList() + ); + + Assert.assertTrue("Should have one entry in the list of missing segments", ((List) context.get(RetryQueryRunner.missingSegments)).size() == 1); + } + } \ No newline at end of file diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 1d1fcb21b00..acd60aa051e 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -252,20 +252,20 @@ public class TimeseriesQueryRunnerTest public void testTimeseries() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.firstToThird) - .aggregators( - Arrays.asList( - QueryRunnerTestHelper.rowsCount, - new LongSumAggregatorFactory( - "idx", - "index" - ), - QueryRunnerTestHelper.qualityUniques - ) 
- ) - .build(); + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.dayGran) + .intervals(QueryRunnerTestHelper.firstToThird) + .aggregators( + Arrays.asList( + QueryRunnerTestHelper.rowsCount, + new LongSumAggregatorFactory( + "idx", + "index" + ), + QueryRunnerTestHelper.qualityUniques + ) + ) + .build(); List> expectedResults = Arrays.asList( new Result( diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index 3d893469cd3..4983c7273dc 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -253,7 +253,6 @@ public class CachingClusteredClient implements QueryRunner addSequencesFromServer(listOfSequences); addSequencesFromCache(listOfSequences); - Collections.sort( listOfSequences, Ordering.natural().onResultOf(Pair.>lhsFn()) From 0244172ccbd7a29ad1584c252f0a35c230f6cae8 Mon Sep 17 00:00:00 2001 From: jisookim0513 Date: Thu, 19 Jun 2014 18:22:36 -0700 Subject: [PATCH 14/46] change missingSegments to a static variable --- .../java/io/druid/query/RetryQueryRunner.java | 4 +-- .../spec/SpecificSegmentQueryRunner.java | 2 +- .../io/druid/query/RetryQueryRunnerTest.java | 6 ++-- .../timeseries/TimeseriesQueryRunnerTest.java | 28 +++++++++---------- .../io/druid/client/DirectDruidClient.java | 2 +- .../java/io/druid/server/QueryResource.java | 5 ++-- 6 files changed, 24 insertions(+), 23 deletions(-) diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunner.java b/processing/src/main/java/io/druid/query/RetryQueryRunner.java index a6f267c371a..62d7b4e8da4 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunner.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunner.java @@ -30,7 +30,7 @@ import java.util.Map; public class RetryQueryRunner implements QueryRunner { - public static String missingSegments = "missingSegments"; + public static String missingSegments = RetryQueryRunner.missingSegments; private final QueryRunner baseRunner; private final QueryToolChest> toolChest; private final RetryQueryRunnerConfig config; @@ -74,7 +74,7 @@ public class RetryQueryRunner implements QueryRunner if (!config.returnPartialResults() && !((List) context.get(missingSegments)).isEmpty()) { String failedSegments = ""; - for (SegmentDescriptor segment : (List) context.get("missingSegments")) { + for (SegmentDescriptor segment : (List) context.get(RetryQueryRunner.missingSegments)) { failedSegments = failedSegments + segment.toString() + " "; } throw new SegmentMissingException("The following segments are missing: " + failedSegments); diff --git a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java index 9a457b96e04..add0b2c754e 100644 --- a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java @@ -68,7 +68,7 @@ public class SpecificSegmentQueryRunner implements QueryRunner try { returningSeq = base.run(query, context); } catch (SegmentMissingException e) { - ((List)context.get("missingSegments")).add(((SpecificSegmentSpec) specificSpec).getDescriptor()); + ((List)context.get(RetryQueryRunner.missingSegments)).add(((SpecificSegmentSpec) specificSpec).getDescriptor()); returningSeq = Sequences.empty(); } return returningSeq; diff --git 
a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index 8391d36544a..595f3133ec6 100644 --- a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -48,7 +48,7 @@ public class RetryQueryRunnerTest public void testRunWithMissingSegments() throws Exception { Map context = new MapMaker().makeMap(); - context.put("missingSegments", Lists.newArrayList()); + context.put(RetryQueryRunner.missingSegments, Lists.newArrayList()); RetryQueryRunner runner = new RetryQueryRunner( new QueryRunner() { @@ -122,7 +122,7 @@ public class RetryQueryRunnerTest { Map context = new MapMaker().makeMap(); context.put("count", 0); - context.put("missingSegments", Lists.newArrayList()); + context.put(RetryQueryRunner.missingSegments, Lists.newArrayList()); RetryQueryRunner runner = new RetryQueryRunner( new QueryRunner() { @@ -214,7 +214,7 @@ public class RetryQueryRunnerTest public void testException() throws Exception { Map context = new MapMaker().makeMap(); - context.put("missingSegments", Lists.newArrayList()); + context.put(RetryQueryRunner.missingSegments, Lists.newArrayList()); RetryQueryRunner runner = new RetryQueryRunner( new QueryRunner() { diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index acd60aa051e..1d1fcb21b00 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -252,20 +252,20 @@ public class TimeseriesQueryRunnerTest public void testTimeseries() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.firstToThird) - .aggregators( - Arrays.asList( - QueryRunnerTestHelper.rowsCount, - new LongSumAggregatorFactory( - "idx", - "index" - ), - QueryRunnerTestHelper.qualityUniques - ) - ) - .build(); + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.dayGran) + .intervals(QueryRunnerTestHelper.firstToThird) + .aggregators( + Arrays.asList( + QueryRunnerTestHelper.rowsCount, + new LongSumAggregatorFactory( + "idx", + "index" + ), + QueryRunnerTestHelper.qualityUniques + ) + ) + .build(); List> expectedResults = Arrays.asList( new Result( diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index e69cea4e417..204da241ff9 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -178,7 +178,7 @@ public class DirectDruidClient implements QueryRunner catch (IOException e) { e.printStackTrace(); } - ((List) context.get("missingSegments")).addAll(missingSegments); + ((List) context.get(RetryQueryRunner.missingSegments)).addAll(missingSegments); return super.handleResponse(response); } diff --git a/server/src/main/java/io/druid/server/QueryResource.java b/server/src/main/java/io/druid/server/QueryResource.java index ce6e255cd0d..dffb66de79d 100644 --- a/server/src/main/java/io/druid/server/QueryResource.java +++ b/server/src/main/java/io/druid/server/QueryResource.java @@ -42,6 +42,7 @@ import io.druid.query.DataSourceUtil; import io.druid.query.Query; 
import io.druid.query.QueryInterruptedException; import io.druid.query.QuerySegmentWalker; +import io.druid.query.RetryQueryRunner; import io.druid.server.log.RequestLogger; import org.joda.time.DateTime; @@ -144,7 +145,7 @@ public class QueryResource } Map context = new MapMaker().makeMap(); - context.put("missingSegments", Lists.newArrayList()); + context.put(RetryQueryRunner.missingSegments, Lists.newArrayList()); Sequence results = query.run(texasRanger, context); if (results == null) { @@ -167,7 +168,7 @@ public class QueryResource ) { String headerContext = ""; - if (!((List)context.get("missingSegments")).isEmpty()) { + if (!((List)context.get(RetryQueryRunner.missingSegments)).isEmpty()) { headerContext = jsonMapper.writeValueAsString(context); } From 1cb3fc250ef4a7340e09bd9d8231ded4d42986b1 Mon Sep 17 00:00:00 2001 From: jisookim0513 Date: Thu, 19 Jun 2014 18:29:00 -0700 Subject: [PATCH 15/46] change missingSegments to a static variable --- processing/src/main/java/io/druid/query/RetryQueryRunner.java | 2 +- .../java/io/druid/query/spec/SpecificSegmentQueryRunner.java | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunner.java b/processing/src/main/java/io/druid/query/RetryQueryRunner.java index 62d7b4e8da4..cc4aa8af402 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunner.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunner.java @@ -30,7 +30,7 @@ import java.util.Map; public class RetryQueryRunner implements QueryRunner { - public static String missingSegments = RetryQueryRunner.missingSegments; + public static String missingSegments = "missingSegments"; private final QueryRunner baseRunner; private final QueryToolChest> toolChest; private final RetryQueryRunnerConfig config; diff --git a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java index add0b2c754e..77beb6cb745 100644 --- a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java @@ -27,6 +27,7 @@ import com.metamx.common.guava.Yielder; import com.metamx.common.guava.YieldingAccumulator; import io.druid.query.Query; import io.druid.query.QueryRunner; +import io.druid.query.RetryQueryRunner; import io.druid.segment.SegmentMissingException; import java.io.IOException; From 341829133382f449efda8a0a23a14614e9c01a36 Mon Sep 17 00:00:00 2001 From: jisookim0513 Date: Fri, 20 Jun 2014 13:15:46 -0700 Subject: [PATCH 16/46] add a test for retrying multiple times --- .../druid/query/RetryQueryRunnerConfig.java | 1 + .../io/druid/query/RetryQueryRunnerTest.java | 96 +++++++++++++++++++ 2 files changed, 97 insertions(+) diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java index cbbb4a4d49d..2b8bb730b68 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java @@ -25,6 +25,7 @@ public class RetryQueryRunnerConfig { @JsonProperty private int numTries = 0; + @JsonProperty private boolean returnPartialResults = false; public int numTries() { return numTries; } diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index 595f3133ec6..bf10a38863a 100644 --- 
a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -117,6 +117,8 @@ public class RetryQueryRunnerTest Assert.assertTrue("Should return an empty sequence as a result", ((List) actualResults).size() == 0); } + + @Test public void testRetry() throws Exception { @@ -210,6 +212,100 @@ public class RetryQueryRunnerTest Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(RetryQueryRunner.missingSegments)).size() == 0); } + @Test + public void testRetryMultiple() throws Exception + { + Map context = new MapMaker().makeMap(); + context.put("count", 0); + context.put(RetryQueryRunner.missingSegments, Lists.newArrayList()); + RetryQueryRunner runner = new RetryQueryRunner( + new QueryRunner() + { + @Override + public Sequence run(Query query, Map context) + { + if ((int)context.get("count") < 3) { + ((List) context.get(RetryQueryRunner.missingSegments)).add( + new SegmentDescriptor( + new Interval( + 178888, + 1999999 + ), "test", 1 + ) + ); + context.put("count", (int)context.get("count") + 1); + return Sequences.empty(); + } else { + ArrayList lst = new ArrayList(); + lst.add("hello world"); + return Sequences.simple(lst); + } + } + }, + new QueryToolChest() + { + @Override + public QueryRunner mergeResults(QueryRunner runner) + { + return null; + } + + @Override + public Sequence mergeSequences(Sequence seqOfSequences) + { + return new OrderedMergeSequence>(getOrdering(), seqOfSequences); + } + + @Override + public ServiceMetricEvent.Builder makeMetricBuilder(Query query) + { + return null; + } + + @Override + public Function makePreComputeManipulatorFn( + Query query, MetricManipulationFn fn + ) + { + return null; + } + + @Override + public TypeReference getResultTypeReference() + { + return null; + } + + public Ordering> getOrdering() + { + return Ordering.natural(); + } + }, + new RetryQueryRunnerConfig() + { + private int numTries = 4; + private boolean returnPartialResults = true; + + public int numTries() { return numTries; } + public boolean returnPartialResults() { return returnPartialResults; } + } + ); + + Iterable> actualResults = Sequences.toList( + runner.run(query, context), + Lists.>newArrayList() + ); + + actualResults = Sequences.toList( + runner.run(query, context), + Lists.>newArrayList() + ); + + Assert.assertTrue("Should return a list with one element", ((List) actualResults).size() == 1); + Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(RetryQueryRunner.missingSegments)).size() == 0); + } + + @Test(expected= SegmentMissingException.class) public void testException() throws Exception { From 5e7dfaf298a634b996080204194406d72c61d6b8 Mon Sep 17 00:00:00 2001 From: jisookim0513 Date: Fri, 20 Jun 2014 13:45:04 -0700 Subject: [PATCH 17/46] change retry tests --- .../java/io/druid/query/RetryQueryRunnerConfig.java | 2 +- .../test/java/io/druid/query/RetryQueryRunnerTest.java | 10 ---------- 2 files changed, 1 insertion(+), 11 deletions(-) diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java index 2b8bb730b68..1846c15339f 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class RetryQueryRunnerConfig { @JsonProperty - private int numTries = 0; + 
private int numTries = 4; @JsonProperty private boolean returnPartialResults = false; diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index bf10a38863a..16c8a51504a 100644 --- a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -203,11 +203,6 @@ public class RetryQueryRunnerTest Lists.>newArrayList() ); - actualResults = Sequences.toList( - runner.run(query, context), - Lists.>newArrayList() - ); - Assert.assertTrue("Should return a list with one element", ((List) actualResults).size() == 1); Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(RetryQueryRunner.missingSegments)).size() == 0); } @@ -296,11 +291,6 @@ public class RetryQueryRunnerTest Lists.>newArrayList() ); - actualResults = Sequences.toList( - runner.run(query, context), - Lists.>newArrayList() - ); - Assert.assertTrue("Should return a list with one element", ((List) actualResults).size() == 1); Assert.assertTrue("Should have nothing in missingSegment list", ((List) context.get(RetryQueryRunner.missingSegments)).size() == 0); } From cc8a4d08471beabd970601f8685878a6c8400e34 Mon Sep 17 00:00:00 2001 From: jisookim0513 Date: Fri, 20 Jun 2014 13:49:33 -0700 Subject: [PATCH 18/46] revert changes to RetryQueryRunnerConfig --- .../src/main/java/io/druid/query/RetryQueryRunnerConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java index 1846c15339f..2b8bb730b68 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunnerConfig.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; public class RetryQueryRunnerConfig { @JsonProperty - private int numTries = 4; + private int numTries = 0; @JsonProperty private boolean returnPartialResults = false; From c40a315c819ac472605837c6d26d8158bc01561b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Thu, 8 May 2014 18:06:26 -0700 Subject: [PATCH 19/46] initial support for LZ4 compression --- pom.xml | 2 +- processing/pom.xml | 4 + .../segment/FloatMetricColumnSerializer.java | 4 +- .../java/io/druid/segment/IndexMerger.java | 4 +- .../CompressedFloatBufferObjectStrategy.java | 20 +- .../data/CompressedFloatsIndexedSupplier.java | 61 ++++- .../CompressedFloatsSupplierSerializer.java | 17 +- .../CompressedLongBufferObjectStrategy.java | 19 +- .../data/CompressedLongsIndexedSupplier.java | 47 +++- .../CompressedLongsSupplierSerializer.java | 16 +- .../data/CompressedObjectStrategy.java | 254 +++++++++++++++--- .../FixedSizeCompressedObjectStrategy.java | 50 ++++ .../data/InMemoryCompressedFloats.java | 10 +- .../segment/data/InMemoryCompressedLongs.java | 10 +- .../CompressedFloatsIndexedSupplierTest.java | 5 +- ...ompressedFloatsSupplierSerializerTest.java | 12 +- .../CompressedLongsIndexedSupplierTest.java | 5 +- ...CompressedLongsSupplierSerializerTest.java | 8 +- 18 files changed, 447 insertions(+), 101 deletions(-) create mode 100644 processing/src/main/java/io/druid/segment/data/FixedSizeCompressedObjectStrategy.java diff --git a/pom.xml b/pom.xml index 2b7f88276e8..642a0cf6887 100644 --- a/pom.xml +++ b/pom.xml @@ -379,7 +379,7 @@ net.jpountz.lz4 lz4 - 1.1.2 + 1.2.0 com.google.protobuf 
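The hunk above bumps lz4-java to 1.2.0, the library that backs the new LZ4 code paths in this patch. As a reading aid, here is a minimal sketch of the same compress/decompress round trip that the patch's LZ4Compressor and LZ4Decompressor inner classes perform; the factory and codec calls (LZ4Factory.fastestJavaInstance(), highCompressor(), fastDecompressor()) are the ones the patch itself uses, while the class name and sample input are illustrative only:

```
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import net.jpountz.lz4.LZ4Compressor;
import net.jpountz.lz4.LZ4Factory;
import net.jpountz.lz4.LZ4FastDecompressor;

public class Lz4RoundTripSketch
{
  public static void main(String[] args)
  {
    final byte[] input = "a chunk of column values".getBytes(StandardCharsets.UTF_8);
    final LZ4Factory factory = LZ4Factory.fastestJavaInstance();

    // Compress into a worst-case sized scratch buffer, then trim to the bytes
    // actually written -- the same shape as the patch's LZ4Compressor.compress().
    final LZ4Compressor compressor = factory.highCompressor();
    final byte[] scratch = new byte[compressor.maxCompressedLength(input.length)];
    final int written = compressor.compress(input, 0, input.length, scratch, 0, scratch.length);
    final byte[] compressed = Arrays.copyOf(scratch, written);

    // The fast decompressor must be told the decompressed size up front; it returns
    // the number of compressed bytes it consumed.
    final LZ4FastDecompressor decompressor = factory.fastDecompressor();
    final byte[] restored = new byte[input.length];
    decompressor.decompress(compressed, 0, restored, 0, input.length);

    System.out.println(new String(restored, StandardCharsets.UTF_8));
  }
}
```

Knowing the decompressed size ahead of time is what lets the fast path work, which is why the new FixedSizeCompressedObjectStrategy below passes converter.sizeOf(getSize()) to its decompressor and why sizePer now travels into getBufferForOrder() throughout this patch.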
diff --git a/processing/pom.xml b/processing/pom.xml index 755d2f553f7..31ac9652129 100644 --- a/processing/pom.xml +++ b/processing/pom.xml @@ -82,6 +82,10 @@ com.davekoelle alphanum + + net.jpountz.lz4 + lz4 + diff --git a/processing/src/main/java/io/druid/segment/FloatMetricColumnSerializer.java b/processing/src/main/java/io/druid/segment/FloatMetricColumnSerializer.java index 4caa6b95fe2..520708fe015 100644 --- a/processing/src/main/java/io/druid/segment/FloatMetricColumnSerializer.java +++ b/processing/src/main/java/io/druid/segment/FloatMetricColumnSerializer.java @@ -21,6 +21,7 @@ package io.druid.segment; import com.google.common.io.Files; import io.druid.segment.data.CompressedFloatsSupplierSerializer; +import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.IOPeon; import java.io.File; @@ -51,7 +52,8 @@ public class FloatMetricColumnSerializer implements MetricColumnSerializer public void open() throws IOException { writer = CompressedFloatsSupplierSerializer.create( - ioPeon, String.format("%s_little", metricName), IndexIO.BYTE_ORDER + ioPeon, String.format("%s_little", metricName), IndexIO.BYTE_ORDER, + CompressedObjectStrategy.CompressionStrategy.LZ4 // TODO define this somewhere else ); writer.open(); diff --git a/processing/src/main/java/io/druid/segment/IndexMerger.java b/processing/src/main/java/io/druid/segment/IndexMerger.java index 99f854da9af..87cfa25a2ed 100644 --- a/processing/src/main/java/io/druid/segment/IndexMerger.java +++ b/processing/src/main/java/io/druid/segment/IndexMerger.java @@ -54,6 +54,7 @@ import io.druid.query.aggregation.ToLowerCaseAggregatorFactory; import io.druid.segment.column.ColumnConfig; import io.druid.segment.data.ByteBufferWriter; import io.druid.segment.data.CompressedLongsSupplierSerializer; +import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.ConciseCompressedIndexedInts; import io.druid.segment.data.GenericIndexed; import io.druid.segment.data.GenericIndexedWriter; @@ -594,7 +595,8 @@ public class IndexMerger Iterable theRows = rowMergerFn.apply(boats); CompressedLongsSupplierSerializer timeWriter = CompressedLongsSupplierSerializer.create( - ioPeon, "little_end_time", IndexIO.BYTE_ORDER + ioPeon, "little_end_time", IndexIO.BYTE_ORDER, + CompressedObjectStrategy.CompressionStrategy.LZ4 // TODO define this somewhere else ); timeWriter.open(); diff --git a/processing/src/main/java/io/druid/segment/data/CompressedFloatBufferObjectStrategy.java b/processing/src/main/java/io/druid/segment/data/CompressedFloatBufferObjectStrategy.java index 7a9906cd364..e6294462afe 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedFloatBufferObjectStrategy.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedFloatBufferObjectStrategy.java @@ -28,14 +28,16 @@ import java.nio.FloatBuffer; /** */ -public class CompressedFloatBufferObjectStrategy extends CompressedObjectStrategy +public class CompressedFloatBufferObjectStrategy extends FixedSizeCompressedObjectStrategy { - public static CompressedFloatBufferObjectStrategy getBufferForOrder(ByteOrder order) + public static CompressedFloatBufferObjectStrategy getBufferForOrder(final ByteOrder order, final CompressionStrategy compression, final int sizePer) { - return new CompressedFloatBufferObjectStrategy(order); + return new CompressedFloatBufferObjectStrategy(order, compression, sizePer); } - private CompressedFloatBufferObjectStrategy(final ByteOrder order) + private final int sizePer; + + private 
CompressedFloatBufferObjectStrategy(final ByteOrder order, final CompressionStrategy compression, final int sizePer) { super( order, @@ -64,7 +66,15 @@ public class CompressedFloatBufferObjectStrategy extends CompressedObjectStrateg { return into.asFloatBuffer().put(from); } - } + }, + compression ); + this.sizePer = sizePer; + } + + @Override + public int getSize() + { + return sizePer; } } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedFloatsIndexedSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedFloatsIndexedSupplier.java index 1beccc5426f..35a3b03f0b2 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedFloatsIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedFloatsIndexedSupplier.java @@ -40,22 +40,26 @@ import java.util.Iterator; */ public class CompressedFloatsIndexedSupplier implements Supplier { - public static final byte version = 0x1; + public static final byte LZF_VERSION = 0x1; + public static final byte version = 0x2; public static final int MAX_FLOATS_IN_BUFFER = (0xFFFF >> 2); private final int totalSize; private final int sizePer; private final GenericIndexed> baseFloatBuffers; + private final CompressedObjectStrategy.CompressionStrategy compression; CompressedFloatsIndexedSupplier( int totalSize, int sizePer, - GenericIndexed> baseFloatBuffers + GenericIndexed> baseFloatBuffers, + CompressedObjectStrategy.CompressionStrategy compression ) { this.totalSize = totalSize; this.sizePer = sizePer; this.baseFloatBuffers = baseFloatBuffers; + this.compression = compression; } public int size() @@ -151,7 +155,7 @@ public class CompressedFloatsIndexedSupplier implements Supplier public long getSerializedSize() { - return baseFloatBuffers.getSerializedSize() + 1 + 4 + 4; + return baseFloatBuffers.getSerializedSize() + 1 + 4 + 4 + 1; } public void writeToChannel(WritableByteChannel channel) throws IOException @@ -159,6 +163,7 @@ public class CompressedFloatsIndexedSupplier implements Supplier channel.write(ByteBuffer.wrap(new byte[]{version})); channel.write(ByteBuffer.wrap(Ints.toByteArray(totalSize))); channel.write(ByteBuffer.wrap(Ints.toByteArray(sizePer))); + channel.write(ByteBuffer.wrap(new byte[]{compression.getId()})); baseFloatBuffers.writeToChannel(channel); } @@ -167,7 +172,8 @@ public class CompressedFloatsIndexedSupplier implements Supplier return new CompressedFloatsIndexedSupplier( totalSize, sizePer, - GenericIndexed.fromIterable(baseFloatBuffers, CompressedFloatBufferObjectStrategy.getBufferForOrder(order)) + GenericIndexed.fromIterable(baseFloatBuffers, CompressedFloatBufferObjectStrategy.getBufferForOrder(order, compression, sizePer)), + compression ); } @@ -191,23 +197,53 @@ public class CompressedFloatsIndexedSupplier implements Supplier byte versionFromBuffer = buffer.get(); if (versionFromBuffer == version) { + final int totalSize = buffer.getInt(); + final int sizePer = buffer.getInt(); + final CompressedObjectStrategy.CompressionStrategy compression = + CompressedObjectStrategy.CompressionStrategy.forId(buffer.get()); + return new CompressedFloatsIndexedSupplier( - buffer.getInt(), - buffer.getInt(), - GenericIndexed.read(buffer, CompressedFloatBufferObjectStrategy.getBufferForOrder(order)) + totalSize, + sizePer, + GenericIndexed.read( + buffer, + CompressedFloatBufferObjectStrategy.getBufferForOrder( + order, + compression, + sizePer + ) + ), + compression + ); + } else if (versionFromBuffer == LZF_VERSION) { + final int totalSize = buffer.getInt(); + final int 
sizePer = buffer.getInt(); + final CompressedObjectStrategy.CompressionStrategy compression = CompressedObjectStrategy.CompressionStrategy.LZF; + return new CompressedFloatsIndexedSupplier( + totalSize, + sizePer, + GenericIndexed.read( + buffer, + CompressedFloatBufferObjectStrategy.getBufferForOrder( + order, + compression, + sizePer + ) + ), + compression ); } throw new IAE("Unknown version[%s]", versionFromBuffer); } - public static CompressedFloatsIndexedSupplier fromFloatBuffer(FloatBuffer buffer, final ByteOrder order) + public static CompressedFloatsIndexedSupplier fromFloatBuffer(FloatBuffer buffer, final ByteOrder order, CompressedObjectStrategy.CompressionStrategy compression) { - return fromFloatBuffer(buffer, MAX_FLOATS_IN_BUFFER, order); + return fromFloatBuffer(buffer, MAX_FLOATS_IN_BUFFER, order, compression); } public static CompressedFloatsIndexedSupplier fromFloatBuffer( - final FloatBuffer buffer, final int chunkFactor, final ByteOrder order + final FloatBuffer buffer, final int chunkFactor, final ByteOrder order, final CompressedObjectStrategy.CompressionStrategy compression ) { Preconditions.checkArgument( @@ -254,8 +290,9 @@ public class CompressedFloatsIndexedSupplier implements Supplier }; } }, - CompressedFloatBufferObjectStrategy.getBufferForOrder(order) - ) + CompressedFloatBufferObjectStrategy.getBufferForOrder(order, compression, chunkFactor) + ), + compression ); } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedFloatsSupplierSerializer.java b/processing/src/main/java/io/druid/segment/data/CompressedFloatsSupplierSerializer.java index da967898dba..89fa28d2c84 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedFloatsSupplierSerializer.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedFloatsSupplierSerializer.java @@ -36,27 +36,29 @@ import java.nio.FloatBuffer; public class CompressedFloatsSupplierSerializer { public static CompressedFloatsSupplierSerializer create( - IOPeon ioPeon, final String filenameBase, final ByteOrder order + IOPeon ioPeon, final String filenameBase, final ByteOrder order, final CompressedObjectStrategy.CompressionStrategy compression ) throws IOException { - return create(ioPeon, filenameBase, CompressedFloatsIndexedSupplier.MAX_FLOATS_IN_BUFFER, order); + return create(ioPeon, filenameBase, CompressedFloatsIndexedSupplier.MAX_FLOATS_IN_BUFFER, order, compression); } public static CompressedFloatsSupplierSerializer create( - IOPeon ioPeon, final String filenameBase, final int sizePer, final ByteOrder order + IOPeon ioPeon, final String filenameBase, final int sizePer, final ByteOrder order, final CompressedObjectStrategy.CompressionStrategy compression ) throws IOException { final CompressedFloatsSupplierSerializer retVal = new CompressedFloatsSupplierSerializer( sizePer, new GenericIndexedWriter>( - ioPeon, filenameBase, CompressedFloatBufferObjectStrategy.getBufferForOrder(order) - ) + ioPeon, filenameBase, CompressedFloatBufferObjectStrategy.getBufferForOrder(order, compression, sizePer) + ), + compression ); return retVal; } private final int sizePer; private final GenericIndexedWriter> flattener; + private final CompressedObjectStrategy.CompressionStrategy compression; private int numInserted = 0; @@ -64,11 +66,13 @@ public class CompressedFloatsSupplierSerializer public CompressedFloatsSupplierSerializer( int sizePer, - GenericIndexedWriter> flattener + GenericIndexedWriter> flattener, + CompressedObjectStrategy.CompressionStrategy compression ) { this.sizePer = sizePer; 
this.flattener = flattener; + this.compression = compression; endBuffer = FloatBuffer.allocate(sizePer); endBuffer.mark(); @@ -110,6 +114,7 @@ public class CompressedFloatsSupplierSerializer out.write(CompressedFloatsIndexedSupplier.version); out.write(Ints.toByteArray(numInserted)); out.write(Ints.toByteArray(sizePer)); + out.write(new byte[]{compression.getId()}); ByteStreams.copy(flattener.combineStreams(), out); } } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedLongBufferObjectStrategy.java b/processing/src/main/java/io/druid/segment/data/CompressedLongBufferObjectStrategy.java index 823c86a0516..b0b511278f1 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedLongBufferObjectStrategy.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedLongBufferObjectStrategy.java @@ -28,14 +28,16 @@ import java.nio.LongBuffer; /** */ -public class CompressedLongBufferObjectStrategy extends CompressedObjectStrategy +public class CompressedLongBufferObjectStrategy extends FixedSizeCompressedObjectStrategy { - public static CompressedLongBufferObjectStrategy getBufferForOrder(ByteOrder order) + public static CompressedLongBufferObjectStrategy getBufferForOrder(final ByteOrder order, final CompressionStrategy compression, final int sizePer) { - return new CompressedLongBufferObjectStrategy(order); + return new CompressedLongBufferObjectStrategy(order, compression, sizePer); } - private CompressedLongBufferObjectStrategy(final ByteOrder order) + private final int sizePer; + + private CompressedLongBufferObjectStrategy(final ByteOrder order, final CompressionStrategy compression, final int sizePer) { super( order, @@ -64,8 +66,15 @@ public class CompressedLongBufferObjectStrategy extends CompressedObjectStrategy { return into.asLongBuffer().put(from); } - } + }, + compression ); + this.sizePer = sizePer; } + @Override + public int getSize() + { + return sizePer; + } } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedLongsIndexedSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedLongsIndexedSupplier.java index 8ad267168a5..56998d09886 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedLongsIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedLongsIndexedSupplier.java @@ -40,21 +40,25 @@ import java.util.Iterator; */ public class CompressedLongsIndexedSupplier implements Supplier { - public static final byte version = 0x1; + public static final byte LZF_VERSION = 0x1; + public static final byte version = 0x2; private final int totalSize; private final int sizePer; private final GenericIndexed> baseLongBuffers; + private final CompressedObjectStrategy.CompressionStrategy compression; CompressedLongsIndexedSupplier( int totalSize, int sizePer, - GenericIndexed> baseLongBuffers + GenericIndexed> baseLongBuffers, + CompressedObjectStrategy.CompressionStrategy compression ) { this.totalSize = totalSize; this.sizePer = sizePer; this.baseLongBuffers = baseLongBuffers; + this.compression = compression; } public int size() @@ -162,7 +166,7 @@ public class CompressedLongsIndexedSupplier implements Supplier public long getSerializedSize() { - return baseLongBuffers.getSerializedSize() + 1 + 4 + 4; + return baseLongBuffers.getSerializedSize() + 1 + 4 + 4 + 1; } public void writeToChannel(WritableByteChannel channel) throws IOException @@ -170,15 +174,17 @@ public class CompressedLongsIndexedSupplier implements Supplier channel.write(ByteBuffer.wrap(new byte[]{version})); 
channel.write(ByteBuffer.wrap(Ints.toByteArray(totalSize))); channel.write(ByteBuffer.wrap(Ints.toByteArray(sizePer))); + channel.write(ByteBuffer.wrap(new byte[]{compression.getId()})); baseLongBuffers.writeToChannel(channel); } - public CompressedLongsIndexedSupplier convertByteOrder(ByteOrder order) + public CompressedLongsIndexedSupplier convertByteOrder(ByteOrder order, CompressedObjectStrategy.CompressionStrategy compression) { return new CompressedLongsIndexedSupplier( totalSize, sizePer, - GenericIndexed.fromIterable(baseLongBuffers, CompressedLongBufferObjectStrategy.getBufferForOrder(order)) + GenericIndexed.fromIterable(baseLongBuffers, CompressedLongBufferObjectStrategy.getBufferForOrder(order, compression, sizePer)), + compression ); } @@ -196,23 +202,37 @@ public class CompressedLongsIndexedSupplier implements Supplier byte versionFromBuffer = buffer.get(); if (versionFromBuffer == version) { + final int totalSize = buffer.getInt(); + final int sizePer = buffer.getInt(); + final CompressedObjectStrategy.CompressionStrategy compression = CompressedObjectStrategy.CompressionStrategy.forId(buffer.get()); return new CompressedLongsIndexedSupplier( - buffer.getInt(), - buffer.getInt(), - GenericIndexed.read(buffer, CompressedLongBufferObjectStrategy.getBufferForOrder(order)) + totalSize, + sizePer, + GenericIndexed.read(buffer, CompressedLongBufferObjectStrategy.getBufferForOrder(order, compression, sizePer)), + compression + ); + } else if (versionFromBuffer == LZF_VERSION) { + final int totalSize = buffer.getInt(); + final int sizePer = buffer.getInt(); + final CompressedObjectStrategy.CompressionStrategy compression = CompressedObjectStrategy.CompressionStrategy.LZF; + return new CompressedLongsIndexedSupplier( + totalSize, + sizePer, + GenericIndexed.read(buffer, CompressedLongBufferObjectStrategy.getBufferForOrder(order, compression, sizePer)), + compression ); } throw new IAE("Unknown version[%s]", versionFromBuffer); } - public static CompressedLongsIndexedSupplier fromLongBuffer(LongBuffer buffer, final ByteOrder byteOrder) + public static CompressedLongsIndexedSupplier fromLongBuffer(LongBuffer buffer, final ByteOrder byteOrder, CompressedObjectStrategy.CompressionStrategy compression) { - return fromLongBuffer(buffer, 0xFFFF / Longs.BYTES, byteOrder); + return fromLongBuffer(buffer, 0xFFFF / Longs.BYTES, byteOrder, compression); } public static CompressedLongsIndexedSupplier fromLongBuffer( - final LongBuffer buffer, final int chunkFactor, final ByteOrder byteOrder + final LongBuffer buffer, final int chunkFactor, final ByteOrder byteOrder, CompressedObjectStrategy.CompressionStrategy compression ) { Preconditions.checkArgument( @@ -259,8 +279,9 @@ public class CompressedLongsIndexedSupplier implements Supplier }; } }, - CompressedLongBufferObjectStrategy.getBufferForOrder(byteOrder) - ) + CompressedLongBufferObjectStrategy.getBufferForOrder(byteOrder, compression, chunkFactor) + ), + compression ); } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedLongsSupplierSerializer.java b/processing/src/main/java/io/druid/segment/data/CompressedLongsSupplierSerializer.java index b0e63a8e391..97a7545009a 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedLongsSupplierSerializer.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedLongsSupplierSerializer.java @@ -37,20 +37,23 @@ import java.nio.LongBuffer; public class CompressedLongsSupplierSerializer { public static CompressedLongsSupplierSerializer create( - IOPeon ioPeon, 
final String filenameBase, final ByteOrder order + IOPeon ioPeon, final String filenameBase, final ByteOrder order, final CompressedObjectStrategy.CompressionStrategy compression ) throws IOException { + final int sizePer = 0xFFFF / Longs.BYTES; final CompressedLongsSupplierSerializer retVal = new CompressedLongsSupplierSerializer( - 0xFFFF / Longs.BYTES, + sizePer, new GenericIndexedWriter>( - ioPeon, filenameBase, CompressedLongBufferObjectStrategy.getBufferForOrder(order) - ) + ioPeon, filenameBase, CompressedLongBufferObjectStrategy.getBufferForOrder(order, compression, sizePer) + ), + compression ); return retVal; } private final int sizePer; private final GenericIndexedWriter> flattener; + private final CompressedObjectStrategy.CompressionStrategy compression; private int numInserted = 0; @@ -58,11 +61,13 @@ public class CompressedLongsSupplierSerializer public CompressedLongsSupplierSerializer( int sizePer, - GenericIndexedWriter> flattener + GenericIndexedWriter> flattener, + CompressedObjectStrategy.CompressionStrategy compression ) { this.sizePer = sizePer; this.flattener = flattener; + this.compression = compression; endBuffer = LongBuffer.allocate(sizePer); endBuffer.mark(); @@ -104,6 +109,7 @@ public class CompressedLongsSupplierSerializer out.write(CompressedLongsIndexedSupplier.version); out.write(Ints.toByteArray(numInserted)); out.write(Ints.toByteArray(sizePer)); + out.write(new byte[]{compression.getId()}); ByteStreams.copy(flattener.combineStreams(), out); } } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java b/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java index 4a0b5723b41..e0865e8ec80 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java @@ -21,31 +21,216 @@ package io.druid.segment.data; import com.google.common.base.Throwables; import com.metamx.common.guava.CloseQuietly; +import com.google.common.collect.Maps; import com.ning.compress.lzf.ChunkEncoder; import com.ning.compress.lzf.LZFChunk; import com.ning.compress.lzf.LZFDecoder; import io.druid.collections.ResourceHolder; import io.druid.segment.CompressedPools; +import net.jpountz.lz4.LZ4Factory; +import net.jpountz.lz4.LZ4FastDecompressor; +import net.jpountz.lz4.LZ4SafeDecompressor; +import net.jpountz.lz4.LZ4UnknownSizeDecompressor; import java.io.IOException; import java.nio.Buffer; import java.nio.ByteBuffer; import java.nio.ByteOrder; +import java.util.Map; /** */ public class CompressedObjectStrategy implements ObjectStrategy> { - private final ByteOrder order; - private final BufferConverter converter; + public static enum CompressionStrategy { + LZF ((byte)0x0) + { + @Override + public Decompressor getDecompressor() + { + return new LZFDecompressor(); + } + + @Override + public Compressor getCompressor() + { + return new LZFCompressor(); + } + }, + + LZ4 ((byte)0x1) { + @Override + public Decompressor getDecompressor() + { + return new LZ4Decompressor(); + } + + @Override + public Compressor getCompressor() + { + return new LZ4Compressor(); + } + }; + + final byte id; + + CompressionStrategy(byte id) { + this.id = id; + } + + public byte getId() + { + return id; + } + public abstract Compressor getCompressor(); + public abstract Decompressor getDecompressor(); + + static final Map idMap = Maps.newHashMap(); + static { + for(CompressionStrategy strategy : CompressionStrategy.values()) idMap.put(strategy.getId(), 
strategy); + } + + public static CompressionStrategy forId(byte id) + { + return idMap.get(id); + } + } + + public static interface Decompressor + { + /** + * Implementations of this method are expected to call out.flip() after writing to the output buffer + * + * @param in + * @param numBytes + * @param out + */ + public void decompress(ByteBuffer in, int numBytes, ByteBuffer out); + public void decompress(ByteBuffer in, int numBytes, ByteBuffer out, int decompressedSize); + } + + public static interface Compressor + { + /** + * Currently assumes buf is an array backed ByteBuffer + * + * @param bytes + * @return + */ + public byte[] compress(byte[] bytes); + } + + public static class LZFDecompressor implements Decompressor + { + @Override + public void decompress(ByteBuffer in, int numBytes, ByteBuffer out) + { + final byte[] bytes = new byte[numBytes]; + in.get(bytes); + + try (final ResourceHolder outputBytesHolder = CompressedPools.getOutputBytes()) { + final byte[] outputBytes = outputBytesHolder.get(); + final int numDecompressedBytes = LZFDecoder.decode(bytes, outputBytes); + out.put(outputBytes, 0, numDecompressedBytes); + out.flip(); + } + catch (IOException e) { + Throwables.propagate(e); + } + } + + @Override + public void decompress(ByteBuffer in, int numBytes, ByteBuffer out, int decompressedSize) + { + decompress(in, numBytes, out); + } + } + + public static class LZFCompressor implements Compressor + { + @Override + public byte[] compress(byte[] bytes) + { + final ResourceHolder encoder = CompressedPools.getChunkEncoder(); + LZFChunk chunk = encoder.get().encodeChunk(bytes, 0, bytes.length); + CloseQuietly.close(encoder); + + return chunk.getData(); + } + } + + public static class LZ4Decompressor implements Decompressor + { + private final LZ4SafeDecompressor lz4 = LZ4Factory.fastestJavaInstance().safeDecompressor(); + private final LZ4FastDecompressor lz4Fast = LZ4Factory.fastestJavaInstance().fastDecompressor(); + + @Override + public void decompress(ByteBuffer in, int numBytes, ByteBuffer out) + { + final byte[] bytes = new byte[numBytes]; + in.get(bytes); + + try (final ResourceHolder outputBytesHolder = CompressedPools.getOutputBytes()) { + final byte[] outputBytes = outputBytesHolder.get(); + final int numDecompressedBytes = lz4.decompress(bytes, 0, bytes.length, outputBytes, 0, outputBytes.length); + + out.put(outputBytes, 0, numDecompressedBytes); + out.flip(); + } + catch (IOException e) { + Throwables.propagate(e); + } + } + + @Override + public void decompress(ByteBuffer in, int numBytes, ByteBuffer out, int decompressedSize) + { + final byte[] bytes = new byte[numBytes]; + in.get(bytes); + + try (final ResourceHolder outputBytesHolder = CompressedPools.getOutputBytes()) { + final byte[] outputBytes = outputBytesHolder.get(); + lz4Fast.decompress(bytes, 0, outputBytes, 0, decompressedSize); + + out.put(outputBytes, 0, decompressedSize); + out.flip(); + } + catch (IOException e) { + Throwables.propagate(e); + } + } + } + + public static class LZ4Compressor implements Compressor + { + private final net.jpountz.lz4.LZ4Compressor lz4 = LZ4Factory.fastestJavaInstance().highCompressor(); + + @Override + public byte[] compress(byte[] bytes) + { + final byte[] intermediate = new byte[lz4.maxCompressedLength(bytes.length)]; + final int outputBytes = lz4.compress(bytes, 0, bytes.length, intermediate, 0, intermediate.length); + final byte[] out = new byte[outputBytes]; + System.arraycopy(intermediate, 0, out, 0, outputBytes); + return out; + } + } + + protected final 
ByteOrder order; + protected final BufferConverter converter; + protected final Decompressor decompressor; + private final Compressor compressor; protected CompressedObjectStrategy( final ByteOrder order, - final BufferConverter converter + final BufferConverter converter, + final CompressionStrategy compression ) { this.order = order; this.converter = converter; + this.decompressor = compression.getDecompressor(); + this.compressor = compression.getCompressor(); } @Override @@ -58,56 +243,49 @@ public class CompressedObjectStrategy implements ObjectStrateg @Override public ResourceHolder fromByteBuffer(ByteBuffer buffer, int numBytes) { - byte[] bytes = new byte[numBytes]; - buffer.get(bytes); - final ResourceHolder bufHolder = CompressedPools.getByteBuf(order); final ByteBuffer buf = bufHolder.get(); buf.position(0); buf.limit(buf.capacity()); - try { - final ResourceHolder outputBytesHolder = CompressedPools.getOutputBytes(); - - byte[] outputBytes = outputBytesHolder.get(); - int numDecompressedBytes = LZFDecoder.decode(bytes, outputBytes); - buf.put(outputBytes, 0, numDecompressedBytes); - buf.flip(); - - CloseQuietly.close(outputBytesHolder); - - return new ResourceHolder() + decompress(buffer, numBytes, buf); + return new ResourceHolder() + { + @Override + public T get() { - @Override - public T get() - { - return converter.convert(buf); - } + return converter.convert(buf); + } - @Override - public void close() throws IOException - { - bufHolder.close(); - } - }; - } - catch (IOException e) { - throw Throwables.propagate(e); - } + @Override + public void close() throws IOException + { + bufHolder.close(); + } + }; + } + + protected void decompress( + ByteBuffer buffer, + int numBytes, + ByteBuffer buf + ) + { + decompressor.decompress(buffer, numBytes, buf); } @Override public byte[] toBytes(ResourceHolder holder) { T val = holder.get(); - ByteBuffer buf = ByteBuffer.allocate(converter.sizeOf(val.remaining())).order(order); + ByteBuffer buf = bufferFor(val); converter.combine(buf, val); + return compressor.compress(buf.array()); + } - final ResourceHolder encoder = CompressedPools.getChunkEncoder(); - LZFChunk chunk = encoder.get().encodeChunk(buf.array(), 0, buf.array().length); - CloseQuietly.close(encoder); - - return chunk.getData(); + protected ByteBuffer bufferFor(T val) + { + return ByteBuffer.allocate(converter.sizeOf(val.remaining())).order(order); } @Override diff --git a/processing/src/main/java/io/druid/segment/data/FixedSizeCompressedObjectStrategy.java b/processing/src/main/java/io/druid/segment/data/FixedSizeCompressedObjectStrategy.java new file mode 100644 index 00000000000..c79e0edc615 --- /dev/null +++ b/processing/src/main/java/io/druid/segment/data/FixedSizeCompressedObjectStrategy.java @@ -0,0 +1,50 @@ +/* + * Druid - a distributed column store. + * Copyright (C) 2012, 2013, 2014 Metamarkets Group Inc. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ + +package io.druid.segment.data; + +import java.nio.Buffer; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; + +public abstract class FixedSizeCompressedObjectStrategy extends CompressedObjectStrategy +{ + protected FixedSizeCompressedObjectStrategy( + ByteOrder order, + BufferConverter converter, + CompressionStrategy compression + ) + { + super(order, converter, compression); + } + + public abstract int getSize(); + + @Override + protected ByteBuffer bufferFor(T val) + { + return ByteBuffer.allocate(converter.sizeOf(getSize())).order(order); + } + + @Override + protected void decompress(ByteBuffer buffer, int numBytes, ByteBuffer buf) + { + decompressor.decompress(buffer, numBytes, buf, converter.sizeOf(getSize())); + } +} diff --git a/processing/src/main/java/io/druid/segment/data/InMemoryCompressedFloats.java b/processing/src/main/java/io/druid/segment/data/InMemoryCompressedFloats.java index d056158fa7d..e8ae40efbab 100644 --- a/processing/src/main/java/io/druid/segment/data/InMemoryCompressedFloats.java +++ b/processing/src/main/java/io/druid/segment/data/InMemoryCompressedFloats.java @@ -37,6 +37,7 @@ import java.util.List; */ public class InMemoryCompressedFloats implements IndexedFloats { + public static final CompressedObjectStrategy.CompressionStrategy COMPRESSION = CompressedObjectStrategy.CompressionStrategy.LZ4; private final CompressedFloatBufferObjectStrategy strategy; private final int sizePer; @@ -56,7 +57,11 @@ public class InMemoryCompressedFloats implements IndexedFloats ) { this.sizePer = sizePer; - strategy = CompressedFloatBufferObjectStrategy.getBufferForOrder(order); + strategy = CompressedFloatBufferObjectStrategy.getBufferForOrder( + order, + COMPRESSION, + sizePer + ); endBuffer = FloatBuffer.allocate(sizePer); endBuffer.mark(); @@ -184,7 +189,8 @@ public class InMemoryCompressedFloats implements IndexedFloats Arrays.>asList(StupidResourceHolder.create(endBufCopy)) ), strategy - ) + ), + COMPRESSION ); } diff --git a/processing/src/main/java/io/druid/segment/data/InMemoryCompressedLongs.java b/processing/src/main/java/io/druid/segment/data/InMemoryCompressedLongs.java index e0ef6fac375..9fd314569d5 100644 --- a/processing/src/main/java/io/druid/segment/data/InMemoryCompressedLongs.java +++ b/processing/src/main/java/io/druid/segment/data/InMemoryCompressedLongs.java @@ -38,6 +38,7 @@ import java.util.List; */ public class InMemoryCompressedLongs implements IndexedLongs { + public static final CompressedObjectStrategy.CompressionStrategy COMPRESSION = CompressedObjectStrategy.CompressionStrategy.LZ4; private final CompressedLongBufferObjectStrategy strategy; private final int sizePer; @@ -57,7 +58,11 @@ public class InMemoryCompressedLongs implements IndexedLongs ) { this.sizePer = sizePer; - strategy = CompressedLongBufferObjectStrategy.getBufferForOrder(order); + strategy = CompressedLongBufferObjectStrategy.getBufferForOrder( + order, + COMPRESSION, + sizePer + ); endBuffer = LongBuffer.allocate(sizePer); endBuffer.mark(); @@ -195,7 +200,8 @@ public class InMemoryCompressedLongs implements IndexedLongs Arrays.>asList(StupidResourceHolder.create(longBufCopy)) ), strategy - ) + ), + COMPRESSION ); } diff --git a/processing/src/test/java/io/druid/segment/data/CompressedFloatsIndexedSupplierTest.java 
b/processing/src/test/java/io/druid/segment/data/CompressedFloatsIndexedSupplierTest.java index d9cf4c5b6ab..e481ee8cef6 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedFloatsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedFloatsIndexedSupplierTest.java @@ -68,7 +68,8 @@ public class CompressedFloatsIndexedSupplierTest supplier = CompressedFloatsIndexedSupplier.fromFloatBuffer( FloatBuffer.wrap(vals), 5, - ByteOrder.nativeOrder() + ByteOrder.nativeOrder(), + CompressedObjectStrategy.CompressionStrategy.LZ4 ); indexed = supplier.get(); @@ -82,7 +83,7 @@ public class CompressedFloatsIndexedSupplierTest ByteArrayOutputStream baos = new ByteArrayOutputStream(); final CompressedFloatsIndexedSupplier theSupplier = CompressedFloatsIndexedSupplier.fromFloatBuffer( - FloatBuffer.wrap(vals), 5, ByteOrder.nativeOrder() + FloatBuffer.wrap(vals), 5, ByteOrder.nativeOrder(), CompressedObjectStrategy.CompressionStrategy.LZ4 ); theSupplier.writeToChannel(Channels.newChannel(baos)); diff --git a/processing/src/test/java/io/druid/segment/data/CompressedFloatsSupplierSerializerTest.java b/processing/src/test/java/io/druid/segment/data/CompressedFloatsSupplierSerializerTest.java index 7b670110269..3b888ae132f 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedFloatsSupplierSerializerTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedFloatsSupplierSerializerTest.java @@ -39,13 +39,19 @@ public class CompressedFloatsSupplierSerializerTest public void testSanity() throws Exception { final ByteOrder order = ByteOrder.nativeOrder(); + final int sizePer = 999; CompressedFloatsSupplierSerializer serializer = new CompressedFloatsSupplierSerializer( - 999, + sizePer, new GenericIndexedWriter>( new IOPeonForTesting(), "test", - CompressedFloatBufferObjectStrategy.getBufferForOrder(order) - ) + CompressedFloatBufferObjectStrategy.getBufferForOrder( + order, + CompressedObjectStrategy.CompressionStrategy.LZ4, + sizePer + ) + ), + CompressedObjectStrategy.CompressionStrategy.LZ4 ); serializer.open(); diff --git a/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java index 768100fd559..dd33d532498 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java @@ -66,7 +66,8 @@ public class CompressedLongsIndexedSupplierTest supplier = CompressedLongsIndexedSupplier.fromLongBuffer( LongBuffer.wrap(vals), 5, - ByteOrder.nativeOrder() + ByteOrder.nativeOrder(), + CompressedObjectStrategy.CompressionStrategy.LZ4 ); indexed = supplier.get(); @@ -78,7 +79,7 @@ public class CompressedLongsIndexedSupplierTest ByteArrayOutputStream baos = new ByteArrayOutputStream(); final CompressedLongsIndexedSupplier theSupplier = CompressedLongsIndexedSupplier.fromLongBuffer( - LongBuffer.wrap(vals), 5, ByteOrder.nativeOrder() + LongBuffer.wrap(vals), 5, ByteOrder.nativeOrder(), CompressedObjectStrategy.CompressionStrategy.LZ4 ); theSupplier.writeToChannel(Channels.newChannel(baos)); diff --git a/processing/src/test/java/io/druid/segment/data/CompressedLongsSupplierSerializerTest.java b/processing/src/test/java/io/druid/segment/data/CompressedLongsSupplierSerializerTest.java index 029297e73e8..ee13c55087e 100644 --- 
a/processing/src/test/java/io/druid/segment/data/CompressedLongsSupplierSerializerTest.java
+++ b/processing/src/test/java/io/druid/segment/data/CompressedLongsSupplierSerializerTest.java
@@ -39,13 +39,15 @@ public class CompressedLongsSupplierSerializerTest
   public void testSanity() throws Exception
   {
     final ByteOrder order = ByteOrder.nativeOrder();
+    final int sizePer = 999;
     CompressedLongsSupplierSerializer serializer = new CompressedLongsSupplierSerializer(
-        999,
+        sizePer,
         new GenericIndexedWriter<ResourceHolder<LongBuffer>>(
             new IOPeonForTesting(),
             "test",
-            CompressedLongBufferObjectStrategy.getBufferForOrder(order)
-        )
+            CompressedLongBufferObjectStrategy.getBufferForOrder(order, CompressedObjectStrategy.CompressionStrategy.LZ4, sizePer)
+        ),
+        CompressedObjectStrategy.CompressionStrategy.LZ4
     );

     serializer.open();

From 6fa1dfe68f01b312e2413d48d383a19d3aef7ad7 Mon Sep 17 00:00:00 2001
From: fjy
Date: Thu, 31 Jul 2014 14:47:44 -0700
Subject: [PATCH 20/46] new docs on logging

---
 docs/content/Logging.md  | 39 +++++++++++++++++++++++++++++++++++++++
 docs/content/toc.textile |  4 ++--
 2 files changed, 41 insertions(+), 2 deletions(-)
 create mode 100644 docs/content/Logging.md

diff --git a/docs/content/Logging.md b/docs/content/Logging.md
new file mode 100644
index 00000000000..d949aa7858d
--- /dev/null
+++ b/docs/content/Logging.md
@@ -0,0 +1,39 @@
+---
+layout: doc_page
+---
+Logging
+==========================
+
+Druid nodes will emit logs that are useful for debugging to the console. Druid nodes also emit periodic metrics about their state. For more about metrics, see [Configuration](Configuration.html). Metric logs are printed to the console by default, and can be disabled with `-Ddruid.emitter.logging.logLevel=debug` (the metrics are then emitted at debug level, which the default log configuration does not print).
+
+Druid uses [log4j](http://logging.apache.org/log4j/2.x/) for logging, and console logs can be configured by adding a log4j.xml file. Add this xml file to your classpath if you want to override default Druid log configuration.
+
+An example log4j.xml file is shown below:
+
+```
+<?xml version="1.0" encoding="UTF-8" ?>
+<Configuration status="WARN">
+  <Appenders>
+    <Console name="Console" target="SYSTEM_OUT">
+      <PatternLayout pattern="%d{ISO8601} %p [%t] %c - %m%n"/>
+    </Console>
+  </Appenders>
+  <Loggers>
+    <Root level="info">
+      <AppenderRef ref="Console"/>
+    </Root>
+  </Loggers>
+</Configuration>
+```
\ No newline at end of file
diff --git a/docs/content/toc.textile b/docs/content/toc.textile
index 5ca3d978fe4..d35114ae928 100644
--- a/docs/content/toc.textile
+++ b/docs/content/toc.textile
@@ -28,17 +28,17 @@ h2. Configuration
 * "Realtime":Realtime-Config.html

 h2. Data Ingestion
+* "Ingestion FAQ":./Ingestion-FAQ.html
 * "Realtime":./Realtime-ingestion.html
 * "Batch":./Batch-ingestion.html
 * "Indexing Service":./Indexing-Service.html
 ** "Tasks":./Tasks.html
 * "Data Formats":./Data_formats.html
-* "Ingestion FAQ":./Ingestion-FAQ.html

 h2. Operations
+* "Performance FAQ":./Performance-FAQ.html
 * "Extending Druid":./Modules.html
 * "Booting a Production Cluster":./Booting-a-production-cluster.html
-* "Performance FAQ":./Performance-FAQ.html

 h2.
Querying * "Querying":./Querying.html From 93afc26d92f2b2d0e15510e6c9aaf865f398522c Mon Sep 17 00:00:00 2001 From: fjy Date: Thu, 31 Jul 2014 15:36:01 -0700 Subject: [PATCH 21/46] another log fix --- .../main/java/io/druid/server/coordinator/LoadQueuePeon.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java b/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java index e2577795a27..c7ee45ce492 100644 --- a/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java +++ b/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java @@ -208,7 +208,7 @@ public class LoadQueuePeon } } - log.info("Asking server peon[%s] to drop segment[%s]", basePath, segment); + log.info("Asking server peon[%s] to drop segment[%s]", basePath, segment.getIdentifier()); segmentsToDrop.add(holder); doNext(); } From a5bc6fc706bf3f372577b98d7f8deffa0bf61413 Mon Sep 17 00:00:00 2001 From: fjy Date: Thu, 31 Jul 2014 17:08:08 -0700 Subject: [PATCH 22/46] fix to string error --- .../java/io/druid/query/timeboundary/TimeBoundaryQuery.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java index db9fa913bc9..5aae5d9fc67 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java @@ -199,7 +199,7 @@ public class TimeBoundaryQuery extends BaseQuery "dataSource='" + getDataSource() + '\'' + ", querySegmentSpec=" + getQuerySegmentSpec() + ", duration=" + getDuration() + - ", bound" + bound + + ", bound=" + bound + '}'; } From 8de945ee24d219b71694221256b46e78f05c1695 Mon Sep 17 00:00:00 2001 From: fjy Date: Fri, 1 Aug 2014 13:24:12 -0700 Subject: [PATCH 23/46] rework the readme --- README | 7 ------- README.md | 20 ++++++++++++++++++++ 2 files changed, 20 insertions(+), 7 deletions(-) delete mode 100644 README create mode 100644 README.md diff --git a/README b/README deleted file mode 100644 index 2f3003db430..00000000000 --- a/README +++ /dev/null @@ -1,7 +0,0 @@ -What is Druid? http://www.druid.io - -Looking for docs? http://druid.io/docs/latest/ - -Want to get started? http://druid.io/docs/latest/Tutorial:-A-First-Look-at-Druid.html - -Questions about setting up Druid? https://groups.google.com/forum/#!forum/druid-development diff --git a/README.md b/README.md new file mode 100644 index 00000000000..625b6e638b8 --- /dev/null +++ b/README.md @@ -0,0 +1,20 @@ +## Druid + +Druid is a distributed, column-oriented, real-time analytics data store that is +commonly used to power exploratory dashboards in multi-tenant environments. +Druid excels as a data warehousing solution for fast aggregate queries on +petabyte sized data sets. Druid supports a variety of flexible filters, exact +calculations, approximate algorithms, and other tools commonly found in the +business intelligence space. Druid can load both streaming and batch data. + +### More Information +Much more information about Druid can be found on our [website](http://www.druid.io). + +### Documentation +We host documentation on our [website](http://druid.io/docs/latest/). If you want to contribute documentation changes, please submit a pull request to this repository. 
+ +### Tutorials +We have a series of tutorials to get started with Druid, starting with this [one](http://druid.io/docs/latest/Tutorial:-A-First-Look-at-Druid.html). + +### Support +Contact us through our [forum](https://groups.google.com/forum/#!forum/druid-development) or on IRC in #druid-dev on irc.freenode.net. From 991e1828b05326eb2865929f3f0623108e79b0a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Wed, 6 Aug 2014 15:50:04 -0700 Subject: [PATCH 24/46] make LZ4 the default compression strategy - LZ4 is now hardwired to be the default strategy - Rework tests to test all available compression strategies --- .../segment/FloatMetricColumnSerializer.java | 2 +- .../java/io/druid/segment/IndexMerger.java | 2 +- .../data/CompressedObjectStrategy.java | 2 + .../data/InMemoryCompressedFloats.java | 2 +- .../segment/data/InMemoryCompressedLongs.java | 2 +- .../CompressedFloatsIndexedSupplierTest.java | 16 ++++-- ...ompressedFloatsSupplierSerializerTest.java | 16 ++++-- .../CompressedLongsIndexedSupplierTest.java | 11 ++-- ...CompressedLongsSupplierSerializerTest.java | 16 ++++-- .../segment/data/CompressionStrategyTest.java | 52 +++++++++++++++++++ 10 files changed, 99 insertions(+), 22 deletions(-) create mode 100644 processing/src/test/java/io/druid/segment/data/CompressionStrategyTest.java diff --git a/processing/src/main/java/io/druid/segment/FloatMetricColumnSerializer.java b/processing/src/main/java/io/druid/segment/FloatMetricColumnSerializer.java index 520708fe015..2bc50c19d30 100644 --- a/processing/src/main/java/io/druid/segment/FloatMetricColumnSerializer.java +++ b/processing/src/main/java/io/druid/segment/FloatMetricColumnSerializer.java @@ -53,7 +53,7 @@ public class FloatMetricColumnSerializer implements MetricColumnSerializer { writer = CompressedFloatsSupplierSerializer.create( ioPeon, String.format("%s_little", metricName), IndexIO.BYTE_ORDER, - CompressedObjectStrategy.CompressionStrategy.LZ4 // TODO define this somewhere else + CompressedObjectStrategy.DEFAULT_COMPRESSION_STRATEGY ); writer.open(); diff --git a/processing/src/main/java/io/druid/segment/IndexMerger.java b/processing/src/main/java/io/druid/segment/IndexMerger.java index 87cfa25a2ed..b817e3ea089 100644 --- a/processing/src/main/java/io/druid/segment/IndexMerger.java +++ b/processing/src/main/java/io/druid/segment/IndexMerger.java @@ -596,7 +596,7 @@ public class IndexMerger CompressedLongsSupplierSerializer timeWriter = CompressedLongsSupplierSerializer.create( ioPeon, "little_end_time", IndexIO.BYTE_ORDER, - CompressedObjectStrategy.CompressionStrategy.LZ4 // TODO define this somewhere else + CompressedObjectStrategy.DEFAULT_COMPRESSION_STRATEGY ); timeWriter.open(); diff --git a/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java b/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java index e0865e8ec80..36a3ad4876e 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedObjectStrategy.java @@ -42,6 +42,8 @@ import java.util.Map; */ public class CompressedObjectStrategy implements ObjectStrategy> { + public static final CompressionStrategy DEFAULT_COMPRESSION_STRATEGY = CompressionStrategy.LZ4; + public static enum CompressionStrategy { LZF ((byte)0x0) { diff --git a/processing/src/main/java/io/druid/segment/data/InMemoryCompressedFloats.java b/processing/src/main/java/io/druid/segment/data/InMemoryCompressedFloats.java index 
e8ae40efbab..d86a0f4364e 100644 --- a/processing/src/main/java/io/druid/segment/data/InMemoryCompressedFloats.java +++ b/processing/src/main/java/io/druid/segment/data/InMemoryCompressedFloats.java @@ -37,7 +37,7 @@ import java.util.List; */ public class InMemoryCompressedFloats implements IndexedFloats { - public static final CompressedObjectStrategy.CompressionStrategy COMPRESSION = CompressedObjectStrategy.CompressionStrategy.LZ4; + public static final CompressedObjectStrategy.CompressionStrategy COMPRESSION = CompressedObjectStrategy.DEFAULT_COMPRESSION_STRATEGY; private final CompressedFloatBufferObjectStrategy strategy; private final int sizePer; diff --git a/processing/src/main/java/io/druid/segment/data/InMemoryCompressedLongs.java b/processing/src/main/java/io/druid/segment/data/InMemoryCompressedLongs.java index 9fd314569d5..266475636d3 100644 --- a/processing/src/main/java/io/druid/segment/data/InMemoryCompressedLongs.java +++ b/processing/src/main/java/io/druid/segment/data/InMemoryCompressedLongs.java @@ -38,7 +38,7 @@ import java.util.List; */ public class InMemoryCompressedLongs implements IndexedLongs { - public static final CompressedObjectStrategy.CompressionStrategy COMPRESSION = CompressedObjectStrategy.CompressionStrategy.LZ4; + public static final CompressedObjectStrategy.CompressionStrategy COMPRESSION = CompressedObjectStrategy.DEFAULT_COMPRESSION_STRATEGY; private final CompressedLongBufferObjectStrategy strategy; private final int sizePer; diff --git a/processing/src/test/java/io/druid/segment/data/CompressedFloatsIndexedSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedFloatsIndexedSupplierTest.java index e481ee8cef6..0982f76f8bf 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedFloatsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedFloatsIndexedSupplierTest.java @@ -25,6 +25,8 @@ import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -36,10 +38,14 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -/** - */ -public class CompressedFloatsIndexedSupplierTest +@RunWith(Parameterized.class) +public class CompressedFloatsIndexedSupplierTest extends CompressionStrategyTest { + public CompressedFloatsIndexedSupplierTest(CompressedObjectStrategy.CompressionStrategy compressionStrategy) + { + super(compressionStrategy); + } + private IndexedFloats indexed; private CompressedFloatsIndexedSupplier supplier; private float[] vals; @@ -69,7 +75,7 @@ public class CompressedFloatsIndexedSupplierTest FloatBuffer.wrap(vals), 5, ByteOrder.nativeOrder(), - CompressedObjectStrategy.CompressionStrategy.LZ4 + compressionStrategy ); indexed = supplier.get(); @@ -83,7 +89,7 @@ public class CompressedFloatsIndexedSupplierTest ByteArrayOutputStream baos = new ByteArrayOutputStream(); final CompressedFloatsIndexedSupplier theSupplier = CompressedFloatsIndexedSupplier.fromFloatBuffer( - FloatBuffer.wrap(vals), 5, ByteOrder.nativeOrder(), CompressedObjectStrategy.CompressionStrategy.LZ4 + FloatBuffer.wrap(vals), 5, ByteOrder.nativeOrder(), compressionStrategy ); theSupplier.writeToChannel(Channels.newChannel(baos)); diff --git a/processing/src/test/java/io/druid/segment/data/CompressedFloatsSupplierSerializerTest.java 
b/processing/src/test/java/io/druid/segment/data/CompressedFloatsSupplierSerializerTest.java index 3b888ae132f..e61c01be8e5 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedFloatsSupplierSerializerTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedFloatsSupplierSerializerTest.java @@ -23,6 +23,8 @@ import com.google.common.io.OutputSupplier; import io.druid.collections.ResourceHolder; import org.junit.Assert; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -31,10 +33,14 @@ import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.FloatBuffer; -/** - */ -public class CompressedFloatsSupplierSerializerTest +@RunWith(Parameterized.class) +public class CompressedFloatsSupplierSerializerTest extends CompressionStrategyTest { + public CompressedFloatsSupplierSerializerTest(CompressedObjectStrategy.CompressionStrategy compressionStrategy) + { + super(compressionStrategy); + } + @Test public void testSanity() throws Exception { @@ -47,11 +53,11 @@ public class CompressedFloatsSupplierSerializerTest "test", CompressedFloatBufferObjectStrategy.getBufferForOrder( order, - CompressedObjectStrategy.CompressionStrategy.LZ4, + compressionStrategy, sizePer ) ), - CompressedObjectStrategy.CompressionStrategy.LZ4 + compressionStrategy ); serializer.open(); diff --git a/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java index dd33d532498..fc29e284443 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java @@ -38,8 +38,13 @@ import java.util.concurrent.atomic.AtomicReference; /** */ -public class CompressedLongsIndexedSupplierTest +public class CompressedLongsIndexedSupplierTest extends CompressionStrategyTest { + public CompressedLongsIndexedSupplierTest(CompressedObjectStrategy.CompressionStrategy compressionStrategy) + { + super(compressionStrategy); + } + private IndexedLongs indexed; private CompressedLongsIndexedSupplier supplier; private long[] vals; @@ -67,7 +72,7 @@ public class CompressedLongsIndexedSupplierTest LongBuffer.wrap(vals), 5, ByteOrder.nativeOrder(), - CompressedObjectStrategy.CompressionStrategy.LZ4 + compressionStrategy ); indexed = supplier.get(); @@ -79,7 +84,7 @@ public class CompressedLongsIndexedSupplierTest ByteArrayOutputStream baos = new ByteArrayOutputStream(); final CompressedLongsIndexedSupplier theSupplier = CompressedLongsIndexedSupplier.fromLongBuffer( - LongBuffer.wrap(vals), 5, ByteOrder.nativeOrder(), CompressedObjectStrategy.CompressionStrategy.LZ4 + LongBuffer.wrap(vals), 5, ByteOrder.nativeOrder(), compressionStrategy ); theSupplier.writeToChannel(Channels.newChannel(baos)); diff --git a/processing/src/test/java/io/druid/segment/data/CompressedLongsSupplierSerializerTest.java b/processing/src/test/java/io/druid/segment/data/CompressedLongsSupplierSerializerTest.java index ee13c55087e..bb5d6ec444e 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedLongsSupplierSerializerTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedLongsSupplierSerializerTest.java @@ -23,6 +23,8 @@ import com.google.common.io.OutputSupplier; import io.druid.collections.ResourceHolder; import org.junit.Assert; import 
org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -31,10 +33,14 @@ import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.LongBuffer; -/** - */ -public class CompressedLongsSupplierSerializerTest +@RunWith(Parameterized.class) +public class CompressedLongsSupplierSerializerTest extends CompressionStrategyTest { + public CompressedLongsSupplierSerializerTest(CompressedObjectStrategy.CompressionStrategy compressionStrategy) + { + super(compressionStrategy); + } + @Test public void testSanity() throws Exception { @@ -45,9 +51,9 @@ public class CompressedLongsSupplierSerializerTest new GenericIndexedWriter>( new IOPeonForTesting(), "test", - CompressedLongBufferObjectStrategy.getBufferForOrder(order, CompressedObjectStrategy.CompressionStrategy.LZ4, sizePer) + CompressedLongBufferObjectStrategy.getBufferForOrder(order, compressionStrategy, sizePer) ), - CompressedObjectStrategy.CompressionStrategy.LZ4 + compressionStrategy ); serializer.open(); diff --git a/processing/src/test/java/io/druid/segment/data/CompressionStrategyTest.java b/processing/src/test/java/io/druid/segment/data/CompressionStrategyTest.java new file mode 100644 index 00000000000..845ed8369c7 --- /dev/null +++ b/processing/src/test/java/io/druid/segment/data/CompressionStrategyTest.java @@ -0,0 +1,52 @@ +/* + * Druid - a distributed column store. + * Copyright (C) 2012, 2013, 2014 Metamarkets Group Inc. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
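Since `CompressionStrategyTest` below only supplies the `@Parameterized.Parameters` method plus a constructor, here is a compact standalone refresher on how JUnit 4's `Parameterized` runner drives such tests — the test body itself is hypothetical, only the runner wiring matches the patch:

```java
import static org.junit.Assert.assertEquals;

import java.util.Arrays;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

@RunWith(Parameterized.class)
public class RoundTripTest
{
  // One test instance is created per Object[] returned here.
  @Parameterized.Parameters
  public static Iterable<Object[]> strategies()
  {
    return Arrays.asList(new Object[]{"lzf"}, new Object[]{"lz4"});
  }

  private final String strategy;

  // The runner passes each Object[] to this constructor.
  public RoundTripTest(String strategy)
  {
    this.strategy = strategy;
  }

  @Test
  public void testNameIsNonEmpty()
  {
    assertEquals(3, strategy.length()); // stand-in assertion; the real tests round-trip data
  }
}
```

In the patch, `CompressionStrategyTest` plays the role of the fixture base: it owns the `@Parameters` method (one `Object[]` per `CompressionStrategy` value), so each concrete test class only adds a matching constructor and is automatically re-run against every available codec.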
+ */ + +package io.druid.segment.data; + +import com.google.common.base.Function; +import com.google.common.collect.Iterables; +import org.junit.runners.Parameterized; + +import java.util.Arrays; + +public class CompressionStrategyTest +{ + @Parameterized.Parameters + public static Iterable compressionStrategies() + { + return Iterables.transform( + Arrays.asList(CompressedObjectStrategy.CompressionStrategy.values()), + new Function() + { + @Override + public Object[] apply(CompressedObjectStrategy.CompressionStrategy compressionStrategy) + { + return new Object[]{compressionStrategy}; + } + } + ); + } + + protected final CompressedObjectStrategy.CompressionStrategy compressionStrategy; + + public CompressionStrategyTest(CompressedObjectStrategy.CompressionStrategy compressionStrategy) + { + this.compressionStrategy = compressionStrategy; + } +} From 6fa611c2620d285934b5abf6feda832ccc1a3543 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Wed, 6 Aug 2014 16:13:39 -0700 Subject: [PATCH 25/46] refactor FixedSizeCompressedObjectStrategy --- .../data/CompressedFloatBufferObjectStrategy.java | 12 ++---------- .../data/CompressedLongBufferObjectStrategy.java | 12 ++---------- .../data/FixedSizeCompressedObjectStrategy.java | 10 ++++++++-- 3 files changed, 12 insertions(+), 22 deletions(-) diff --git a/processing/src/main/java/io/druid/segment/data/CompressedFloatBufferObjectStrategy.java b/processing/src/main/java/io/druid/segment/data/CompressedFloatBufferObjectStrategy.java index e6294462afe..b0680b2ec23 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedFloatBufferObjectStrategy.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedFloatBufferObjectStrategy.java @@ -35,8 +35,6 @@ public class CompressedFloatBufferObjectStrategy extends FixedSizeCompressedObje return new CompressedFloatBufferObjectStrategy(order, compression, sizePer); } - private final int sizePer; - private CompressedFloatBufferObjectStrategy(final ByteOrder order, final CompressionStrategy compression, final int sizePer) { super( @@ -67,14 +65,8 @@ public class CompressedFloatBufferObjectStrategy extends FixedSizeCompressedObje return into.asFloatBuffer().put(from); } }, - compression + compression, + sizePer ); - this.sizePer = sizePer; - } - - @Override - public int getSize() - { - return sizePer; } } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedLongBufferObjectStrategy.java b/processing/src/main/java/io/druid/segment/data/CompressedLongBufferObjectStrategy.java index b0b511278f1..13fd264eba3 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedLongBufferObjectStrategy.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedLongBufferObjectStrategy.java @@ -35,8 +35,6 @@ public class CompressedLongBufferObjectStrategy extends FixedSizeCompressedObjec return new CompressedLongBufferObjectStrategy(order, compression, sizePer); } - private final int sizePer; - private CompressedLongBufferObjectStrategy(final ByteOrder order, final CompressionStrategy compression, final int sizePer) { super( @@ -67,14 +65,8 @@ public class CompressedLongBufferObjectStrategy extends FixedSizeCompressedObjec return into.asLongBuffer().put(from); } }, - compression + compression, + sizePer ); - this.sizePer = sizePer; - } - - @Override - public int getSize() - { - return sizePer; } } diff --git a/processing/src/main/java/io/druid/segment/data/FixedSizeCompressedObjectStrategy.java 
b/processing/src/main/java/io/druid/segment/data/FixedSizeCompressedObjectStrategy.java index c79e0edc615..3efc1ba06ac 100644 --- a/processing/src/main/java/io/druid/segment/data/FixedSizeCompressedObjectStrategy.java +++ b/processing/src/main/java/io/druid/segment/data/FixedSizeCompressedObjectStrategy.java @@ -25,16 +25,22 @@ import java.nio.ByteOrder; public abstract class FixedSizeCompressedObjectStrategy extends CompressedObjectStrategy { + private final int sizePer; + protected FixedSizeCompressedObjectStrategy( ByteOrder order, BufferConverter converter, - CompressionStrategy compression + CompressionStrategy compression, + int sizePer ) { super(order, converter, compression); + this.sizePer = sizePer; } - public abstract int getSize(); + public int getSize() { + return sizePer; + } @Override protected ByteBuffer bufferFor(T val) From 263463dccc27d9ae2fb6ddc8475a98b2fc2d9752 Mon Sep 17 00:00:00 2001 From: fjy Date: Fri, 8 Aug 2014 13:44:44 -0700 Subject: [PATCH 26/46] update pom~ --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 31bd84a4f63..9a47fd2d6e4 100644 --- a/pom.xml +++ b/pom.xml @@ -23,14 +23,14 @@ io.druid druid pom - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT druid druid scm:git:ssh://git@github.com/metamx/druid.git scm:git:ssh://git@github.com/metamx/druid.git http://www.github.com/metamx/druid - druid-0.6.131-SNAPSHOT + druid-0.7.0-SNAPSHOT From c860de43cfeaa6500420e0739aa0c071c76f88a6 Mon Sep 17 00:00:00 2001 From: fjy Date: Fri, 8 Aug 2014 13:46:25 -0700 Subject: [PATCH 27/46] fix dependencies --- cassandra-storage/pom.xml | 2 +- common/pom.xml | 2 +- examples/pom.xml | 2 +- hdfs-storage/pom.xml | 2 +- histogram/pom.xml | 2 +- indexing-hadoop/pom.xml | 2 +- indexing-service/pom.xml | 2 +- kafka-eight/pom.xml | 2 +- kafka-seven/pom.xml | 2 +- processing/pom.xml | 2 +- rabbitmq/pom.xml | 2 +- s3-extensions/pom.xml | 2 +- server/pom.xml | 2 +- services/pom.xml | 2 +- 14 files changed, 14 insertions(+), 14 deletions(-) diff --git a/cassandra-storage/pom.xml b/cassandra-storage/pom.xml index e8ebb570a86..c8fec800e3e 100644 --- a/cassandra-storage/pom.xml +++ b/cassandra-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/common/pom.xml b/common/pom.xml index 0b57dcf484e..03e902df7c5 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/examples/pom.xml b/examples/pom.xml index 321e4a226e8..896b772738e 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/hdfs-storage/pom.xml b/hdfs-storage/pom.xml index 1fe657ba77a..ac2c29b7405 100644 --- a/hdfs-storage/pom.xml +++ b/hdfs-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/histogram/pom.xml b/histogram/pom.xml index 24e9937621a..a520de6725b 100644 --- a/histogram/pom.xml +++ b/histogram/pom.xml @@ -27,7 +27,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/indexing-hadoop/pom.xml b/indexing-hadoop/pom.xml index 22fb248bf9b..f23382b6de9 100644 --- a/indexing-hadoop/pom.xml +++ b/indexing-hadoop/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/indexing-service/pom.xml b/indexing-service/pom.xml index 09368b7c105..81260ba3295 100644 --- a/indexing-service/pom.xml +++ b/indexing-service/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git 
a/kafka-eight/pom.xml b/kafka-eight/pom.xml index 7c09c218cc7..e0c5fb43ebe 100644 --- a/kafka-eight/pom.xml +++ b/kafka-eight/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/kafka-seven/pom.xml b/kafka-seven/pom.xml index cbd3a85f0e0..a5e6ef4b806 100644 --- a/kafka-seven/pom.xml +++ b/kafka-seven/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/processing/pom.xml b/processing/pom.xml index 312b9b408cd..8b04ea52763 100644 --- a/processing/pom.xml +++ b/processing/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/rabbitmq/pom.xml b/rabbitmq/pom.xml index fd953df211a..4a7866d1030 100644 --- a/rabbitmq/pom.xml +++ b/rabbitmq/pom.xml @@ -9,7 +9,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/s3-extensions/pom.xml b/s3-extensions/pom.xml index 586529ab49f..bdd0d117b2c 100644 --- a/s3-extensions/pom.xml +++ b/s3-extensions/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/server/pom.xml b/server/pom.xml index d752f9a1811..0b9726a61b5 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/services/pom.xml b/services/pom.xml index f0f3090e198..0c45dc6c753 100644 --- a/services/pom.xml +++ b/services/pom.xml @@ -27,7 +27,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT From ddcea18631782b140e600d6a9afa55174dc5627a Mon Sep 17 00:00:00 2001 From: fjy Date: Fri, 8 Aug 2014 14:02:14 -0700 Subject: [PATCH 28/46] fix dependencies --- cassandra-storage/pom.xml | 2 +- common/pom.xml | 2 +- examples/pom.xml | 2 +- hdfs-storage/pom.xml | 2 +- histogram/pom.xml | 2 +- indexing-hadoop/pom.xml | 2 +- indexing-service/pom.xml | 2 +- kafka-eight/pom.xml | 2 +- kafka-seven/pom.xml | 2 +- processing/pom.xml | 2 +- rabbitmq/pom.xml | 2 +- s3-extensions/pom.xml | 2 +- server/pom.xml | 2 +- services/pom.xml | 2 +- 14 files changed, 14 insertions(+), 14 deletions(-) diff --git a/cassandra-storage/pom.xml b/cassandra-storage/pom.xml index e8ebb570a86..c8fec800e3e 100644 --- a/cassandra-storage/pom.xml +++ b/cassandra-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/common/pom.xml b/common/pom.xml index 0b57dcf484e..03e902df7c5 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/examples/pom.xml b/examples/pom.xml index 321e4a226e8..896b772738e 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/hdfs-storage/pom.xml b/hdfs-storage/pom.xml index 1fe657ba77a..ac2c29b7405 100644 --- a/hdfs-storage/pom.xml +++ b/hdfs-storage/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/histogram/pom.xml b/histogram/pom.xml index 24e9937621a..a520de6725b 100644 --- a/histogram/pom.xml +++ b/histogram/pom.xml @@ -27,7 +27,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/indexing-hadoop/pom.xml b/indexing-hadoop/pom.xml index 22fb248bf9b..f23382b6de9 100644 --- a/indexing-hadoop/pom.xml +++ b/indexing-hadoop/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/indexing-service/pom.xml b/indexing-service/pom.xml index 09368b7c105..81260ba3295 100644 --- a/indexing-service/pom.xml +++ b/indexing-service/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT 
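The hand-edited version bumps above repeat across every module pom (and patch 28 below replays the identical change set). For what it's worth, the `versions-maven-plugin` can perform the same sweep from the aggregator pom — a sketch assuming the plugin's stock `set`/`commit` goals:

```
# Rewrites the project version and each module's parent reference in one pass.
mvn versions:set -DnewVersion=0.7.0-SNAPSHOT
# Removes the pom.xml.versionsBackup safety copies once the result looks right.
mvn versions:commit
```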
diff --git a/kafka-eight/pom.xml b/kafka-eight/pom.xml index 7c09c218cc7..e0c5fb43ebe 100644 --- a/kafka-eight/pom.xml +++ b/kafka-eight/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/kafka-seven/pom.xml b/kafka-seven/pom.xml index cbd3a85f0e0..a5e6ef4b806 100644 --- a/kafka-seven/pom.xml +++ b/kafka-seven/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/processing/pom.xml b/processing/pom.xml index 312b9b408cd..8b04ea52763 100644 --- a/processing/pom.xml +++ b/processing/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/rabbitmq/pom.xml b/rabbitmq/pom.xml index fd953df211a..4a7866d1030 100644 --- a/rabbitmq/pom.xml +++ b/rabbitmq/pom.xml @@ -9,7 +9,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/s3-extensions/pom.xml b/s3-extensions/pom.xml index 586529ab49f..bdd0d117b2c 100644 --- a/s3-extensions/pom.xml +++ b/s3-extensions/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/server/pom.xml b/server/pom.xml index d752f9a1811..0b9726a61b5 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -28,7 +28,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT diff --git a/services/pom.xml b/services/pom.xml index f0f3090e198..0c45dc6c753 100644 --- a/services/pom.xml +++ b/services/pom.xml @@ -27,7 +27,7 @@ io.druid druid - 0.6.140-SNAPSHOT + 0.7.0-SNAPSHOT From d198959c606cdd0613760c407e914c269ccacb97 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Fri, 8 Aug 2014 14:05:10 -0700 Subject: [PATCH 29/46] fix test harness --- .../segment/data/CompressedLongsIndexedSupplierTest.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java index fc29e284443..b251134ddc7 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedLongsIndexedSupplierTest.java @@ -25,6 +25,8 @@ import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -36,8 +38,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -/** - */ +@RunWith(Parameterized.class) public class CompressedLongsIndexedSupplierTest extends CompressionStrategyTest { public CompressedLongsIndexedSupplierTest(CompressedObjectStrategy.CompressionStrategy compressionStrategy) From d082718ed2aadf1872edfc8e2e6fbf84e1a2b874 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 11 Aug 2014 14:28:44 -0700 Subject: [PATCH 30/46] fix assembly file --- services/src/assembly/assembly.xml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/services/src/assembly/assembly.xml b/services/src/assembly/assembly.xml index 3dfb6a68f7e..6ab4cefa7e6 100644 --- a/services/src/assembly/assembly.xml +++ b/services/src/assembly/assembly.xml @@ -14,6 +14,13 @@ config + + ../examples/config/_global + + * + + config/_global + ../examples/config/broker From b448deeca0ada385e5ae8af374dd0ce08e126ad0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Tue, 12 Aug 2014 14:02:08 -0700 Subject: 
[PATCH 31/46] fix compilation with Java 8 --- .../src/test/java/io/druid/query/RetryQueryRunnerTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index 16c8a51504a..9c6329a3df4 100644 --- a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -131,7 +131,7 @@ public class RetryQueryRunnerTest @Override public Sequence run(Query query, Map context) { - if (context.get("count") == 0) { + if ((int)context.get("count") == 0) { ((List) context.get(RetryQueryRunner.missingSegments)).add( new SegmentDescriptor( new Interval( @@ -368,4 +368,4 @@ public class RetryQueryRunnerTest Assert.assertTrue("Should have one entry in the list of missing segments", ((List) context.get(RetryQueryRunner.missingSegments)).size() == 1); } -} \ No newline at end of file +} From 6f0e768d8260290896c71041b8b59343283cb2d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Thu, 14 Aug 2014 17:09:12 -0700 Subject: [PATCH 32/46] add tools comand to create metadata storage tables --- .../main/java/io/druid/cli/CreateTables.java | 113 ++++++++++++++++++ services/src/main/java/io/druid/cli/Main.java | 2 +- 2 files changed, 114 insertions(+), 1 deletion(-) create mode 100644 services/src/main/java/io/druid/cli/CreateTables.java diff --git a/services/src/main/java/io/druid/cli/CreateTables.java b/services/src/main/java/io/druid/cli/CreateTables.java new file mode 100644 index 00000000000..3c788ab279e --- /dev/null +++ b/services/src/main/java/io/druid/cli/CreateTables.java @@ -0,0 +1,113 @@ +/* + * Druid - a distributed column store. + * Copyright (C) 2012, 2013, 2014 Metamarkets Group Inc. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
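A usage note before the class body: this command registers as `metadata-init` under the `tools` group (see the `Main.java` hunk at the end of the patch), so an invocation would look roughly like the following — the classpath, JDBC URI, and credentials here are placeholders, not values from the patch:

```
java -cp "config/_common:lib/*" io.druid.cli.Main tools metadata-init \
  --connectURI "jdbc:mysql://localhost:3306/druid" \
  --user druid \
  --password somepassword \
  --base druid
```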
+ */ + +package io.druid.cli; + +import com.google.common.collect.ImmutableList; +import com.google.inject.Binder; +import com.google.inject.Injector; +import com.google.inject.Key; +import com.google.inject.Module; +import com.metamx.common.logger.Logger; +import io.airlift.command.Command; +import io.airlift.command.Option; +import io.druid.db.DbConnector; +import io.druid.db.DbConnectorConfig; +import io.druid.db.DbTablesConfig; +import io.druid.guice.JsonConfigProvider; +import io.druid.guice.annotations.Self; +import io.druid.server.DruidNode; + +import java.util.List; + +@Command( + name = "metadata-init", + description = "Initialize Metadata Storage" +) +public class CreateTables extends GuiceRunnable +{ + @Option(name = "--connectURI", description = "Database JDBC connection string", required = true) + private String connectURI; + + @Option(name = "--user", description = "Database username", required = true) + private String user; + + @Option(name = "--password", description = "Database password", required = true) + private String password; + + @Option(name = "--base", description = "Base table name") + private String base; + + private static final Logger log = new Logger(CreateTables.class); + + public CreateTables() + { + super(log); + } + + @Override + protected List getModules() + { + return ImmutableList.of( + new Module() + { + @Override + public void configure(Binder binder) + { + JsonConfigProvider.bindInstance( + binder, Key.get(DbConnectorConfig.class), new DbConnectorConfig() + { + @Override + public String getConnectURI() + { + return connectURI; + } + + @Override + public String getUser() + { + return user; + } + + @Override + public String getPassword() + { + return password; + } + } + ); + JsonConfigProvider.bindInstance( + binder, Key.get(DbTablesConfig.class), DbTablesConfig.fromBase(base) + ); + JsonConfigProvider.bindInstance( + binder, Key.get(DruidNode.class, Self.class), new DruidNode("tools", "localhost", -1) + ); + } + } + ); + } + + @Override + public void run() + { + final Injector injector = makeInjector(); + DbConnector dbConnector = injector.getInstance(DbConnector.class); + dbConnector.createSegmentTable(); + } +} diff --git a/services/src/main/java/io/druid/cli/Main.java b/services/src/main/java/io/druid/cli/Main.java index 0e23a0e81e7..00f48d11f94 100644 --- a/services/src/main/java/io/druid/cli/Main.java +++ b/services/src/main/java/io/druid/cli/Main.java @@ -61,7 +61,7 @@ public class Main builder.withGroup("tools") .withDescription("Various tools for working with Druid") .withDefaultCommand(Help.class) - .withCommands(ConvertProperties.class, DruidJsonValidator.class, PullDependencies.class); + .withCommands(ConvertProperties.class, DruidJsonValidator.class, PullDependencies.class, CreateTables.class); builder.withGroup("index") .withDescription("Run indexing for druid") From 17a3e65dc43da8b24e53490fc34a8e37b92ce343 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Fri, 15 Aug 2014 12:50:30 -0700 Subject: [PATCH 33/46] add missing tables --- services/src/main/java/io/druid/cli/CreateTables.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/src/main/java/io/druid/cli/CreateTables.java b/services/src/main/java/io/druid/cli/CreateTables.java index 3c788ab279e..5dfbf02de45 100644 --- a/services/src/main/java/io/druid/cli/CreateTables.java +++ b/services/src/main/java/io/druid/cli/CreateTables.java @@ -109,5 +109,8 @@ public class CreateTables extends GuiceRunnable final Injector injector = makeInjector(); 
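// Note (not part of the patch): makeInjector() materializes the module list
// built in getModules() above, so the DbConnectorConfig assembled from the
// CLI options is exactly what the DbConnector resolved below uses for DDL.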
DbConnector dbConnector = injector.getInstance(DbConnector.class); dbConnector.createSegmentTable(); + dbConnector.createRulesTable(); + dbConnector.createConfigTable(); + dbConnector.createTaskTables(); } } From 1fd30ab58852876808f0b3de8bccb4ab392966fc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Fri, 15 Aug 2014 17:14:05 -0700 Subject: [PATCH 34/46] default service/host/port for all nodes --- .../config/ForkingTaskRunnerConfig.java | 2 +- .../main/java/io/druid/server/DruidNode.java | 21 +++++++++++++------ .../server/coordinator/DruidCoordinator.java | 2 +- .../coordinator/DruidCoordinatorConfig.java | 3 --- .../initialization/InitializationTest.java | 2 +- .../coordinator/DruidCoordinatorTest.java | 6 ------ .../src/main/java/io/druid/cli/CliBridge.java | 4 ++++ .../src/main/java/io/druid/cli/CliBroker.java | 4 ++++ .../java/io/druid/cli/CliCoordinator.java | 4 ++++ .../main/java/io/druid/cli/CliHistorical.java | 4 ++++ .../java/io/druid/cli/CliMiddleManager.java | 4 ++++ .../main/java/io/druid/cli/CliOverlord.java | 4 ++++ .../main/java/io/druid/cli/CliRealtime.java | 13 +++++++++++- .../src/main/java/io/druid/cli/CliRouter.java | 4 ++++ 14 files changed, 58 insertions(+), 19 deletions(-) diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfig.java b/indexing-service/src/main/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfig.java index 8d89d834785..8807debcb3d 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/config/ForkingTaskRunnerConfig.java @@ -49,7 +49,7 @@ public class ForkingTaskRunnerConfig @JsonProperty @Min(1024) @Max(65535) - private int startPort = 8081; + private int startPort = 8100; @JsonProperty @NotNull diff --git a/server/src/main/java/io/druid/server/DruidNode.java b/server/src/main/java/io/druid/server/DruidNode.java index 4c8528bd6ee..1928d6487d8 100644 --- a/server/src/main/java/io/druid/server/DruidNode.java +++ b/server/src/main/java/io/druid/server/DruidNode.java @@ -19,8 +19,10 @@ package io.druid.server; +import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.inject.name.Named; import io.druid.common.utils.SocketUtil; import javax.validation.constraints.Max; @@ -31,15 +33,17 @@ import javax.validation.constraints.NotNull; */ public class DruidNode { + public static final String DEFAULT_HOST = "localhost"; + private String hostNoPort; @JsonProperty("service") @NotNull - private String serviceName = null; + private String serviceName; @JsonProperty @NotNull - private String host = null; + private String host; @JsonProperty @Min(0) @Max(0xffff) @@ -47,16 +51,21 @@ public class DruidNode @JsonCreator public DruidNode( - @JsonProperty("service") String serviceName, + @JacksonInject @Named("serviceName") @JsonProperty("service") String serviceName, @JsonProperty("host") String host, - @JsonProperty("port") Integer port + @JacksonInject @Named("servicePort") @JsonProperty("port") Integer port ) + { + init(serviceName, host, port); + } + + private void init(String serviceName, String host, Integer port) { this.serviceName = serviceName; if (port == null) { if (host == null) { - setHostAndPort(null, -1, null); + setHostAndPort(DEFAULT_HOST, -1, DEFAULT_HOST); } else if (host.contains(":")) { final String[] hostParts = host.split(":"); @@ 
-74,7 +83,7 @@ public class DruidNode } else { if (host == null || host.contains(":")) { - setHostAndPort(host, port, host == null ? null : host.split(":")[0]); + setHostAndPort(host == null ? DEFAULT_HOST : host, port, host == null ? DEFAULT_HOST : host.split(":")[0]); } else { setHostAndPort(String.format("%s:%d", host, port), port, host); diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java index 51b1773e26f..3b6a7276d57 100644 --- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java +++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java @@ -454,7 +454,7 @@ public class DruidCoordinator private LeaderLatch createNewLeaderLatch() { final LeaderLatch newLeaderLatch = new LeaderLatch( - curator, ZKPaths.makePath(zkPaths.getCoordinatorPath(), COORDINATOR_OWNER_NODE), config.getHost() + curator, ZKPaths.makePath(zkPaths.getCoordinatorPath(), COORDINATOR_OWNER_NODE), self.getHost() ); newLeaderLatch.addListener( diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorConfig.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorConfig.java index 571e70ee3bd..e58747dfb95 100644 --- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorConfig.java +++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorConfig.java @@ -27,9 +27,6 @@ import org.skife.config.Default; */ public abstract class DruidCoordinatorConfig { - @Config("druid.host") - public abstract String getHost(); - @Config("druid.coordinator.startDelay") @Default("PT300s") public abstract Duration getCoordinatorStartDelay(); diff --git a/server/src/test/java/io/druid/initialization/InitializationTest.java b/server/src/test/java/io/druid/initialization/InitializationTest.java index f4bcc3708d2..6f60a51954d 100644 --- a/server/src/test/java/io/druid/initialization/InitializationTest.java +++ b/server/src/test/java/io/druid/initialization/InitializationTest.java @@ -109,7 +109,7 @@ public class InitializationTest public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("hadoop-indexer", "localhost", -1) + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test-inject", "localhost", -1) ); } } diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java index c0d6bfa1c36..d3d9d697047 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java @@ -77,12 +77,6 @@ public class DruidCoordinatorTest coordinator = new DruidCoordinator( new DruidCoordinatorConfig() { - @Override - public String getHost() - { - return null; - } - @Override public Duration getCoordinatorStartDelay() { diff --git a/services/src/main/java/io/druid/cli/CliBridge.java b/services/src/main/java/io/druid/cli/CliBridge.java index ffed4789727..6da1ec21c38 100644 --- a/services/src/main/java/io/druid/cli/CliBridge.java +++ b/services/src/main/java/io/druid/cli/CliBridge.java @@ -5,6 +5,7 @@ import com.google.common.collect.ImmutableList; import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.Provides; +import com.google.inject.name.Names; import com.metamx.common.lifecycle.Lifecycle; import com.metamx.common.logger.Logger; import 
io.airlift.command.Command; @@ -69,6 +70,9 @@ public class CliBridge extends ServerRunnable @Override public void configure(Binder binder) { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/bridge"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8081); + ConfigProvider.bind(binder, BridgeCuratorConfig.class); binder.bind(BridgeZkCoordinator.class).in(ManageLifecycle.class); diff --git a/services/src/main/java/io/druid/cli/CliBroker.java b/services/src/main/java/io/druid/cli/CliBroker.java index 41336ed7c33..af37fa80134 100644 --- a/services/src/main/java/io/druid/cli/CliBroker.java +++ b/services/src/main/java/io/druid/cli/CliBroker.java @@ -22,6 +22,7 @@ package io.druid.cli; import com.google.common.collect.ImmutableList; import com.google.inject.Binder; import com.google.inject.Module; +import com.google.inject.name.Names; import com.metamx.common.logger.Logger; import io.airlift.command.Command; import io.druid.client.BrokerServerView; @@ -78,6 +79,9 @@ public class CliBroker extends ServerRunnable @Override public void configure(Binder binder) { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/broker"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8082); + binder.bind(QueryToolChestWarehouse.class).to(MapQueryToolChestWarehouse.class); binder.bind(CachingClusteredClient.class).in(LazySingleton.class); diff --git a/services/src/main/java/io/druid/cli/CliCoordinator.java b/services/src/main/java/io/druid/cli/CliCoordinator.java index 95919bc3de5..4d060c4acac 100644 --- a/services/src/main/java/io/druid/cli/CliCoordinator.java +++ b/services/src/main/java/io/druid/cli/CliCoordinator.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableList; import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.Provides; +import com.google.inject.name.Names; import com.metamx.common.concurrent.ScheduledExecutorFactory; import com.metamx.common.logger.Logger; import io.airlift.command.Command; @@ -87,6 +88,9 @@ public class CliCoordinator extends ServerRunnable @Override public void configure(Binder binder) { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/coordinator"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8081); + ConfigProvider.bind(binder, DruidCoordinatorConfig.class); JsonConfigProvider.bind(binder, "druid.manager.segments", DatabaseSegmentManagerConfig.class); diff --git a/services/src/main/java/io/druid/cli/CliHistorical.java b/services/src/main/java/io/druid/cli/CliHistorical.java index 6d4152b9226..b1098114e72 100644 --- a/services/src/main/java/io/druid/cli/CliHistorical.java +++ b/services/src/main/java/io/druid/cli/CliHistorical.java @@ -22,6 +22,7 @@ package io.druid.cli; import com.google.common.collect.ImmutableList; import com.google.inject.Binder; import com.google.inject.Module; +import com.google.inject.name.Names; import com.metamx.common.logger.Logger; import io.airlift.command.Command; import io.druid.client.cache.Cache; @@ -68,6 +69,9 @@ public class CliHistorical extends ServerRunnable @Override public void configure(Binder binder) { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/historical"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8083); + binder.bind(ServerManager.class).in(LazySingleton.class); binder.bind(ZkCoordinator.class).in(ManageLifecycle.class); 
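Stepping out of the diff briefly: the `bindConstant().annotatedWith(Names.named(...))` calls being added to each CLI class are plain Guice constant bindings. A self-contained sketch of the binding-plus-injection round trip — the `Node` consumer class is illustrative; in the patch the actual consumer is `DruidNode`:

```java
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.name.Named;
import com.google.inject.name.Names;

public class NamedDefaultsExample
{
  static class Node
  {
    final String serviceName;
    final int port;

    @Inject
    Node(@Named("serviceName") String serviceName, @Named("servicePort") int port)
    {
      this.serviceName = serviceName;
      this.port = port;
    }
  }

  public static void main(String[] args)
  {
    Injector injector = Guice.createInjector(
        new Module()
        {
          @Override
          public void configure(Binder binder)
          {
            // Same wiring the CLI classes use, e.g. druid/broker on 8082.
            binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/broker");
            binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8082);
          }
        }
    );
    Node node = injector.getInstance(Node.class);
    System.out.println(node.serviceName + ":" + node.port); // druid/broker:8082
  }
}
```

Because `DruidNode`'s creator parameters keep their `@JsonProperty` annotations, explicitly configured `druid.service`/`druid.port` values still override these per-process defaults.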
binder.bind(QuerySegmentWalker.class).to(ServerManager.class).in(LazySingleton.class); diff --git a/services/src/main/java/io/druid/cli/CliMiddleManager.java b/services/src/main/java/io/druid/cli/CliMiddleManager.java index 381b382269a..e458a350fd2 100644 --- a/services/src/main/java/io/druid/cli/CliMiddleManager.java +++ b/services/src/main/java/io/druid/cli/CliMiddleManager.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.Provides; +import com.google.inject.name.Names; import com.google.inject.util.Providers; import com.metamx.common.logger.Logger; import io.airlift.command.Command; @@ -74,6 +75,9 @@ public class CliMiddleManager extends ServerRunnable @Override public void configure(Binder binder) { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/middlemanager"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8091); + IndexingServiceModuleHelper.configureTaskRunnerConfigs(binder); JsonConfigProvider.bind(binder, "druid.indexer.task", TaskConfig.class); diff --git a/services/src/main/java/io/druid/cli/CliOverlord.java b/services/src/main/java/io/druid/cli/CliOverlord.java index 95780b57156..b39d1847858 100644 --- a/services/src/main/java/io/druid/cli/CliOverlord.java +++ b/services/src/main/java/io/druid/cli/CliOverlord.java @@ -26,6 +26,7 @@ import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.MapBinder; +import com.google.inject.name.Names; import com.google.inject.servlet.GuiceFilter; import com.google.inject.util.Providers; import com.metamx.common.logger.Logger; @@ -114,6 +115,9 @@ public class CliOverlord extends ServerRunnable @Override public void configure(Binder binder) { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/overlord"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8090); + JsonConfigProvider.bind(binder, "druid.indexer.queue", TaskQueueConfig.class); JsonConfigProvider.bind(binder, "druid.indexer.task", TaskConfig.class); diff --git a/services/src/main/java/io/druid/cli/CliRealtime.java b/services/src/main/java/io/druid/cli/CliRealtime.java index e80d22242c5..a34e3bd0eda 100644 --- a/services/src/main/java/io/druid/cli/CliRealtime.java +++ b/services/src/main/java/io/druid/cli/CliRealtime.java @@ -20,6 +20,9 @@ package io.druid.cli; import com.google.common.collect.ImmutableList; +import com.google.inject.Binder; +import com.google.inject.Module; +import com.google.inject.name.Names; import com.metamx.common.logger.Logger; import io.airlift.command.Command; import io.druid.guice.RealtimeModule; @@ -45,7 +48,15 @@ public class CliRealtime extends ServerRunnable protected List getModules() { return ImmutableList.of( - new RealtimeModule() + new RealtimeModule(), + new Module() { + @Override + public void configure(Binder binder) + { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/realtime"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8084); + } + } ); } } diff --git a/services/src/main/java/io/druid/cli/CliRouter.java b/services/src/main/java/io/druid/cli/CliRouter.java index 355e1f993a3..550f94e7c47 100644 --- a/services/src/main/java/io/druid/cli/CliRouter.java +++ b/services/src/main/java/io/druid/cli/CliRouter.java @@ -24,6 +24,7 @@ import com.google.inject.Binder; import com.google.inject.Module; import 
com.google.inject.Provides; import com.google.inject.TypeLiteral; +import com.google.inject.name.Names; import com.metamx.common.logger.Logger; import io.airlift.command.Command; import io.druid.curator.discovery.DiscoveryModule; @@ -72,6 +73,9 @@ public class CliRouter extends ServerRunnable @Override public void configure(Binder binder) { + binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/router"); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8888); + JsonConfigProvider.bind(binder, "druid.router", TieredBrokerConfig.class); binder.bind(CoordinatorRuleManager.class); From 8f7c6050f97733a3caa6e7f9d61b3c2ed5b3046d Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 18 Aug 2014 12:56:51 -0700 Subject: [PATCH 35/46] fix --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2a5f1efae96..40734ed8d51 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ Druid is a distributed, column-oriented, real-time analytics data store that is commonly used to power exploratory dashboards in multi-tenant environments. Druid excels as a data warehousing solution for fast aggregate queries on petabyte sized data sets. Druid supports a variety of flexible filters, exact -calculations, approximate algorithms, and other useful calculations Druid can +calculations, approximate algorithms, and other useful calculations. Druid can load both streaming and batch data and integrates with Storm and Hadoop. ### More Information From 78054d7f0516424b066fb80208d0c9a59c5d12c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Mon, 18 Aug 2014 13:37:47 -0700 Subject: [PATCH 36/46] fix test initialization --- .../server/initialization/JettyTest.java | 73 +++++++++---------- 1 file changed, 35 insertions(+), 38 deletions(-) diff --git a/server/src/test/java/io/druid/server/initialization/JettyTest.java b/server/src/test/java/io/druid/server/initialization/JettyTest.java index 3d03bf8a22a..a95e883b65d 100644 --- a/server/src/test/java/io/druid/server/initialization/JettyTest.java +++ b/server/src/test/java/io/druid/server/initialization/JettyTest.java @@ -20,11 +20,12 @@ package io.druid.server.initialization; import com.google.api.client.repackaged.com.google.common.base.Throwables; -import com.google.common.collect.Lists; +import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.ListenableFuture; import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.Injector; +import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.servlet.GuiceFilter; import com.metamx.common.lifecycle.Lifecycle; @@ -33,10 +34,14 @@ import com.metamx.http.client.response.InputStreamResponseHandler; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import io.druid.guice.Jerseys; +import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; +import io.druid.guice.LifecycleModule; import io.druid.guice.annotations.Global; import io.druid.guice.GuiceInjectors; +import io.druid.guice.annotations.Self; import io.druid.initialization.Initialization; +import io.druid.server.DruidNode; import org.apache.commons.io.IOUtils; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; @@ -78,10 +83,7 @@ public class JettyTest public static void setProperties() { - System.setProperty("druid.host", "localhost:9999"); - 
System.setProperty("druid.port", "9999"); System.setProperty("druid.server.http.numThreads", "20"); - System.setProperty("druid.service", "test"); System.setProperty("druid.server.http.maxIdleTime", "PT1S"); System.setProperty("druid.global.http.readTimeout", "PT1S"); } @@ -91,22 +93,24 @@ public class JettyTest { setProperties(); Injector injector = Initialization.makeInjectorWithModules( - GuiceInjectors.makeStartupInjector(), Lists.newArrayList( - new Module() - { - @Override - public void configure(Binder binder) - { - binder.bind(JettyServerInitializer.class).to(JettyServerInit.class).in(LazySingleton.class); - Jerseys.addResource(binder, SlowResource.class); - Jerseys.addResource(binder, ExceptionResource.class); - } - } - ) + GuiceInjectors.makeStartupInjector(), ImmutableList.of( + new Module() + { + @Override + public void configure(Binder binder) + { + JsonConfigProvider.bindInstance( + binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", 9999) + ); + binder.bind(JettyServerInitializer.class).to(JettyServerInit.class).in(LazySingleton.class); + Jerseys.addResource(binder, SlowResource.class); + Jerseys.addResource(binder, ExceptionResource.class); + LifecycleModule.register(binder, Server.class); + } + } + ) ); lifecycle = injector.getInstance(Lifecycle.class); - // Jetty is Lazy Initialized do a getInstance - injector.getInstance(Server.class); lifecycle.start(); ClientHolder holder = injector.getInstance(ClientHolder.class); client = holder.getClient(); @@ -136,12 +140,9 @@ public class JettyTest long startTime = System.currentTimeMillis(); long startTime2 = 0; try { - ListenableFuture go = client.get( - new URL( - "http://localhost:9999/slow/hello" - ) - ) - .go(new StatusResponseHandler(Charset.defaultCharset())); + ListenableFuture go = + client.get(new URL("http://localhost:9999/slow/hello")) + .go(new StatusResponseHandler(Charset.defaultCharset())); startTime2 = System.currentTimeMillis(); go.get(); } @@ -150,13 +151,13 @@ public class JettyTest } finally { System.out - .println( - "Response time client" - + (System.currentTimeMillis() - startTime) - + "time taken for getting future" - + (System.currentTimeMillis() - startTime2) - + "Counter " + count.incrementAndGet() - ); + .println( + "Response time client" + + (System.currentTimeMillis() - startTime) + + "time taken for getting future" + + (System.currentTimeMillis() - startTime2) + + "Counter " + count.incrementAndGet() + ); latch.countDown(); } @@ -178,13 +179,9 @@ public class JettyTest // above bug is not fixed in jetty for gzip encoding, and the chunk is still finalized instead of throwing exception. 
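The reworked test setup above drops the druid.host, druid.port, and druid.service system properties: the node identity is bound directly as a DruidNode under @Self, and the Jetty Server is registered with the lifecycle so that lifecycle.start() brings it up eagerly. Below is a condensed sketch of that wiring in isolation; the class name and node values are illustrative, and the JettyServerInitializer and resource bindings the real test installs are omitted:

import com.google.common.collect.ImmutableList;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.metamx.common.lifecycle.Lifecycle;
import io.druid.guice.GuiceInjectors;
import io.druid.guice.JsonConfigProvider;
import io.druid.guice.LifecycleModule;
import io.druid.guice.annotations.Self;
import io.druid.initialization.Initialization;
import io.druid.server.DruidNode;
import org.eclipse.jetty.server.Server;

// Illustrative stand-alone bootstrap, mirroring the calls the patched test makes.
public class ServerBootstrapSketch
{
  public static void main(String[] args) throws Exception
  {
    Injector injector = Initialization.makeInjectorWithModules(
        GuiceInjectors.makeStartupInjector(),
        ImmutableList.of(
            new Module()
            {
              @Override
              public void configure(Binder binder)
              {
                // The bound DruidNode replaces the old host/port/service properties.
                JsonConfigProvider.bindInstance(
                    binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", 9999)
                );
                // Eager registration: the lifecycle starts Jetty, so no manual
                // injector.getInstance(Server.class) call is needed any more.
                LifecycleModule.register(binder, Server.class);
              }
            }
        )
    );

    Lifecycle lifecycle = injector.getInstance(Lifecycle.class);
    lifecycle.start();
    try {
      // exercise the server here
    }
    finally {
      lifecycle.stop();
    }
  }
}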
public void testChunkNotFinalized() throws Exception { - ListenableFuture go = client.get( - new URL( - "http://localhost:9999/exception/exception" - ) - - ) - .go(new InputStreamResponseHandler()); + ListenableFuture go = + client.get(new URL("http://localhost:9999/exception/exception")) + .go(new InputStreamResponseHandler()); try { StringWriter writer = new StringWriter(); IOUtils.copy(go.get(), writer, "utf-8"); From 88de45c6955a006c65af3c98f6edba0117fb91a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Fri, 29 Aug 2014 15:13:52 -0700 Subject: [PATCH 37/46] ensure jackson injection is available on startup --- common/src/main/java/io/druid/guice/DruidSecondaryModule.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/common/src/main/java/io/druid/guice/DruidSecondaryModule.java b/common/src/main/java/io/druid/guice/DruidSecondaryModule.java index fbafb29d42b..d768a60a7c1 100644 --- a/common/src/main/java/io/druid/guice/DruidSecondaryModule.java +++ b/common/src/main/java/io/druid/guice/DruidSecondaryModule.java @@ -71,7 +71,8 @@ public class DruidSecondaryModule implements Module binder.install(new DruidGuiceExtensions()); binder.bind(Properties.class).toInstance(properties); binder.bind(ConfigurationObjectFactory.class).toInstance(factory); - binder.bind(ObjectMapper.class).to(Key.get(ObjectMapper.class, Json.class)); + // make objectMapper eager to ensure jackson gets setup with guice injection for JsonConfigurator + binder.bind(ObjectMapper.class).to(Key.get(ObjectMapper.class, Json.class)).asEagerSingleton(); binder.bind(Validator.class).toInstance(validator); binder.bind(JsonConfigurator.class).toInstance(jsonConfigurator); } From 75134335de5cb732f541f3c2eb1e5cc9a42b5b58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Sat, 30 Aug 2014 22:45:36 -0700 Subject: [PATCH 38/46] fix last merge from master not being forward-compatible --- .../java/io/druid/query/TimewarpOperator.java | 7 +- .../io/druid/query/TimewarpOperatorTest.java | 12 +- .../timeseries/TimeseriesQueryRunnerTest.java | 130 +++++++++--------- 3 files changed, 78 insertions(+), 71 deletions(-) diff --git a/processing/src/main/java/io/druid/query/TimewarpOperator.java b/processing/src/main/java/io/druid/query/TimewarpOperator.java index 49a8fb1ed85..3f197795d6d 100644 --- a/processing/src/main/java/io/druid/query/TimewarpOperator.java +++ b/processing/src/main/java/io/druid/query/TimewarpOperator.java @@ -33,6 +33,8 @@ import org.joda.time.Interval; import org.joda.time.Period; import java.util.Arrays; +import java.util.Map; + /** * TimewarpOperator is an example post-processing operator that maps current time @@ -79,7 +81,7 @@ public class TimewarpOperator implements PostProcessingOperator return new QueryRunner() { @Override - public Sequence run(final Query query) + public Sequence run(final Query query, final Map context) { final long offset = computeOffset(now); @@ -90,7 +92,8 @@ public class TimewarpOperator implements PostProcessingOperator ); return Sequences.map( baseRunner.run( - query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Arrays.asList(modifiedInterval))) + query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Arrays.asList(modifiedInterval))), + context ), new Function() { diff --git a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java index f1703e8099c..f3aff876944 100644 --- 
a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java +++ b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java @@ -35,9 +35,13 @@ import org.junit.Assert; import org.junit.Test; import java.util.Arrays; +import java.util.Map; + public class TimewarpOperatorTest { + public static final ImmutableMap CONTEXT = ImmutableMap.of(); + TimewarpOperator> testOperator = new TimewarpOperator<>( new Interval(new DateTime("2014-01-01"), new DateTime("2014-01-15")), new Period("P1W"), @@ -75,7 +79,7 @@ public class TimewarpOperatorTest new QueryRunner>() { @Override - public Sequence> run(Query> query) + public Sequence> run(Query> query, Map context) { return Sequences.simple( ImmutableList.of( @@ -120,7 +124,7 @@ public class TimewarpOperatorTest new TimeseriesResultValue(ImmutableMap.of("metric", 5)) ) ), - Sequences.toList(queryRunner.run(query), Lists.>newArrayList()) + Sequences.toList(queryRunner.run(query, CONTEXT), Lists.>newArrayList()) ); @@ -134,7 +138,7 @@ public class TimewarpOperatorTest new QueryRunner>() { @Override - public Sequence> run(Query> query) + public Sequence> run(Query> query, Map context) { return Sequences.simple( ImmutableList.of( @@ -161,7 +165,7 @@ public class TimewarpOperatorTest new TimeBoundaryResultValue(ImmutableMap.of("maxTime", new DateTime("2014-08-02"))) ) ), - Sequences.toList(timeBoundaryRunner.run(timeBoundaryQuery), Lists.>newArrayList()) + Sequences.toList(timeBoundaryRunner.run(timeBoundaryQuery, CONTEXT), Lists.>newArrayList()) ); } diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 635e49ccd1a..ec2ff8e277d 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -54,7 +54,7 @@ import org.junit.runners.Parameterized; import java.io.IOException; import java.util.Arrays; import java.util.Collection; -import java.util.HashMap; +import java.util.Map; import java.util.List; /** @@ -62,6 +62,9 @@ import java.util.List; @RunWith(Parameterized.class) public class TimeseriesQueryRunnerTest { + + public static final Map CONTEXT = ImmutableMap.of(); + @Parameterized.Parameters public static Collection constructorFeeder() throws IOException { @@ -103,10 +106,9 @@ public class TimeseriesQueryRunnerTest DateTime expectedEarliest = new DateTime("2011-01-12"); DateTime expectedLast = new DateTime("2011-04-15"); - HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); @@ -170,9 +172,8 @@ public class TimeseriesQueryRunnerTest DateTime expectedEarliest = new DateTime("2011-01-12"); DateTime expectedLast = new DateTime("2011-04-15"); - HashMap context = new HashMap(); Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); Result result = results.iterator().next(); @@ -217,9 +218,9 @@ public class TimeseriesQueryRunnerTest DateTime expectedEarliest = new DateTime("2011-01-12"); DateTime expectedLast = new DateTime("2011-04-15"); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); @@ -283,9 +284,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = 
Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); @@ -330,9 +331,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); @@ -372,9 +373,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results1 = Sequences.toList( - runner.run(query1, context), + runner.run(query1, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -411,7 +412,7 @@ public class TimeseriesQueryRunnerTest ); Iterable> results2 = Sequences.toList( - runner.run(query2, context), + runner.run(query2, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults2, results2); @@ -462,9 +463,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results1 = Sequences.toList( - runner.run(query1, context), + runner.run(query1, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -532,7 +533,7 @@ public class TimeseriesQueryRunnerTest ); Iterable> results1 = Sequences.toList( - runner.run(query1), + runner.run(query1, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -579,7 +580,7 @@ public class TimeseriesQueryRunnerTest ); Iterable> results1 = Sequences.toList( - runner.run(query1), + runner.run(query1, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -619,9 +620,8 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); Iterable> results1 = Sequences.toList( - runner.run(query1, context), + runner.run(query1, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults1, results1); @@ -659,7 +659,7 @@ public class TimeseriesQueryRunnerTest ); Iterable> results2 = Sequences.toList( - runner.run(query2, context), + runner.run(query2, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults2, results2); @@ -692,9 +692,9 @@ public class TimeseriesQueryRunnerTest .build(); List> expectedResults = Arrays.asList(); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -742,9 +742,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -792,9 +792,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -842,9 +842,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -892,9 +892,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + 
runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -942,9 +942,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1000,9 +1000,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1058,9 +1058,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1116,9 +1116,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1172,9 +1172,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1234,9 +1234,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1278,9 +1278,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1322,9 +1322,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1380,9 +1380,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> results = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, results); @@ -1420,9 +1420,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> actualResults = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1461,9 +1461,9 @@ public class TimeseriesQueryRunnerTest ) ) ); - HashMap context = new HashMap(); + Iterable> actualResults = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1480,7 +1480,7 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - HashMap context = new HashMap(); + Iterable> 
expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1490,12 +1490,12 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(), - context + CONTEXT ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1512,7 +1512,7 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - HashMap context = new HashMap(); + Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1523,12 +1523,12 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(), - context + CONTEXT ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1574,7 +1574,7 @@ public class TimeseriesQueryRunnerTest ) ) .build(); - HashMap context = new HashMap(); + Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1585,12 +1585,12 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(), - context + CONTEXT ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); @@ -1638,7 +1638,7 @@ public class TimeseriesQueryRunnerTest ) ) .build(); - HashMap context = new HashMap(); + Iterable> expectedResults = Sequences.toList( runner.run( Druids.newTimeseriesQueryBuilder() @@ -1649,12 +1649,12 @@ public class TimeseriesQueryRunnerTest .aggregators(QueryRunnerTestHelper.commonAggregators) .postAggregators(Arrays.asList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(), - context + CONTEXT ), Lists.>newArrayList() ); Iterable> actualResults = Sequences.toList( - runner.run(query, context), + runner.run(query, CONTEXT), Lists.>newArrayList() ); TestHelper.assertExpectedResults(expectedResults, actualResults); From e62ed40d869854e9425c8ef3c1f7b9a7427a8ad4 Mon Sep 17 00:00:00 2001 From: fjy Date: Thu, 11 Sep 2014 16:49:18 -0700 Subject: [PATCH 39/46] make some logs not emit --- .../src/main/java/io/druid/client/CachingClusteredClient.java | 2 +- server/src/main/java/io/druid/client/DirectDruidClient.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index 0a44e41f768..ba3b49f25d2 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -321,7 +321,7 @@ public class CachingClusteredClient implements QueryRunner final QueryRunner clientQueryable = serverView.getQueryRunner(server); if (clientQueryable == null) { - log.makeAlert("WTF!? 
server[%s] doesn't have a client Queryable?", server).emit(); + log.error("WTF!? server[%s] doesn't have a client Queryable?", server); continue; } diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index a2fde2c1033..45f40133096 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -343,7 +343,7 @@ public class DirectDruidClient implements QueryRunner } } catch (IOException | InterruptedException | ExecutionException e) { - throw new RE(e, "Failure getting results from[%s]", url); + log.error(e, "Failure getting results from[%s]. Likely a timeout occurred.", url); } catch (CancellationException e) { throw new QueryInterruptedException("Query cancelled"); From cbf50769618733805b86eb5ea82fc3459b857bda Mon Sep 17 00:00:00 2001 From: fjy Date: Fri, 12 Sep 2014 11:43:42 -0700 Subject: [PATCH 40/46] still propagate log --- server/src/main/java/io/druid/client/DirectDruidClient.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index 45f40133096..2e0e22ce48a 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -343,7 +343,7 @@ public class DirectDruidClient implements QueryRunner } } catch (IOException | InterruptedException | ExecutionException e) { - log.error(e, "Failure getting results from[%s]. Likely a timeout occurred.", url); + throw new RE(e, "Failure getting results from[%s]. Likely a timeout occurred.", url); } catch (CancellationException e) { throw new QueryInterruptedException("Query cancelled"); From 257fb3f18eba7595c7df06bd2c4bf23c8fe18dd0 Mon Sep 17 00:00:00 2001 From: fjy Date: Fri, 12 Sep 2014 15:47:41 -0700 Subject: [PATCH 41/46] fix retry logic --- .../src/main/java/io/druid/query/RetryQueryRunner.java | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunner.java b/processing/src/main/java/io/druid/query/RetryQueryRunner.java index d7c8eaa5724..9f3921bd87d 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunner.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunner.java @@ -27,6 +27,7 @@ import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Yielder; import com.metamx.common.guava.YieldingAccumulator; import com.metamx.common.guava.YieldingSequenceBase; +import io.druid.query.spec.MultipleSpecificSegmentSpec; import io.druid.segment.SegmentMissingException; import java.util.List; @@ -73,13 +74,18 @@ public class RetryQueryRunner implements QueryRunner for (int i = 0; i < config.numTries(); i++) { context.put(MISSING_SEGMENTS_KEY, Lists.newArrayList()); - yielder = baseRunner.run(query, context).toYielder(initValue, accumulator); + final Query retryQuery = query.withQuerySegmentSpec( + new MultipleSpecificSegmentSpec( + missingSegments + ) + ); + yielder = baseRunner.run(retryQuery, context).toYielder(initValue, accumulator); if (getMissingSegments(context).isEmpty()) { break; } } - final List finalMissingSegs= getMissingSegments(context); + final List finalMissingSegs = getMissingSegments(context); if (!config.returnPartialResults() && !finalMissingSegs.isEmpty()) { throw new SegmentMissingException("No results found for segments[%s]", finalMissingSegs); } From 
1615e1ac63fcb21ad9561efd8f6d723c49a177ce Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 15 Sep 2014 12:43:35 -0700 Subject: [PATCH 42/46] update druid api ver --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 6d227fcbf4c..898e28a3448 100644 --- a/pom.xml +++ b/pom.xml @@ -41,7 +41,7 @@ UTF-8 0.26.6 2.6.0 - 0.2.8 + 0.2.9 From aa28bc06130a5cfb595425efb87727d59df098e0 Mon Sep 17 00:00:00 2001 From: fjy Date: Mon, 15 Sep 2014 13:14:23 -0700 Subject: [PATCH 43/46] address cr --- server/src/main/java/io/druid/client/DirectDruidClient.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index 2e0e22ce48a..c354bc18259 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -343,7 +343,7 @@ public class DirectDruidClient implements QueryRunner } } catch (IOException | InterruptedException | ExecutionException e) { - throw new RE(e, "Failure getting results from[%s]. Likely a timeout occurred.", url); + throw new RE(e, "Failure getting results from[%s] because of [%s]", url, e.getMessage()); } catch (CancellationException e) { throw new QueryInterruptedException("Query cancelled"); From 0209b7e44013c785bb6a7ed3e1c4bf6c842cb918 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Mon, 15 Sep 2014 15:56:35 -0700 Subject: [PATCH 44/46] fix bridge port --- services/src/main/java/io/druid/cli/CliBridge.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/src/main/java/io/druid/cli/CliBridge.java b/services/src/main/java/io/druid/cli/CliBridge.java index 6da1ec21c38..a1f893f968e 100644 --- a/services/src/main/java/io/druid/cli/CliBridge.java +++ b/services/src/main/java/io/druid/cli/CliBridge.java @@ -71,7 +71,7 @@ public class CliBridge extends ServerRunnable public void configure(Binder binder) { binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/bridge"); - binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8081); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8089); ConfigProvider.bind(binder, BridgeCuratorConfig.class); From d9a0d403b3b4ec655d4f39b1fc28d302d38b937d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Xavier=20L=C3=A9aut=C3=A9?= Date: Tue, 16 Sep 2014 14:53:54 -0700 Subject: [PATCH 45/46] default zk compressed + batch announcements in 0.7 --- server/src/main/java/io/druid/curator/CuratorConfig.java | 2 +- .../druid/server/coordination/DataSegmentAnnouncerProvider.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/io/druid/curator/CuratorConfig.java b/server/src/main/java/io/druid/curator/CuratorConfig.java index 7ac0247b342..50d23f64104 100644 --- a/server/src/main/java/io/druid/curator/CuratorConfig.java +++ b/server/src/main/java/io/druid/curator/CuratorConfig.java @@ -34,6 +34,6 @@ public abstract class CuratorConfig public abstract int getZkSessionTimeoutMs(); @Config("druid.curator.compress") - @Default("false") + @Default("true") public abstract boolean enableCompression(); } diff --git a/server/src/main/java/io/druid/server/coordination/DataSegmentAnnouncerProvider.java b/server/src/main/java/io/druid/server/coordination/DataSegmentAnnouncerProvider.java index eeed92b045a..eb52deb13bd 100644 --- 
a/server/src/main/java/io/druid/server/coordination/DataSegmentAnnouncerProvider.java +++ b/server/src/main/java/io/druid/server/coordination/DataSegmentAnnouncerProvider.java @@ -25,7 +25,7 @@ import com.google.inject.Provider; /** */ -@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LegacyDataSegmentAnnouncerProvider.class) +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = BatchDataSegmentAnnouncerProvider.class) @JsonSubTypes(value = { @JsonSubTypes.Type(name = "legacy", value = LegacyDataSegmentAnnouncerProvider.class), @JsonSubTypes.Type(name = "batch", value = BatchDataSegmentAnnouncerProvider.class) From b38a9bc1c1fe114b304d6e1db0d976a1be46cbcc Mon Sep 17 00:00:00 2001 From: nishantmonu51 Date: Mon, 22 Sep 2014 22:15:00 +0530 Subject: [PATCH 46/46] upgrade druid-api --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 898e28a3448..639cb99ceec 100644 --- a/pom.xml +++ b/pom.xml @@ -41,7 +41,7 @@ UTF-8 0.26.6 2.6.0 - 0.2.9 + 0.2.10
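A closing note on the subtlest behavioral change in this series, the retry fix in PATCH 41: before it, each retry replayed the query's original segment spec; after it, every attempt is narrowed to exactly the segments the previous pass reported missing. The sketch below is an illustrative reconstruction, not the verbatim RetryQueryRunner: the MISSING_SEGMENTS_KEY value is assumed, the real runner forces evaluation through a Yielder before reading the report, and it also honors returnPartialResults before throwing.

import com.google.common.collect.Lists;
import com.metamx.common.guava.Sequence;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.SegmentDescriptor;
import io.druid.query.spec.MultipleSpecificSegmentSpec;
import io.druid.segment.SegmentMissingException;

import java.util.List;
import java.util.Map;

public class NarrowedRetrySketch
{
  // Assumed to match the context key RetryQueryRunner uses for missing-segment reports.
  private static final String MISSING_SEGMENTS_KEY = "missingSegments";

  public <T> Sequence<T> runWithRetry(
      QueryRunner<T> baseRunner,
      Query<T> query,
      Map<String, Object> context,
      int numTries
  )
  {
    context.put(MISSING_SEGMENTS_KEY, Lists.newArrayList());
    // Simplification: assumes run() populates the missing-segment report eagerly.
    Sequence<T> results = baseRunner.run(query, context);

    for (int i = 0; i < numTries && !getMissing(context).isEmpty(); i++) {
      List<SegmentDescriptor> missing = getMissing(context);
      context.put(MISSING_SEGMENTS_KEY, Lists.newArrayList()); // reset before retrying
      // Narrow the retry to exactly the segments that went missing, instead of
      // replaying the query's original segment spec.
      results = baseRunner.run(
          query.withQuerySegmentSpec(new MultipleSpecificSegmentSpec(missing)),
          context
      );
    }

    List<SegmentDescriptor> stillMissing = getMissing(context);
    if (!stillMissing.isEmpty()) {
      throw new SegmentMissingException("No results found for segments[%s]", stillMissing);
    }
    return results;
  }

  @SuppressWarnings("unchecked")
  private static List<SegmentDescriptor> getMissing(Map<String, Object> context)
  {
    List<SegmentDescriptor> missing = (List<SegmentDescriptor>) context.get(MISSING_SEGMENTS_KEY);
    return missing == null ? Lists.<SegmentDescriptor>newArrayList() : missing;
  }
}

The effect is that a retry is proportional to what actually failed: if a single historical dropped one segment, the retry touches only that segment rather than fanning the whole queried interval back out across the cluster.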