diff --git a/.gitignore b/.gitignore index c7f1290850b..df8a58abf2c 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ target *.log *.DS_Store _site +dependency-reduced-pom.xml diff --git a/.travis.yml b/.travis.yml index 81f35b2aaa7..e455942a18a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,6 @@ language: java -dist: precise +dist: trusty jdk: - oraclejdk8 @@ -19,17 +19,16 @@ matrix: # processing module test - sudo: false - install: mvn install -q -ff -DskipTests -B - script: mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl processing + install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B + script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl processing # non-processing modules test - sudo: false - install: mvn install -q -ff -DskipTests -B - script: mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl '!processing' + install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B + script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl '!processing' # run integration tests - sudo: required - dist: trusty services: - docker env: diff --git a/api/src/main/java/io/druid/data/input/Committer.java b/api/src/main/java/io/druid/data/input/Committer.java index 04dbe96707e..006bd5a6795 100644 --- a/api/src/main/java/io/druid/data/input/Committer.java +++ b/api/src/main/java/io/druid/data/input/Committer.java @@ -18,15 +18,19 @@ */ package io.druid.data.input; + +import io.druid.guice.annotations.ExtensionPoint; + /** * Committer includes a Runnable and a Jackson-serialized metadata object containing the offset */ +@ExtensionPoint public interface Committer extends Runnable { - /** - * @return A json serialized representation of commit metadata, - * which needs to be serialized and deserialized by Jackson. - * Commit metadata can be a complex type, but we recommend keeping it to List/Map/"Primitive JSON" types - * */ - public Object getMetadata(); + /** + * @return A json serialized representation of commit metadata, + * which needs to be serialized and deserialized by Jackson. + * Commit metadata can be a complex type, but we recommend keeping it to List/Map/"Primitive JSON" types + */ + public Object getMetadata(); } diff --git a/api/src/main/java/io/druid/data/input/Firehose.java b/api/src/main/java/io/druid/data/input/Firehose.java index 4f4c640f104..a6f403cf355 100644 --- a/api/src/main/java/io/druid/data/input/Firehose.java +++ b/api/src/main/java/io/druid/data/input/Firehose.java @@ -19,6 +19,8 @@ package io.druid.data.input; +import io.druid.guice.annotations.ExtensionPoint; + import javax.annotation.Nullable; import java.io.Closeable; @@ -36,6 +38,7 @@ import java.io.Closeable; * which will be called on another thread, so the operations inside of that callback must be thread-safe. *

*/ +@ExtensionPoint public interface Firehose extends Closeable { /** diff --git a/api/src/main/java/io/druid/data/input/FirehoseFactory.java b/api/src/main/java/io/druid/data/input/FirehoseFactory.java index 2494c13ea71..75049ea8911 100644 --- a/api/src/main/java/io/druid/data/input/FirehoseFactory.java +++ b/api/src/main/java/io/druid/data/input/FirehoseFactory.java @@ -22,6 +22,7 @@ package io.druid.data.input; import com.fasterxml.jackson.annotation.JsonTypeInfo; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.PrefetchableTextFilesFirehoseFactory; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.java.util.common.parsers.ParseException; import java.io.File; @@ -32,6 +33,7 @@ import java.io.IOException; * It currently provides two methods for creating a {@link Firehose} and their default implementations call each other * for the backward compatibility. Implementations of this interface must implement one of these methods. */ +@ExtensionPoint @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") public interface FirehoseFactory { diff --git a/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java b/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java index a0fc5e2468f..64a2b11d937 100644 --- a/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java +++ b/api/src/main/java/io/druid/data/input/FirehoseFactoryV2.java @@ -20,8 +20,8 @@ package io.druid.data.input; import com.fasterxml.jackson.annotation.JsonTypeInfo; - import io.druid.data.input.impl.InputRowParser; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.java.util.common.parsers.ParseException; import java.io.IOException; @@ -37,6 +37,7 @@ import java.io.IOException; * value will throw a surprising NPE. Throwing IOException on connection failure or runtime exception on * invalid configuration is preferred over returning null. */ +@ExtensionPoint @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") public interface FirehoseFactoryV2 { diff --git a/api/src/main/java/io/druid/data/input/FirehoseV2.java b/api/src/main/java/io/druid/data/input/FirehoseV2.java index 05f72cc8609..d22f1a59a4f 100644 --- a/api/src/main/java/io/druid/data/input/FirehoseV2.java +++ b/api/src/main/java/io/druid/data/input/FirehoseV2.java @@ -19,6 +19,8 @@ package io.druid.data.input; +import io.druid.guice.annotations.ExtensionPoint; + import java.io.Closeable; /** * This is an interface that holds onto the stream of incoming data. Realtime data ingestion is built around this @@ -44,6 +46,7 @@ import java.io.Closeable; * which will be called on another thread, so the operations inside of that callback must be thread-safe. *

*/ +@ExtensionPoint public interface FirehoseV2 extends Closeable { /** diff --git a/api/src/main/java/io/druid/data/input/InputRow.java b/api/src/main/java/io/druid/data/input/InputRow.java index 40164571bc1..b3f792711f5 100644 --- a/api/src/main/java/io/druid/data/input/InputRow.java +++ b/api/src/main/java/io/druid/data/input/InputRow.java @@ -19,6 +19,8 @@ package io.druid.data.input; +import io.druid.guice.annotations.ExtensionPoint; + import java.util.List; /** @@ -28,8 +30,8 @@ import java.util.List; * implement "schema-less" data ingestion that allows the system to add new dimensions as they appear. * */ -public interface - InputRow extends Row +@ExtensionPoint +public interface InputRow extends Row { /** * Returns the dimensions that exist in this row. diff --git a/api/src/main/java/io/druid/data/input/MapBasedInputRow.java b/api/src/main/java/io/druid/data/input/MapBasedInputRow.java index 61fe512e2fc..d6f3647ed17 100644 --- a/api/src/main/java/io/druid/data/input/MapBasedInputRow.java +++ b/api/src/main/java/io/druid/data/input/MapBasedInputRow.java @@ -19,6 +19,8 @@ package io.druid.data.input; +import io.druid.guice.annotations.PublicApi; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import java.util.List; @@ -26,6 +28,7 @@ import java.util.Map; /** */ +@PublicApi public class MapBasedInputRow extends MapBasedRow implements InputRow { private final List dimensions; @@ -60,7 +63,7 @@ public class MapBasedInputRow extends MapBasedRow implements InputRow public String toString() { return "MapBasedInputRow{" + - "timestamp=" + new DateTime(getTimestampFromEpoch()) + + "timestamp=" + DateTimes.utc(getTimestampFromEpoch()) + ", event=" + getEvent() + ", dimensions=" + dimensions + '}'; diff --git a/api/src/main/java/io/druid/data/input/MapBasedRow.java b/api/src/main/java/io/druid/data/input/MapBasedRow.java index 0d4cbc8be8e..7a8ba07a44a 100644 --- a/api/src/main/java/io/druid/data/input/MapBasedRow.java +++ b/api/src/main/java/io/druid/data/input/MapBasedRow.java @@ -22,6 +22,8 @@ package io.druid.data.input; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; +import io.druid.guice.annotations.PublicApi; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.parsers.ParseException; import org.joda.time.DateTime; @@ -32,6 +34,7 @@ import java.util.regex.Pattern; /** */ +@PublicApi public class MapBasedRow implements Row { private static final Pattern LONG_PAT = Pattern.compile("[-|+]?\\d+"); @@ -54,7 +57,7 @@ public class MapBasedRow implements Row Map event ) { - this(new DateTime(timestamp), event); + this(DateTimes.utc(timestamp), event); } @Override diff --git a/api/src/main/java/io/druid/data/input/Row.java b/api/src/main/java/io/druid/data/input/Row.java index f698c02dd68..f5462ff5bda 100644 --- a/api/src/main/java/io/druid/data/input/Row.java +++ b/api/src/main/java/io/druid/data/input/Row.java @@ -21,6 +21,7 @@ package io.druid.data.input; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; +import io.druid.guice.annotations.PublicApi; import org.joda.time.DateTime; import java.util.List; @@ -29,6 +30,7 @@ import java.util.List; * A Row of data. This can be used for both input and output into various parts of the system. It assumes * that the user already knows the schema of the row and can query for the parts that they care about. 
*/ +@PublicApi @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "version", defaultImpl = MapBasedRow.class) @JsonSubTypes(value = { @JsonSubTypes.Type(name = "v1", value = MapBasedRow.class) diff --git a/api/src/main/java/io/druid/data/input/Rows.java b/api/src/main/java/io/druid/data/input/Rows.java index 05e1aeec4f9..a31d1b3a224 100644 --- a/api/src/main/java/io/druid/data/input/Rows.java +++ b/api/src/main/java/io/druid/data/input/Rows.java @@ -23,33 +23,14 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.Maps; -import io.druid.java.util.common.ISE; - import java.util.List; import java.util.Map; import java.util.Set; -import java.util.TreeMap; /** */ public class Rows { - public static InputRow toCaseInsensitiveInputRow(final Row row, final List dimensions) - { - if (row instanceof MapBasedRow) { - MapBasedRow mapBasedRow = (MapBasedRow) row; - - TreeMap caseInsensitiveMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); - caseInsensitiveMap.putAll(mapBasedRow.getEvent()); - return new MapBasedInputRow( - mapBasedRow.getTimestamp(), - dimensions, - caseInsensitiveMap - ); - } - throw new ISE("Can only convert MapBasedRow objects because we are ghetto like that."); - } - /** * @param timeStamp rollup up timestamp to be used to create group key * @param inputRow input row diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java index fd9acb62be1..fb61f5b977b 100644 --- a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java +++ b/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java @@ -26,10 +26,12 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Preconditions; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.StringUtils; /** */ +@PublicApi @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringDimensionSchema.class) @JsonSubTypes(value = { @JsonSubTypes.Type(name = DimensionSchema.STRING_TYPE_NAME, value = StringDimensionSchema.class), diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java b/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java index ecbfb978887..3d48a95abe7 100644 --- a/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/DimensionsSpec.java @@ -28,7 +28,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; - +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.parsers.ParserUtils; import javax.annotation.Nullable; @@ -37,7 +37,7 @@ import java.util.List; import java.util.Map; import java.util.Set; - +@PublicApi public class DimensionsSpec { private final List dimensions; diff --git a/api/src/main/java/io/druid/data/input/impl/InputRowParser.java b/api/src/main/java/io/druid/data/input/impl/InputRowParser.java index 13c7cc18edf..dcf369ca959 100644 --- a/api/src/main/java/io/druid/data/input/impl/InputRowParser.java +++ b/api/src/main/java/io/druid/data/input/impl/InputRowParser.java @@ -22,7 +22,9 @@ package io.druid.data.input.impl; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import 
io.druid.data.input.InputRow; +import io.druid.guice.annotations.ExtensionPoint; +@ExtensionPoint @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringInputRowParser.class) @JsonSubTypes(value = { @JsonSubTypes.Type(name = "string", value = StringInputRowParser.class), diff --git a/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java index 17600ee18f4..177a2b39a75 100644 --- a/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/JSONLowercaseParseSpec.java @@ -22,7 +22,6 @@ package io.druid.data.input.impl; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.ObjectMapper; - import io.druid.java.util.common.parsers.JSONToLowerParser; import io.druid.java.util.common.parsers.Parser; diff --git a/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java index 81ce73b94a4..d58dac4aa4a 100644 --- a/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/JSONParseSpec.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonParser.Feature; import com.fasterxml.jackson.databind.ObjectMapper; - import io.druid.java.util.common.parsers.JSONPathParser; import io.druid.java.util.common.parsers.Parser; diff --git a/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java b/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java index 620f8109bd1..499f61d5bfe 100644 --- a/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/JavaScriptParseSpec.java @@ -22,7 +22,6 @@ package io.druid.data.input.impl; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; - import io.druid.java.util.common.ISE; import io.druid.java.util.common.parsers.JavaScriptParser; import io.druid.java.util.common.parsers.Parser; diff --git a/api/src/main/java/io/druid/data/input/impl/ParseSpec.java b/api/src/main/java/io/druid/data/input/impl/ParseSpec.java index 96c06237d9b..7efc397f5f4 100644 --- a/api/src/main/java/io/druid/data/input/impl/ParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/ParseSpec.java @@ -22,13 +22,12 @@ package io.druid.data.input.impl; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; - +import io.druid.guice.annotations.ExtensionPoint; import io.druid.java.util.common.parsers.Parser; import java.util.List; -/** - */ +@ExtensionPoint @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "format", defaultImpl = DelimitedParseSpec.class) @JsonSubTypes(value = { @JsonSubTypes.Type(name = "json", value = JSONParseSpec.class), diff --git a/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java b/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java index a90978bf2b1..926328c701a 100644 --- a/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/RegexParseSpec.java @@ -23,7 +23,6 @@ import 
com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.base.Preconditions; - import io.druid.java.util.common.parsers.Parser; import io.druid.java.util.common.parsers.RegexParser; diff --git a/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java b/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java index 8f83d25b714..ffb767bd7ac 100644 --- a/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java @@ -22,6 +22,7 @@ package io.druid.data.input.impl; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Function; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.parsers.TimestampParser; import org.joda.time.DateTime; @@ -31,6 +32,7 @@ import java.util.Objects; /** */ +@PublicApi public class TimestampSpec { private static class ParseCtx diff --git a/api/src/main/java/io/druid/guice/Binders.java b/api/src/main/java/io/druid/guice/Binders.java index 1d6b220ffa7..30315d6fdeb 100644 --- a/api/src/main/java/io/druid/guice/Binders.java +++ b/api/src/main/java/io/druid/guice/Binders.java @@ -22,16 +22,18 @@ package io.druid.guice; import com.google.inject.Binder; import com.google.inject.Key; import com.google.inject.multibindings.MapBinder; +import io.druid.guice.annotations.PublicApi; import io.druid.segment.loading.DataSegmentArchiver; import io.druid.segment.loading.DataSegmentFinder; -import io.druid.segment.loading.DataSegmentMover; import io.druid.segment.loading.DataSegmentKiller; +import io.druid.segment.loading.DataSegmentMover; import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.DataSegmentPusher; import io.druid.tasklogs.TaskLogs; /** */ +@PublicApi public class Binders { public static MapBinder dataSegmentPullerBinder(Binder binder) diff --git a/api/src/main/java/io/druid/guice/ConditionalMultibind.java b/api/src/main/java/io/druid/guice/ConditionalMultibind.java index 2846977944c..2b9ea4162c4 100644 --- a/api/src/main/java/io/druid/guice/ConditionalMultibind.java +++ b/api/src/main/java/io/druid/guice/ConditionalMultibind.java @@ -23,6 +23,7 @@ import com.google.common.base.Predicate; import com.google.inject.Binder; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.Multibinder; +import io.druid.guice.annotations.PublicApi; import java.lang.annotation.Annotation; import java.util.Properties; @@ -43,6 +44,7 @@ import java.util.Properties; * At injection time, you will get the items that satisfy their corresponding predicates by calling * injector.getInstance(Key.get(new TypeLiteral>(){})) */ +@PublicApi public class ConditionalMultibind { diff --git a/api/src/main/java/io/druid/guice/DruidGuiceExtensions.java b/api/src/main/java/io/druid/guice/DruidGuiceExtensions.java index 149f72c9be7..956abc7abcd 100644 --- a/api/src/main/java/io/druid/guice/DruidGuiceExtensions.java +++ b/api/src/main/java/io/druid/guice/DruidGuiceExtensions.java @@ -21,9 +21,11 @@ package io.druid.guice; import com.google.inject.Binder; import com.google.inject.Module; +import io.druid.guice.annotations.PublicApi; /** */ +@PublicApi public class DruidGuiceExtensions implements Module { @Override diff --git a/api/src/main/java/io/druid/guice/DruidScopes.java b/api/src/main/java/io/druid/guice/DruidScopes.java index a837928a2a7..d7aeab313c2 100644 --- 
a/api/src/main/java/io/druid/guice/DruidScopes.java +++ b/api/src/main/java/io/druid/guice/DruidScopes.java @@ -23,9 +23,11 @@ import com.google.inject.Key; import com.google.inject.Provider; import com.google.inject.Scope; import com.google.inject.Scopes; +import io.druid.guice.annotations.PublicApi; /** */ +@PublicApi public class DruidScopes { public static final Scope SINGLETON = new Scope() diff --git a/api/src/main/java/io/druid/guice/Jerseys.java b/api/src/main/java/io/druid/guice/Jerseys.java index 9c0163a4fb5..51520eae749 100644 --- a/api/src/main/java/io/druid/guice/Jerseys.java +++ b/api/src/main/java/io/druid/guice/Jerseys.java @@ -23,9 +23,11 @@ import com.google.inject.Binder; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.Multibinder; import io.druid.guice.annotations.JSR311Resource; +import io.druid.guice.annotations.PublicApi; /** */ +@PublicApi public class Jerseys { public static void addResource(Binder binder, Class resourceClazz) diff --git a/api/src/main/java/io/druid/guice/JsonConfigProvider.java b/api/src/main/java/io/druid/guice/JsonConfigProvider.java index 4e9b5f64f98..c3a9cfd64d8 100644 --- a/api/src/main/java/io/druid/guice/JsonConfigProvider.java +++ b/api/src/main/java/io/druid/guice/JsonConfigProvider.java @@ -26,6 +26,7 @@ import com.google.inject.Inject; import com.google.inject.Key; import com.google.inject.Provider; import com.google.inject.util.Types; +import io.druid.guice.annotations.PublicApi; import java.lang.annotation.Annotation; import java.lang.reflect.ParameterizedType; @@ -76,6 +77,7 @@ import java.util.Properties; * * @param type of config object to provide. */ +@PublicApi public class JsonConfigProvider implements Provider> { @SuppressWarnings("unchecked") diff --git a/api/src/main/java/io/druid/guice/LazySingleton.java b/api/src/main/java/io/druid/guice/LazySingleton.java index 452621df812..5acf6466be6 100644 --- a/api/src/main/java/io/druid/guice/LazySingleton.java +++ b/api/src/main/java/io/druid/guice/LazySingleton.java @@ -20,6 +20,7 @@ package io.druid.guice; import com.google.inject.ScopeAnnotation; +import io.druid.guice.annotations.PublicApi; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -31,6 +32,7 @@ import java.lang.annotation.Target; @Target({ElementType.TYPE, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) @ScopeAnnotation +@PublicApi public @interface LazySingleton { } diff --git a/api/src/main/java/io/druid/guice/LifecycleModule.java b/api/src/main/java/io/druid/guice/LifecycleModule.java index 2d9968cdc3c..2dc340dd84e 100644 --- a/api/src/main/java/io/druid/guice/LifecycleModule.java +++ b/api/src/main/java/io/druid/guice/LifecycleModule.java @@ -27,7 +27,6 @@ import com.google.inject.Provides; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.Multibinder; import com.google.inject.name.Names; - import io.druid.java.util.common.lifecycle.Lifecycle; import java.lang.annotation.Annotation; diff --git a/api/src/main/java/io/druid/guice/LifecycleScope.java b/api/src/main/java/io/druid/guice/LifecycleScope.java index 42a34979a3d..19914b0f0f3 100644 --- a/api/src/main/java/io/druid/guice/LifecycleScope.java +++ b/api/src/main/java/io/druid/guice/LifecycleScope.java @@ -23,7 +23,6 @@ import com.google.common.collect.Lists; import com.google.inject.Key; import com.google.inject.Provider; import com.google.inject.Scope; - import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.java.util.common.logger.Logger; diff --git 
a/api/src/main/java/io/druid/guice/ManageLifecycle.java b/api/src/main/java/io/druid/guice/ManageLifecycle.java index e062aa3994b..53d4d8f33e9 100644 --- a/api/src/main/java/io/druid/guice/ManageLifecycle.java +++ b/api/src/main/java/io/druid/guice/ManageLifecycle.java @@ -20,6 +20,7 @@ package io.druid.guice; import com.google.inject.ScopeAnnotation; +import io.druid.guice.annotations.PublicApi; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -34,6 +35,7 @@ import java.lang.annotation.Target; @Target({ ElementType.TYPE, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) @ScopeAnnotation +@PublicApi public @interface ManageLifecycle { } diff --git a/api/src/main/java/io/druid/guice/ManageLifecycleLast.java b/api/src/main/java/io/druid/guice/ManageLifecycleLast.java index ff88784898a..02a7ff15d98 100644 --- a/api/src/main/java/io/druid/guice/ManageLifecycleLast.java +++ b/api/src/main/java/io/druid/guice/ManageLifecycleLast.java @@ -20,6 +20,7 @@ package io.druid.guice; import com.google.inject.ScopeAnnotation; +import io.druid.guice.annotations.PublicApi; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -34,6 +35,7 @@ import java.lang.annotation.Target; @Target({ ElementType.TYPE, ElementType.METHOD }) @Retention(RetentionPolicy.RUNTIME) @ScopeAnnotation +@PublicApi public @interface ManageLifecycleLast { } diff --git a/api/src/main/java/io/druid/guice/PolyBind.java b/api/src/main/java/io/druid/guice/PolyBind.java index fddeb98c309..fbcdaaaa4ad 100644 --- a/api/src/main/java/io/druid/guice/PolyBind.java +++ b/api/src/main/java/io/druid/guice/PolyBind.java @@ -30,6 +30,7 @@ import com.google.inject.TypeLiteral; import com.google.inject.binder.ScopedBindingBuilder; import com.google.inject.multibindings.MapBinder; import com.google.inject.util.Types; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.StringUtils; import javax.annotation.Nullable; @@ -45,6 +46,7 @@ import java.util.Properties; * returned by the optionBinder() method. Multiple different modules can call optionBinder and all options will be * reflected at injection time as long as equivalent interface Key objects are passed into the various methods. */ +@PublicApi public class PolyBind { /** diff --git a/api/src/main/java/io/druid/guice/annotations/ExtensionPoint.java b/api/src/main/java/io/druid/guice/annotations/ExtensionPoint.java new file mode 100644 index 00000000000..9dc02e17044 --- /dev/null +++ b/api/src/main/java/io/druid/guice/annotations/ExtensionPoint.java @@ -0,0 +1,50 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.guice.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Signifies that the annotated type is an extension point. Extension points are interfaces or non-final classes that + * may be subclassed in extensions in order to add functionality to Druid. Extension points may change in breaking ways + * only between major Druid release lines (e.g. 0.10.x -> 0.11.0), but otherwise must remain stable. Extension points + * may change at any time in non-breaking ways, however, such as by adding new default methods to an interface. + * + * All public and protected fields, methods, and constructors of annotated classes and interfaces are considered + * stable in this sense. If a class is not annotated, but an individual field, method, or constructor is + * annotated, then only that particular field, method, or constructor is considered an extension API. + * + * Extension points are all considered public APIs in the sense of {@link PublicApi}, even if not explicitly annotated + * as such. + * + * Note that there are a number of injectable interfaces that are not annotated with {@code ExtensionPoint}. You may + * still extend these interfaces in extensions, but your extension may need to be recompiled even for a minor + * update of Druid. + * + * @see PublicApi + */ +@Target({ElementType.TYPE}) +@Retention(RetentionPolicy.SOURCE) +public @interface ExtensionPoint +{ +} diff --git a/api/src/main/java/io/druid/guice/annotations/Global.java b/api/src/main/java/io/druid/guice/annotations/Global.java index 25222ce4bf3..84de2013261 100644 --- a/api/src/main/java/io/druid/guice/annotations/Global.java +++ b/api/src/main/java/io/druid/guice/annotations/Global.java @@ -31,6 +31,7 @@ import java.lang.annotation.Target; @BindingAnnotation @Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) +@PublicApi public @interface Global { } diff --git a/api/src/main/java/io/druid/guice/annotations/JSR311Resource.java b/api/src/main/java/io/druid/guice/annotations/JSR311Resource.java index 465840cc7d0..948bd576063 100644 --- a/api/src/main/java/io/druid/guice/annotations/JSR311Resource.java +++ b/api/src/main/java/io/druid/guice/annotations/JSR311Resource.java @@ -31,6 +31,7 @@ import java.lang.annotation.Target; @BindingAnnotation @Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) +@PublicApi public @interface JSR311Resource { } diff --git a/api/src/main/java/io/druid/guice/annotations/Json.java b/api/src/main/java/io/druid/guice/annotations/Json.java index 73dac864e9a..4371554977a 100644 --- a/api/src/main/java/io/druid/guice/annotations/Json.java +++ b/api/src/main/java/io/druid/guice/annotations/Json.java @@ -31,6 +31,7 @@ import java.lang.annotation.Target; @Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) @BindingAnnotation +@PublicApi public @interface Json { } diff --git a/api/src/main/java/io/druid/guice/annotations/PublicApi.java b/api/src/main/java/io/druid/guice/annotations/PublicApi.java new file mode 100644 index 00000000000..f398dfe81a2 --- /dev/null +++ b/api/src/main/java/io/druid/guice/annotations/PublicApi.java @@ -0,0 +1,51 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.guice.annotations; + +import java.lang.annotation.ElementType; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Signifies that the annotated entity is a public API for extension authors. Public APIs may change in breaking ways + * only between major Druid release lines (e.g. 0.10.x -> 0.11.0), but otherwise must remain stable. Public APIs may + * change at any time in non-breaking ways, however, such as by adding new fields, methods, or constructors. + * + * Note that interfaces annotated with {@code PublicApi} but not with {@link ExtensionPoint} are not meant to be + * subclassed in extensions. In this case, the annotation simply signifies that the interface is stable for callers. + * In particular, since it is not meant to be subclassed, new non-default methods may be added to an interface and + * new abstract methods may be added to a class. + * + * If a class or interface is annotated, then all public and protected fields, methods, and constructors of that class + * or interface are considered stable in this sense. If a class is not annotated, but an individual field, method, or + * constructor is annotated, then only that particular field, method, or constructor is considered a public API. + * + * Classes, fields, methods, and constructors _not_ annotated with {@code @PublicApi} may be modified or removed + * in any Druid release, unless they are annotated with {@link ExtensionPoint} (which implies they are a public API + * as well). 
+ * + * @see ExtensionPoint + */ +@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.CONSTRUCTOR}) +@Retention(RetentionPolicy.SOURCE) +public @interface PublicApi +{ +} diff --git a/api/src/main/java/io/druid/guice/annotations/Self.java b/api/src/main/java/io/druid/guice/annotations/Self.java index e6123fbe188..f5a8b348c4e 100644 --- a/api/src/main/java/io/druid/guice/annotations/Self.java +++ b/api/src/main/java/io/druid/guice/annotations/Self.java @@ -31,6 +31,7 @@ import java.lang.annotation.Target; @Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) @BindingAnnotation +@PublicApi public @interface Self { } diff --git a/api/src/main/java/io/druid/guice/annotations/Smile.java b/api/src/main/java/io/druid/guice/annotations/Smile.java index 136885a4f46..babfb5a68d7 100644 --- a/api/src/main/java/io/druid/guice/annotations/Smile.java +++ b/api/src/main/java/io/druid/guice/annotations/Smile.java @@ -31,6 +31,7 @@ import java.lang.annotation.Target; @Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD}) @Retention(RetentionPolicy.RUNTIME) @BindingAnnotation +@PublicApi public @interface Smile { } diff --git a/api/src/main/java/io/druid/initialization/DruidModule.java b/api/src/main/java/io/druid/initialization/DruidModule.java index 9015dca45be..89bb3d85768 100644 --- a/api/src/main/java/io/druid/initialization/DruidModule.java +++ b/api/src/main/java/io/druid/initialization/DruidModule.java @@ -20,11 +20,13 @@ package io.druid.initialization; import com.fasterxml.jackson.databind.Module; +import io.druid.guice.annotations.ExtensionPoint; import java.util.List; /** */ +@ExtensionPoint public interface DruidModule extends com.google.inject.Module { public List getJacksonModules(); diff --git a/api/src/main/java/io/druid/js/JavaScriptConfig.java b/api/src/main/java/io/druid/js/JavaScriptConfig.java index 6b62431aa88..7dc6bb1b2fb 100644 --- a/api/src/main/java/io/druid/js/JavaScriptConfig.java +++ b/api/src/main/java/io/druid/js/JavaScriptConfig.java @@ -21,7 +21,13 @@ package io.druid.js; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import io.druid.guice.annotations.PublicApi; +/** + * Should be used by extension filters, aggregators, etc, that use JavaScript to determine if JavaScript is enabled + * or not. + */ +@PublicApi public class JavaScriptConfig { public static final int DEFAULT_OPTIMIZATION_LEVEL = 9; diff --git a/api/src/main/java/io/druid/segment/SegmentUtils.java b/api/src/main/java/io/druid/segment/SegmentUtils.java index f1b495f9a13..448eaf0c503 100644 --- a/api/src/main/java/io/druid/segment/SegmentUtils.java +++ b/api/src/main/java/io/druid/segment/SegmentUtils.java @@ -21,6 +21,7 @@ package io.druid.segment; import com.google.common.io.Files; import com.google.common.primitives.Ints; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.IOE; import java.io.File; @@ -29,7 +30,9 @@ import java.io.IOException; import java.io.InputStream; /** + * Utility methods useful for implementing deep storage extensions. 
*/ +@PublicApi public class SegmentUtils { public static int getVersionFromDir(File inDir) throws IOException diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentArchiver.java b/api/src/main/java/io/druid/segment/loading/DataSegmentArchiver.java index b08365cce9e..2776bfb4aa4 100644 --- a/api/src/main/java/io/druid/segment/loading/DataSegmentArchiver.java +++ b/api/src/main/java/io/druid/segment/loading/DataSegmentArchiver.java @@ -19,10 +19,12 @@ package io.druid.segment.loading; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.timeline.DataSegment; import javax.annotation.Nullable; +@ExtensionPoint public interface DataSegmentArchiver { /** diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentFinder.java b/api/src/main/java/io/druid/segment/loading/DataSegmentFinder.java index ef4dafbdba9..937a42e72c2 100644 --- a/api/src/main/java/io/druid/segment/loading/DataSegmentFinder.java +++ b/api/src/main/java/io/druid/segment/loading/DataSegmentFinder.java @@ -19,6 +19,7 @@ package io.druid.segment.loading; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.timeline.DataSegment; import java.util.Set; @@ -27,6 +28,7 @@ import java.util.Set; * A DataSegmentFinder is responsible for finding Druid segments underneath a specified directory and optionally updates * all descriptor.json files on deep storage with correct loadSpec. */ +@ExtensionPoint public interface DataSegmentFinder { /** diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentKiller.java b/api/src/main/java/io/druid/segment/loading/DataSegmentKiller.java index ba9b879587a..c26a73daeb1 100644 --- a/api/src/main/java/io/druid/segment/loading/DataSegmentKiller.java +++ b/api/src/main/java/io/druid/segment/loading/DataSegmentKiller.java @@ -19,12 +19,14 @@ package io.druid.segment.loading; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.timeline.DataSegment; import java.io.IOException; /** */ +@ExtensionPoint public interface DataSegmentKiller { void kill(DataSegment segments) throws SegmentLoadingException; diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentMover.java b/api/src/main/java/io/druid/segment/loading/DataSegmentMover.java index 81080585cdf..6b59c87e421 100644 --- a/api/src/main/java/io/druid/segment/loading/DataSegmentMover.java +++ b/api/src/main/java/io/druid/segment/loading/DataSegmentMover.java @@ -19,10 +19,12 @@ package io.druid.segment.loading; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.timeline.DataSegment; import java.util.Map; +@ExtensionPoint public interface DataSegmentMover { public DataSegment move(DataSegment segment, Map targetLoadSpec) throws SegmentLoadingException; diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentPuller.java b/api/src/main/java/io/druid/segment/loading/DataSegmentPuller.java index 46f051138a5..fe2d089b0ec 100644 --- a/api/src/main/java/io/druid/segment/loading/DataSegmentPuller.java +++ b/api/src/main/java/io/druid/segment/loading/DataSegmentPuller.java @@ -19,6 +19,7 @@ package io.druid.segment.loading; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.timeline.DataSegment; import java.io.File; @@ -26,6 +27,7 @@ import java.io.File; /** * A DataSegmentPuller is responsible for pulling data for a particular segment into a particular directory */ +@ExtensionPoint public interface DataSegmentPuller { /** diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java 
b/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java index 3076b5e7c1a..b9bf810f72c 100644 --- a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java +++ b/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java @@ -20,6 +20,7 @@ package io.druid.segment.loading; import com.google.common.base.Joiner; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; @@ -30,6 +31,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +@ExtensionPoint public interface DataSegmentPusher { Joiner JOINER = Joiner.on("/").skipNulls(); diff --git a/api/src/main/java/io/druid/segment/loading/LoadSpec.java b/api/src/main/java/io/druid/segment/loading/LoadSpec.java index d1945e16085..e026ded7090 100644 --- a/api/src/main/java/io/druid/segment/loading/LoadSpec.java +++ b/api/src/main/java/io/druid/segment/loading/LoadSpec.java @@ -20,12 +20,14 @@ package io.druid.segment.loading; import com.fasterxml.jackson.annotation.JsonTypeInfo; +import io.druid.guice.annotations.ExtensionPoint; import java.io.File; /** * A means of pulling segment files into a destination directory */ +@ExtensionPoint @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") public interface LoadSpec { diff --git a/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java b/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java index 2ac69944a74..3bd388dc9f9 100644 --- a/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java +++ b/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java @@ -19,10 +19,12 @@ package io.druid.segment.loading; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.StringUtils; /** */ +@PublicApi public class SegmentLoadingException extends Exception { public SegmentLoadingException( diff --git a/api/src/main/java/io/druid/segment/loading/URIDataPuller.java b/api/src/main/java/io/druid/segment/loading/URIDataPuller.java index 9ae45a3ac42..1a5718fc54b 100644 --- a/api/src/main/java/io/druid/segment/loading/URIDataPuller.java +++ b/api/src/main/java/io/druid/segment/loading/URIDataPuller.java @@ -20,6 +20,7 @@ package io.druid.segment.loading; import com.google.common.base.Predicate; +import io.druid.guice.annotations.ExtensionPoint; import java.io.IOException; import java.io.InputStream; @@ -28,6 +29,7 @@ import java.net.URI; /** * A URIDataPuller has handlings for URI based data */ +@ExtensionPoint public interface URIDataPuller { /** diff --git a/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java b/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java index 4ba760a79b8..3c413303c70 100644 --- a/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java +++ b/api/src/main/java/io/druid/tasklogs/NoopTaskLogs.java @@ -21,7 +21,6 @@ package io.druid.tasklogs; import com.google.common.base.Optional; import com.google.common.io.ByteSource; - import io.druid.java.util.common.logger.Logger; import java.io.File; diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogKiller.java b/api/src/main/java/io/druid/tasklogs/TaskLogKiller.java index f03e46ad0c4..7a63f640a8f 100644 --- a/api/src/main/java/io/druid/tasklogs/TaskLogKiller.java +++ b/api/src/main/java/io/druid/tasklogs/TaskLogKiller.java @@ -19,10 +19,13 @@ package io.druid.tasklogs; +import io.druid.guice.annotations.ExtensionPoint; + import java.io.IOException; /** */ +@ExtensionPoint public interface TaskLogKiller { void killAll() 
throws IOException; diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogPusher.java b/api/src/main/java/io/druid/tasklogs/TaskLogPusher.java index 3fc16d46f98..9b30e4f2be9 100644 --- a/api/src/main/java/io/druid/tasklogs/TaskLogPusher.java +++ b/api/src/main/java/io/druid/tasklogs/TaskLogPusher.java @@ -19,12 +19,15 @@ package io.druid.tasklogs; +import io.druid.guice.annotations.ExtensionPoint; + import java.io.File; import java.io.IOException; /** * Something that knows how to persist local task logs to some form of long-term storage. */ +@ExtensionPoint public interface TaskLogPusher { public void pushTaskLog(String taskid, File logFile) throws IOException; diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogStreamer.java b/api/src/main/java/io/druid/tasklogs/TaskLogStreamer.java index ccd9a99cdcb..0e60ffcf7c7 100644 --- a/api/src/main/java/io/druid/tasklogs/TaskLogStreamer.java +++ b/api/src/main/java/io/druid/tasklogs/TaskLogStreamer.java @@ -21,12 +21,14 @@ package io.druid.tasklogs; import com.google.common.base.Optional; import com.google.common.io.ByteSource; +import io.druid.guice.annotations.ExtensionPoint; import java.io.IOException; /** * Something that knows how to stream logs for tasks. */ +@ExtensionPoint public interface TaskLogStreamer { /** diff --git a/api/src/main/java/io/druid/tasklogs/TaskLogs.java b/api/src/main/java/io/druid/tasklogs/TaskLogs.java index db76b924e9e..383c3559ae1 100644 --- a/api/src/main/java/io/druid/tasklogs/TaskLogs.java +++ b/api/src/main/java/io/druid/tasklogs/TaskLogs.java @@ -19,6 +19,9 @@ package io.druid.tasklogs; +import io.druid.guice.annotations.ExtensionPoint; + +@ExtensionPoint public interface TaskLogs extends TaskLogStreamer, TaskLogPusher, TaskLogKiller { } diff --git a/api/src/main/java/io/druid/timeline/DataSegment.java b/api/src/main/java/io/druid/timeline/DataSegment.java index 74322c8c3c6..0b4b2f9fe5c 100644 --- a/api/src/main/java/io/druid/timeline/DataSegment.java +++ b/api/src/main/java/io/druid/timeline/DataSegment.java @@ -31,6 +31,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Interner; import com.google.common.collect.Interners; import com.google.common.collect.Iterables; +import io.druid.guice.annotations.PublicApi; import io.druid.jackson.CommaListJoinDeserializer; import io.druid.jackson.CommaListJoinSerializer; import io.druid.java.util.common.granularity.Granularities; @@ -46,6 +47,7 @@ import java.util.Map; /** */ +@PublicApi public class DataSegment implements Comparable { public static String delimiter = "_"; diff --git a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java b/api/src/main/java/io/druid/timeline/DataSegmentUtils.java index 4d8690e9576..349bbceda03 100644 --- a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java +++ b/api/src/main/java/io/druid/timeline/DataSegmentUtils.java @@ -20,6 +20,7 @@ package io.druid.timeline; import com.google.common.base.Function; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; @@ -33,6 +34,7 @@ import java.util.Objects; /** * identifier to DataSegment. 
*/ +@PublicApi public class DataSegmentUtils { private static final Logger LOGGER = new Logger(DataSegmentUtils.class); @@ -91,7 +93,7 @@ public class DataSegmentUtils return new SegmentIdentifierParts( dataSource, - new Interval(start.getMillis(), end.getMillis()), + new Interval(start, end), version, trail ); diff --git a/api/src/main/java/io/druid/timeline/partition/ShardSpec.java b/api/src/main/java/io/druid/timeline/partition/ShardSpec.java index b76a01941cf..466c9b14582 100644 --- a/api/src/main/java/io/druid/timeline/partition/ShardSpec.java +++ b/api/src/main/java/io/druid/timeline/partition/ShardSpec.java @@ -28,7 +28,8 @@ import java.util.List; import java.util.Map; /** - * A Marker interface that exists to combine ShardSpec objects together for Jackson + * A Marker interface that exists to combine ShardSpec objects together for Jackson. Note that this is not an + * extension API. Extensions are not expected to create new kinds of ShardSpecs. */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") @JsonSubTypes({ diff --git a/api/src/main/java/io/druid/utils/CompressionUtils.java b/api/src/main/java/io/druid/utils/CompressionUtils.java index 3d628dce1cf..6a551e319e0 100644 --- a/api/src/main/java/io/druid/utils/CompressionUtils.java +++ b/api/src/main/java/io/druid/utils/CompressionUtils.java @@ -20,15 +20,17 @@ package io.druid.utils; +import io.druid.guice.annotations.PublicApi; +import io.druid.java.util.common.logger.Logger; + import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import io.druid.java.util.common.logger.Logger; - /** */ +@PublicApi public class CompressionUtils { private static final Logger log = new Logger(CompressionUtils.class); diff --git a/api/src/main/java/io/druid/utils/Runnables.java b/api/src/main/java/io/druid/utils/Runnables.java index d6cec40db15..883aae65b8c 100644 --- a/api/src/main/java/io/druid/utils/Runnables.java +++ b/api/src/main/java/io/druid/utils/Runnables.java @@ -19,8 +19,11 @@ package io.druid.utils; +import io.druid.guice.annotations.PublicApi; + /** */ +@PublicApi public class Runnables { public static Runnable getNoopRunnable() diff --git a/api/src/test/java/io/druid/TestObjectMapper.java b/api/src/test/java/io/druid/TestObjectMapper.java index 740176fb0c5..8ce6fedf843 100644 --- a/api/src/test/java/io/druid/TestObjectMapper.java +++ b/api/src/test/java/io/druid/TestObjectMapper.java @@ -29,6 +29,7 @@ import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.databind.deser.std.StdDeserializer; import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; +import io.druid.java.util.common.Intervals; import org.joda.time.Interval; import java.io.IOException; @@ -61,7 +62,7 @@ public class TestObjectMapper extends ObjectMapper JsonParser jsonParser, DeserializationContext deserializationContext ) throws IOException, JsonProcessingException { - return new Interval(jsonParser.getText()); + return Intervals.of(jsonParser.getText()); } } ); diff --git a/api/src/test/java/io/druid/data/input/MapBasedRowTest.java b/api/src/test/java/io/druid/data/input/MapBasedRowTest.java index 8d98e11ddcf..05241f50e05 100644 --- a/api/src/test/java/io/druid/data/input/MapBasedRowTest.java +++ b/api/src/test/java/io/druid/data/input/MapBasedRowTest.java @@ -19,19 +19,18 @@ package io.druid.data.input; -import org.joda.time.DateTime; +import com.google.common.collect.ImmutableMap; +import 
io.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; -import com.google.common.collect.ImmutableMap; - public class MapBasedRowTest { @Test public void testGetLongMetricFromString() { MapBasedRow row = new MapBasedRow( - new DateTime(), + DateTimes.nowUtc(), ImmutableMap.builder() .put("k0", "-1.2") .put("k1", "1.23") diff --git a/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java b/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java index 13164db3f9a..144c14418d7 100644 --- a/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java +++ b/api/src/test/java/io/druid/data/input/impl/InputRowParserSerdeTest.java @@ -27,8 +27,8 @@ import com.google.common.collect.Lists; import io.druid.TestObjectMapper; import io.druid.data.input.ByteBufferInputRowParser; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; @@ -63,7 +63,7 @@ public class InputRowParserSerdeTest Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions()); Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo")); Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar")); - Assert.assertEquals(new DateTime("2000").getMillis(), parsed.getTimestampFromEpoch()); + Assert.assertEquals(DateTimes.of("2000").getMillis(), parsed.getTimestampFromEpoch()); } @Test @@ -79,7 +79,7 @@ public class InputRowParserSerdeTest Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions()); Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo")); Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar")); - Assert.assertEquals(new DateTime("3000").getMillis(), parsed.getTimestampFromEpoch()); + Assert.assertEquals(DateTimes.of("3000").getMillis(), parsed.getTimestampFromEpoch()); } } @@ -219,7 +219,7 @@ public class InputRowParserSerdeTest Assert.assertEquals(ImmutableList.of("asdf"), parsed.getDimension("hey0barx")); Assert.assertEquals(ImmutableList.of("456"), parsed.getDimension("metA")); Assert.assertEquals(ImmutableList.of("5"), parsed.getDimension("newmet")); - Assert.assertEquals(new DateTime("2999").getMillis(), parsed.getTimestampFromEpoch()); + Assert.assertEquals(DateTimes.of("2999").getMillis(), parsed.getTimestampFromEpoch()); String testSpec = "{\"enabled\": true,\"useFieldDiscovery\": true, \"fields\": [\"parseThisRootField\"]}"; final JSONPathSpec parsedSpec = jsonMapper.readValue(testSpec, JSONPathSpec.class); diff --git a/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java index b2e6f4681ad..d8fc9fde77e 100644 --- a/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/JSONLowercaseParseSpecTest.java @@ -20,7 +20,6 @@ package io.druid.data.input.impl; import com.google.common.collect.Lists; - import io.druid.java.util.common.parsers.Parser; import junit.framework.Assert; import org.junit.Test; diff --git a/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java index a1e95c028ad..daf58c648af 100644 --- a/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/ParseSpecTest.java @@ -20,9 +20,7 @@ package 
io.druid.data.input.impl; import com.google.common.collect.Lists; - import io.druid.java.util.common.parsers.ParseException; - import org.junit.Test; import java.util.Arrays; diff --git a/api/src/test/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactoryTest.java b/api/src/test/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactoryTest.java index 1af0ff4968c..d41164effc1 100644 --- a/api/src/test/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactoryTest.java +++ b/api/src/test/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactoryTest.java @@ -24,10 +24,10 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import io.druid.data.input.Firehose; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.TrueFileFilter; import org.hamcrest.CoreMatchers; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; @@ -124,7 +124,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest for (int i = 0; i < 10; i++) { for (int j = 0; j < 100; j++) { final Row row = rows.get(i * 100 + j); - Assert.assertEquals(new DateTime(20171220 + i), row.getTimestamp()); + Assert.assertEquals(DateTimes.utc(20171220 + i), row.getTimestamp()); Assert.assertEquals(String.valueOf(i), row.getDimension("a").get(0)); Assert.assertEquals(String.valueOf(j), row.getDimension("b").get(0)); } diff --git a/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java b/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java index 0357ae9ada1..52667a098b5 100644 --- a/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/TimestampSpecTest.java @@ -20,6 +20,7 @@ package io.druid.data.input.impl; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.format.ISODateTimeFormat; import org.junit.Assert; @@ -32,7 +33,7 @@ public class TimestampSpecTest { TimestampSpec spec = new TimestampSpec("TIMEstamp", "yyyy-MM-dd", null); Assert.assertEquals( - new DateTime("2014-03-01"), + DateTimes.of("2014-03-01"), spec.extractTimestamp(ImmutableMap.of("TIMEstamp", "2014-03-01")) ); } @@ -40,9 +41,9 @@ public class TimestampSpecTest @Test public void testExtractTimestampWithMissingTimestampColumn() throws Exception { - TimestampSpec spec = new TimestampSpec(null, null, new DateTime(0)); + TimestampSpec spec = new TimestampSpec(null, null, DateTimes.EPOCH); Assert.assertEquals( - new DateTime("1970-01-01"), + DateTimes.of("1970-01-01"), spec.extractTimestamp(ImmutableMap.of("dim", "foo")) ); } diff --git a/api/src/test/java/io/druid/timeline/DataSegmentTest.java b/api/src/test/java/io/druid/timeline/DataSegmentTest.java index 5488e97d965..b5c60d717bd 100644 --- a/api/src/test/java/io/druid/timeline/DataSegmentTest.java +++ b/api/src/test/java/io/druid/timeline/DataSegmentTest.java @@ -19,7 +19,6 @@ package io.druid.timeline; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,11 +27,13 @@ import com.google.common.collect.Range; import com.google.common.collect.Sets; import io.druid.TestObjectMapper; import io.druid.data.input.InputRow; +import 
io.druid.java.util.common.jackson.JacksonUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.PartitionChunk; import io.druid.timeline.partition.ShardSpec; import io.druid.timeline.partition.ShardSpecLookup; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -90,7 +91,7 @@ public class DataSegmentTest public void testV1Serialization() throws Exception { - final Interval interval = new Interval("2011-10-01/2011-10-02"); + final Interval interval = Intervals.of("2011-10-01/2011-10-02"); final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( @@ -107,9 +108,7 @@ public class DataSegmentTest final Map objectMap = mapper.readValue( mapper.writeValueAsString(segment), - new TypeReference>() - { - } + JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); Assert.assertEquals(10, objectMap.size()); @@ -150,8 +149,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(NoneShardSpec.instance()) .build(); @@ -166,8 +165,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(getShardSpec(0)) .build(); @@ -182,8 +181,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(getShardSpec(7)) .build(); @@ -198,8 +197,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .build(); final DataSegment segment2 = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class); @@ -240,7 +239,7 @@ public class DataSegmentTest { return DataSegment.builder() .dataSource(dataSource) - .interval(new Interval(interval)) + .interval(Intervals.of(interval)) .version(version) .size(1) .build(); diff --git a/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java b/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java index 5ae9d1dae69..a58fe0b6f6c 100644 --- a/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java +++ b/api/src/test/java/io/druid/timeline/DataSegmentUtilsTest.java @@ -19,8 +19,8 @@ package io.druid.timeline; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegmentUtils.SegmentIdentifierParts; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -32,19 +32,19 @@ public class DataSegmentUtilsTest public void testBasic() { String datasource = "datasource"; 
- SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", "0_0"); + SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0"); Assert.assertEquals("datasource_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals("datasource_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", null); + desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", null); Assert.assertEquals("datasource_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals("datasource_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); } @@ -53,19 +53,19 @@ public class DataSegmentUtilsTest public void testDataSourceWithUnderscore1() { String datasource = "datasource_1"; - SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", "0_0"); + SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0"); Assert.assertEquals("datasource_1_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals("datasource_1_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", null); + desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", null); Assert.assertEquals("datasource_1_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals("datasource_1_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver", desc.toString()); Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString())); } @@ -74,28 +74,28 @@ public class DataSegmentUtilsTest public void testDataSourceWithUnderscore2() { String dataSource = "datasource_2015-01-01T00:00:00.000Z"; - SegmentIdentifierParts desc = new SegmentIdentifierParts(dataSource, new Interval("2015-01-02/2015-01-03"), "ver", "0_0"); + SegmentIdentifierParts desc = new SegmentIdentifierParts(dataSource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0"); 
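    // Editor's note (illustration only, not part of the patch): the identifier format these tests
    // exercise is "<dataSource>_<intervalStart>_<intervalEnd>_<version>[_<trail>]", and
    // DataSegmentUtils.valueOf() parses such a string back into SegmentIdentifierParts, e.g.:
    SegmentIdentifierParts parts =
        new SegmentIdentifierParts("datasource", Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0");
    String id = parts.toString();
    // -> "datasource_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0"
    Assert.assertEquals(parts, DataSegmentUtils.valueOf("datasource", id));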
Assert.assertEquals( "datasource_2015-01-01T00:00:00.000Z_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0", desc.toString() ); Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals( "datasource_2015-01-01T00:00:00.000Z_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0", desc.toString() ); Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString())); - desc = new SegmentIdentifierParts(dataSource, new Interval("2015-01-02/2015-01-03"), "ver", null); + desc = new SegmentIdentifierParts(dataSource, Intervals.of("2015-01-02/2015-01-03"), "ver", null); Assert.assertEquals( "datasource_2015-01-01T00:00:00.000Z_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver", desc.toString() ); Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString())); - desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D")); + desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D")); Assert.assertEquals( "datasource_2015-01-01T00:00:00.000Z_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver", desc.toString() diff --git a/benchmarks/src/main/java/io/druid/benchmark/ExpressionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/ExpressionBenchmark.java index a8ce3d2837b..7954ac782b1 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/ExpressionBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/ExpressionBenchmark.java @@ -24,6 +24,7 @@ import com.google.common.collect.Iterables; import io.druid.benchmark.datagen.BenchmarkColumnSchema; import io.druid.benchmark.datagen.BenchmarkSchemaInfo; import io.druid.benchmark.datagen.SegmentGenerator; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -41,7 +42,6 @@ import io.druid.segment.VirtualColumns; import io.druid.segment.column.ValueType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; -import org.joda.time.Interval; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -89,7 +89,7 @@ public class ExpressionBenchmark BenchmarkColumnSchema.makeNormal("y", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false) ), ImmutableList.of(), - new Interval("2000/P1D"), + Intervals.of("2000/P1D"), false ); @@ -176,7 +176,7 @@ public class ExpressionBenchmark Sequences.map( cursors, cursor -> { - final BufferAggregator bufferAggregator = aggregatorFactory.apply(cursor); + final BufferAggregator bufferAggregator = aggregatorFactory.apply(cursor.getColumnSelectorFactory()); bufferAggregator.init(aggregationBuffer, 0); while (!cursor.isDone()) { diff --git a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java index cf547d4c833..0ec205463d3 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/FilterPartitionBenchmark.java @@ -514,7 +514,9 @@ public class FilterPartitionBenchmark { List strings = new ArrayList(); List selectors = new ArrayList<>(); - selectors.add(input.makeDimensionSelector(new DefaultDimensionSpec("dimSequential", null))); + 
selectors.add( + input.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("dimSequential", null)) + ); //selectors.add(input.makeDimensionSelector(new DefaultDimensionSpec("dimB", null))); while (!input.isDone()) { for (DimensionSelector selector : selectors) { @@ -540,7 +542,7 @@ public class FilterPartitionBenchmark public List apply(Cursor input) { List longvals = new ArrayList(); - LongColumnSelector selector = input.makeLongColumnSelector("sumLongSequential"); + LongColumnSelector selector = input.getColumnSelectorFactory().makeLongColumnSelector("sumLongSequential"); while (!input.isDone()) { long rowval = selector.getLong(); blackhole.consume(rowval); diff --git a/benchmarks/src/main/java/io/druid/benchmark/TimeParseBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/TimeParseBenchmark.java index f18cf2c084a..ca700b87e64 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/TimeParseBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/TimeParseBenchmark.java @@ -39,6 +39,7 @@ import org.openjdk.jmh.runner.options.OptionsBuilder; import java.text.SimpleDateFormat; import java.util.Date; +import java.util.Locale; import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) @@ -60,7 +61,7 @@ public class TimeParseBenchmark @Setup public void setup() { - SimpleDateFormat format = new SimpleDateFormat(DATA_FORMAT); + SimpleDateFormat format = new SimpleDateFormat(DATA_FORMAT, Locale.ENGLISH); long start = System.currentTimeMillis(); int rowsPerBatch = numRows / numBatches; int numRowInBatch = 0; diff --git a/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java b/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java index f8b5da8dcc9..3f9a1dc9ab2 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java +++ b/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkSchemas.java @@ -20,6 +20,7 @@ package io.druid.benchmark.datagen; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.Intervals; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; @@ -84,7 +85,7 @@ public class BenchmarkSchemas basicSchemaIngestAggs.add(new DoubleMinAggregatorFactory("minFloatZipf", "metFloatZipf")); basicSchemaIngestAggs.add(new HyperUniquesAggregatorFactory("hyper", "dimHyperUnique")); - Interval basicSchemaDataInterval = new Interval(0, 1000000); + Interval basicSchemaDataInterval = Intervals.utc(0, 1000000); BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo( basicSchemaColumns, @@ -104,7 +105,7 @@ public class BenchmarkSchemas List basicSchemaIngestAggs = new ArrayList<>(); basicSchemaIngestAggs.add(new CountAggregatorFactory("rows")); - Interval basicSchemaDataInterval = new Interval(0, 1000000); + Interval basicSchemaDataInterval = Intervals.utc(0, 1000000); BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo( basicSchemaColumns, @@ -125,7 +126,7 @@ public class BenchmarkSchemas basicSchemaIngestAggs.add(new LongSumAggregatorFactory("dimSequential", "dimSequential")); basicSchemaIngestAggs.add(new CountAggregatorFactory("rows")); - Interval basicSchemaDataInterval = new Interval(0, 1000000); + Interval basicSchemaDataInterval = Intervals.utc(0, 1000000); BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo( basicSchemaColumns, @@ -146,7 +147,7 @@ public class BenchmarkSchemas basicSchemaIngestAggs.add(new 
DoubleSumAggregatorFactory("dimSequential", "dimSequential")); basicSchemaIngestAggs.add(new CountAggregatorFactory("rows")); - Interval basicSchemaDataInterval = new Interval(0, 1000000); + Interval basicSchemaDataInterval = Intervals.utc(0, 1000000); BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo( basicSchemaColumns, diff --git a/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java index 57adb66c88a..b61a46474de 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/indexing/IncrementalIndexReadBenchmark.java @@ -144,10 +144,10 @@ public class IncrementalIndexReadBenchmark Cursor cursor = Sequences.toList(Sequences.limit(cursors, 1), Lists.newArrayList()).get(0); List selectors = new ArrayList<>(); - selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimSequential", null))); - selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimZipf", null))); - selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimUniform", null))); - selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimSequentialHalfNull", null))); + selectors.add(makeDimensionSelector(cursor, "dimSequential")); + selectors.add(makeDimensionSelector(cursor, "dimZipf")); + selectors.add(makeDimensionSelector(cursor, "dimUniform")); + selectors.add(makeDimensionSelector(cursor, "dimSequentialHalfNull")); cursor.reset(); while (!cursor.isDone()) { @@ -179,10 +179,10 @@ public class IncrementalIndexReadBenchmark Cursor cursor = Sequences.toList(Sequences.limit(cursors, 1), Lists.newArrayList()).get(0); List selectors = new ArrayList<>(); - selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimSequential", null))); - selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimZipf", null))); - selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimUniform", null))); - selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimSequentialHalfNull", null))); + selectors.add(makeDimensionSelector(cursor, "dimSequential")); + selectors.add(makeDimensionSelector(cursor, "dimZipf")); + selectors.add(makeDimensionSelector(cursor, "dimUniform")); + selectors.add(makeDimensionSelector(cursor, "dimSequentialHalfNull")); cursor.reset(); while (!cursor.isDone()) { @@ -205,4 +205,9 @@ public class IncrementalIndexReadBenchmark null ); } + + private static DimensionSelector makeDimensionSelector(Cursor cursor, String name) + { + return cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec(name, null)); + } } diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java index a54cdf3d44b..a4321afaa58 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/SqlBenchmark.java @@ -25,8 +25,8 @@ import com.google.common.io.Files; import io.druid.benchmark.datagen.BenchmarkSchemaInfo; import io.druid.benchmark.datagen.BenchmarkSchemas; import io.druid.benchmark.datagen.SegmentGenerator; -import io.druid.common.utils.JodaUtils; import io.druid.data.input.Row; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; 
import io.druid.java.util.common.guava.Sequences; @@ -48,7 +48,6 @@ import io.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -119,12 +118,13 @@ public class SqlBenchmark CalciteTests.createMockQueryLifecycleFactory(walker), CalciteTests.createOperatorTable(), CalciteTests.createExprMacroTable(), - plannerConfig + plannerConfig, + CalciteTests.getJsonMapper() ); groupByQuery = GroupByQuery .builder() .setDataSource("foo") - .setInterval(new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT)) + .setInterval(Intervals.ETERNITY) .setDimensions( Arrays.asList( new DefaultDimensionSpec("dimZipf", "d0"), diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java index 028334c5997..d428352738e 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/TimeseriesBenchmark.java @@ -30,6 +30,7 @@ import io.druid.concurrent.Execs; import io.druid.data.input.InputRow; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -72,7 +73,6 @@ import io.druid.segment.column.ColumnConfig; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.serde.ComplexMetrics; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -214,7 +214,7 @@ public class TimeseriesBenchmark basicQueries.put("timeFilterAlphanumeric", timeFilterQuery); } { - QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval(200000, 300000))); + QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.utc(200000, 300000))); List queryAggs = new ArrayList<>(); LongSumAggregatorFactory lsaf = new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential"); queryAggs.add(lsaf); diff --git a/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java b/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java index ef4eca1ca57..4f397ec882c 100644 --- a/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java +++ b/benchmarks/src/main/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java @@ -19,6 +19,7 @@ package io.druid.server.coordinator; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -39,7 +40,7 @@ import java.util.concurrent.TimeUnit; @State(Scope.Benchmark) public class CostBalancerStrategyBenchmark { - private final static DateTime t0 = new DateTime("2016-01-01T01:00:00Z"); + private final static DateTime t0 = DateTimes.of("2016-01-01T01:00:00Z"); private List segments; private DataSegment segment; diff --git 
a/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java b/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java index dd6f271f066..593dd76a76b 100644 --- a/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java +++ b/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java @@ -22,8 +22,8 @@ package io.druid.benchmark; import io.druid.benchmark.datagen.BenchmarkColumnSchema; import io.druid.benchmark.datagen.BenchmarkDataGenerator; import io.druid.data.input.InputRow; +import io.druid.java.util.common.Intervals; import io.druid.segment.column.ValueType; -import org.joda.time.Interval; import org.junit.Test; import java.util.ArrayList; @@ -378,13 +378,13 @@ public class BenchmarkDataGeneratorTest ) ); - BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 600000), 100); + BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(schemas, 9999, Intervals.utc(50000, 600000), 100); for (int i = 0; i < 100; i++) { InputRow row = dataGenerator.nextRow(); //System.out.println("S-ROW: " + row); } - BenchmarkDataGenerator dataGenerator2 = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 50001), 100); + BenchmarkDataGenerator dataGenerator2 = new BenchmarkDataGenerator(schemas, 9999, Intervals.utc(50000, 50001), 100); for (int i = 0; i < 100; i++) { InputRow row = dataGenerator2.nextRow(); //System.out.println("S2-ROW: " + row); diff --git a/codestyle/joda-time-forbidden-apis.txt b/codestyle/joda-time-forbidden-apis.txt new file mode 100644 index 00000000000..76b1fe59f42 --- /dev/null +++ b/codestyle/joda-time-forbidden-apis.txt @@ -0,0 +1,46 @@ +@defaultMessage Uses default time zone +org.joda.time.DateTime#<init>() +org.joda.time.DateTime#<init>(long) +org.joda.time.DateTime#<init>(java.lang.Object) +org.joda.time.DateTime#<init>(int, int, int, int, int) +org.joda.time.DateTime#<init>(int, int, int, int, int, int) +org.joda.time.DateTime#<init>(int, int, int, int, int, int, int) +org.joda.time.DateTime#now() +org.joda.time.Instant#toDateTime() +org.joda.time.Instant#toMutableDateTime() +org.joda.time.Instant#toMutableDateTimeISO() +org.joda.time.base.AbstractInstant#toDateTimeISO() +org.joda.time.base.AbstractInstant#toDateTime() +org.joda.time.base.AbstractInstant#toMutableDateTime() +org.joda.time.base.AbstractInstant#toMutableDateTimeISO() +org.joda.time.LocalDateTime#<init>() +org.joda.time.LocalDateTime#<init>(long) +org.joda.time.LocalDateTime#<init>(java.lang.Object) +org.joda.time.LocalDateTime#now() +org.joda.time.LocalDateTime#fromDateFields(java.util.Date) +org.joda.time.LocalDateTime#toDate() +org.joda.time.LocalDateTime#toDateTime() +org.joda.time.LocalDate#<init>() +org.joda.time.LocalDate#<init>(long) +org.joda.time.LocalDate#<init>(java.lang.Object) +org.joda.time.LocalDate#fromDateFields(java.util.Date) +org.joda.time.LocalDate#now() +org.joda.time.LocalDate#toDate() +org.joda.time.LocalDate#toDateTime(org.joda.time.LocalTime) +org.joda.time.LocalDate#toDateTimeAtCurrentTime() +org.joda.time.LocalDate#toDateTimeAtStartOfDay() +org.joda.time.LocalDate#toInterval() +org.joda.time.LocalTime#<init>() +org.joda.time.LocalTime#<init>(long) +org.joda.time.LocalTime#<init>(java.lang.Object) +org.joda.time.LocalTime#fromDateFields(java.util.Date) +org.joda.time.LocalTime#now() +org.joda.time.LocalTime#toDateTimeToday() +org.joda.time.Interval#<init>(long, long) +org.joda.time.Interval#<init>(java.lang.Object) +org.joda.time.Interval#parse(java.lang.String) +org.joda.time.Interval#parseWithOffset(java.lang.String) + +@defaultMessage Doesn't
handle edge cases where the start of day isn't midnight. +org.joda.time.LocalDate#toDateTimeAtMidnight() +org.joda.time.DateMidnight \ No newline at end of file diff --git a/common/src/main/java/io/druid/audit/AuditEntry.java b/common/src/main/java/io/druid/audit/AuditEntry.java index c1c3bcaa114..4b1d5bacb3f 100644 --- a/common/src/main/java/io/druid/audit/AuditEntry.java +++ b/common/src/main/java/io/druid/audit/AuditEntry.java @@ -22,6 +22,7 @@ package io.druid.audit; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; /** @@ -50,7 +51,7 @@ public class AuditEntry this.key = key; this.type = type; this.auditInfo = authorInfo; - this.auditTime = auditTime == null ? DateTime.now() : auditTime; + this.auditTime = auditTime == null ? DateTimes.nowUtc() : auditTime; this.payload = payload; } @@ -151,7 +152,7 @@ public class AuditEntry this.key = null; this.auditInfo = null; this.payload = null; - this.auditTime = DateTime.now(); + this.auditTime = DateTimes.nowUtc(); } public Builder key(String key) diff --git a/common/src/main/java/io/druid/math/expr/Function.java b/common/src/main/java/io/druid/math/expr/Function.java index df226372080..b3e6ac87e04 100644 --- a/common/src/main/java/io/druid/math/expr/Function.java +++ b/common/src/main/java/io/druid/math/expr/Function.java @@ -30,7 +30,9 @@ import org.joda.time.format.ISODateTimeFormat; import java.util.List; /** + * Do NOT remove "unused" members in this class. They are used by generated Antlr */ +@SuppressWarnings("unused") interface Function { String name(); @@ -1024,7 +1026,7 @@ interface Function } final String arg = args.get(0).eval(bindings).asString(); - return ExprEval.of(Strings.nullToEmpty(arg).toLowerCase()); + return ExprEval.of(StringUtils.toLowerCase(Strings.nullToEmpty(arg))); } } @@ -1044,7 +1046,7 @@ interface Function } final String arg = args.get(0).eval(bindings).asString(); - return ExprEval.of(Strings.nullToEmpty(arg).toUpperCase()); + return ExprEval.of(StringUtils.toUpperCase(Strings.nullToEmpty(arg))); } } } diff --git a/common/src/main/java/io/druid/metadata/PasswordProvider.java b/common/src/main/java/io/druid/metadata/PasswordProvider.java index ec8855aeb21..5d2928cb054 100644 --- a/common/src/main/java/io/druid/metadata/PasswordProvider.java +++ b/common/src/main/java/io/druid/metadata/PasswordProvider.java @@ -21,11 +21,13 @@ package io.druid.metadata; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; +import io.druid.guice.annotations.ExtensionPoint; /** * Implement this for different ways to (optionally securely) access secrets. 
*/ +@ExtensionPoint @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DefaultPasswordProvider.class) @JsonSubTypes(value = { @JsonSubTypes.Type(name = "default", value = DefaultPasswordProvider.class), diff --git a/common/src/main/java/io/druid/timeline/LogicalSegment.java b/common/src/main/java/io/druid/timeline/LogicalSegment.java index c053b7b655c..f5e5c4f9f35 100644 --- a/common/src/main/java/io/druid/timeline/LogicalSegment.java +++ b/common/src/main/java/io/druid/timeline/LogicalSegment.java @@ -19,8 +19,10 @@ package io.druid.timeline; +import io.druid.guice.annotations.PublicApi; import org.joda.time.Interval; +@PublicApi public interface LogicalSegment { public Interval getInterval(); diff --git a/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java b/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java index 831438c22af..aa5a7e81e93 100644 --- a/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java +++ b/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java @@ -25,7 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.UOE; import io.druid.java.util.common.guava.Comparators; import io.druid.timeline.partition.ImmutablePartitionHolder; @@ -288,7 +288,7 @@ public class VersionedIntervalTimeline implements Timel } Interval lower = completePartitionsTimeline.floorKey( - new Interval(interval.getStartMillis(), JodaUtils.MAX_INSTANT) + new Interval(interval.getStart(), DateTimes.MAX) ); if (lower == null || !lower.overlaps(interval)) { diff --git a/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java b/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java index 5c4a7646b34..df5c3791afa 100644 --- a/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java +++ b/common/src/test/java/io/druid/common/utils/JodaUtilsTest.java @@ -19,6 +19,8 @@ package io.druid.common.utils; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.JodaUtils; import org.joda.time.Duration; import org.joda.time.Interval; import org.joda.time.Period; @@ -37,18 +39,18 @@ public class JodaUtilsTest public void testUmbrellaIntervalsSimple() throws Exception { List intervals = Arrays.asList( - new Interval("2011-03-03/2011-03-04"), - new Interval("2011-01-01/2011-01-02"), - new Interval("2011-02-01/2011-02-05"), - new Interval("2011-02-03/2011-02-08"), - new Interval("2011-01-01/2011-01-03"), - new Interval("2011-03-01/2011-03-02"), - new Interval("2011-03-05/2011-03-06"), - new Interval("2011-02-01/2011-02-02") + Intervals.of("2011-03-03/2011-03-04"), + Intervals.of("2011-01-01/2011-01-02"), + Intervals.of("2011-02-01/2011-02-05"), + Intervals.of("2011-02-03/2011-02-08"), + Intervals.of("2011-01-01/2011-01-03"), + Intervals.of("2011-03-01/2011-03-02"), + Intervals.of("2011-03-05/2011-03-06"), + Intervals.of("2011-02-01/2011-02-02") ); Assert.assertEquals( - new Interval("2011-01-01/2011-03-06"), + Intervals.of("2011-01-01/2011-03-06"), JodaUtils.umbrellaInterval(intervals) ); } @@ -71,23 +73,23 @@ public class JodaUtilsTest public void testCondenseIntervalsSimple() throws Exception { List intervals = Arrays.asList( - new Interval("2011-01-01/2011-01-02"), - new Interval("2011-01-02/2011-01-03"), - new Interval("2011-02-01/2011-02-05"), - new 
Interval("2011-02-01/2011-02-02"), - new Interval("2011-02-03/2011-02-08"), - new Interval("2011-03-01/2011-03-02"), - new Interval("2011-03-03/2011-03-04"), - new Interval("2011-03-05/2011-03-06") + Intervals.of("2011-01-01/2011-01-02"), + Intervals.of("2011-01-02/2011-01-03"), + Intervals.of("2011-02-01/2011-02-05"), + Intervals.of("2011-02-01/2011-02-02"), + Intervals.of("2011-02-03/2011-02-08"), + Intervals.of("2011-03-01/2011-03-02"), + Intervals.of("2011-03-03/2011-03-04"), + Intervals.of("2011-03-05/2011-03-06") ); Assert.assertEquals( Arrays.asList( - new Interval("2011-01-01/2011-01-03"), - new Interval("2011-02-01/2011-02-08"), - new Interval("2011-03-01/2011-03-02"), - new Interval("2011-03-03/2011-03-04"), - new Interval("2011-03-05/2011-03-06") + Intervals.of("2011-01-01/2011-01-03"), + Intervals.of("2011-02-01/2011-02-08"), + Intervals.of("2011-03-01/2011-03-02"), + Intervals.of("2011-03-03/2011-03-04"), + Intervals.of("2011-03-05/2011-03-06") ), JodaUtils.condenseIntervals(intervals) ); @@ -97,31 +99,31 @@ public class JodaUtilsTest public void testCondenseIntervalsMixedUp() throws Exception { List intervals = Arrays.asList( - new Interval("2011-01-01/2011-01-02"), - new Interval("2011-01-02/2011-01-03"), - new Interval("2011-02-01/2011-02-05"), - new Interval("2011-02-01/2011-02-02"), - new Interval("2011-02-03/2011-02-08"), - new Interval("2011-03-01/2011-03-02"), - new Interval("2011-03-03/2011-03-04"), - new Interval("2011-03-05/2011-03-06"), - new Interval("2011-04-01/2011-04-05"), - new Interval("2011-04-02/2011-04-03"), - new Interval("2011-05-01/2011-05-05"), - new Interval("2011-05-02/2011-05-07") + Intervals.of("2011-01-01/2011-01-02"), + Intervals.of("2011-01-02/2011-01-03"), + Intervals.of("2011-02-01/2011-02-05"), + Intervals.of("2011-02-01/2011-02-02"), + Intervals.of("2011-02-03/2011-02-08"), + Intervals.of("2011-03-01/2011-03-02"), + Intervals.of("2011-03-03/2011-03-04"), + Intervals.of("2011-03-05/2011-03-06"), + Intervals.of("2011-04-01/2011-04-05"), + Intervals.of("2011-04-02/2011-04-03"), + Intervals.of("2011-05-01/2011-05-05"), + Intervals.of("2011-05-02/2011-05-07") ); for (int i = 0; i < 20; ++i) { Collections.shuffle(intervals); Assert.assertEquals( Arrays.asList( - new Interval("2011-01-01/2011-01-03"), - new Interval("2011-02-01/2011-02-08"), - new Interval("2011-03-01/2011-03-02"), - new Interval("2011-03-03/2011-03-04"), - new Interval("2011-03-05/2011-03-06"), - new Interval("2011-04-01/2011-04-05"), - new Interval("2011-05-01/2011-05-07") + Intervals.of("2011-01-01/2011-01-03"), + Intervals.of("2011-02-01/2011-02-08"), + Intervals.of("2011-03-01/2011-03-02"), + Intervals.of("2011-03-03/2011-03-04"), + Intervals.of("2011-03-05/2011-03-06"), + Intervals.of("2011-04-01/2011-04-05"), + Intervals.of("2011-05-01/2011-05-07") ), JodaUtils.condenseIntervals(intervals) ); @@ -131,15 +133,13 @@ public class JodaUtilsTest @Test public void testMinMaxInterval() { - final Interval interval = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); - Assert.assertEquals(Long.MAX_VALUE, interval.toDuration().getMillis()); + Assert.assertEquals(Long.MAX_VALUE, Intervals.ETERNITY.toDuration().getMillis()); } @Test public void testMinMaxDuration() { - final Interval interval = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); - final Duration duration = interval.toDuration(); + final Duration duration = Intervals.ETERNITY.toDuration(); Assert.assertEquals(Long.MAX_VALUE, duration.getMillis()); Assert.assertEquals("PT9223372036854775.807S", 
duration.toString()); } @@ -148,8 +148,7 @@ public class JodaUtilsTest @Test(expected = ArithmeticException.class) public void testMinMaxPeriod() { - final Interval interval = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); - final Period period = interval.toDuration().toPeriod(); + final Period period = Intervals.ETERNITY.toDuration().toPeriod(); Assert.assertEquals(Long.MAX_VALUE, period.getMinutes()); } diff --git a/common/src/test/java/io/druid/concurrent/ExecsTest.java b/common/src/test/java/io/druid/concurrent/ExecsTest.java index fdb75260737..714d3516366 100644 --- a/common/src/test/java/io/druid/concurrent/ExecsTest.java +++ b/common/src/test/java/io/druid/concurrent/ExecsTest.java @@ -76,7 +76,7 @@ public class ExecsTest { for (int i = 0; i < nTasks; i++) { final int taskID = i; - System.out.println("Produced task" + taskID); + log.info("Produced task %d", taskID); blockingExecutor.submit( new Runnable() { diff --git a/common/src/test/java/io/druid/concurrent/LifecycleLockTest.java b/common/src/test/java/io/druid/concurrent/LifecycleLockTest.java index d981aeb081e..e1d9f8adbbf 100644 --- a/common/src/test/java/io/druid/concurrent/LifecycleLockTest.java +++ b/common/src/test/java/io/druid/concurrent/LifecycleLockTest.java @@ -57,7 +57,7 @@ public class LifecycleLockTest finishLatch.countDown(); } catch (InterruptedException e) { - e.printStackTrace(); + throw new RuntimeException(e); } } }.start(); @@ -99,7 +99,7 @@ public class LifecycleLockTest finishLatch.countDown(); } catch (InterruptedException e) { - e.printStackTrace(); + throw new RuntimeException(e); } } }.start(); diff --git a/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java b/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java index 6f955fa2933..0eb158798f9 100644 --- a/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java +++ b/common/src/test/java/io/druid/timeline/VersionedIntervalTimelineTest.java @@ -25,8 +25,8 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; - -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.timeline.partition.ImmutablePartitionHolder; import io.druid.timeline.partition.IntegerPartitionChunk; @@ -86,7 +86,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-04-02/2011-04-06", "2", 1), createExpected("2011-04-06/2011-04-09", "3", 4) ), - timeline.lookup(new Interval("2011-04-01/2011-04-09")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-09")) ); } @@ -95,7 +95,7 @@ public class VersionedIntervalTimelineTest { Assert.assertEquals( makeSingle(1), - timeline.remove(new Interval("2011-04-01/2011-04-09"), "2", makeSingle(1)) + timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "2", makeSingle(1)) ); assertValues( Arrays.asList( @@ -104,7 +104,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-04-03/2011-04-06", "1", 3), createExpected("2011-04-06/2011-04-09", "3", 4) ), - timeline.lookup(new Interval("2011-04-01/2011-04-09")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-09")) ); } @@ -113,11 +113,11 @@ public class VersionedIntervalTimelineTest { Assert.assertEquals( makeSingle(1), - timeline.remove(new Interval("2011-04-01/2011-04-09"), "2", makeSingle(1)) + timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "2", 
makeSingle(1)) ); Assert.assertEquals( makeSingle(2), - timeline.remove(new Interval("2011-04-01/2011-04-03"), "1", makeSingle(2)) + timeline.remove(Intervals.of("2011-04-01/2011-04-03"), "1", makeSingle(2)) ); assertValues( Arrays.asList( @@ -125,7 +125,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-04-03/2011-04-06", "1", 3), createExpected("2011-04-06/2011-04-09", "3", 4) ), - timeline.lookup(new Interval("2011-04-01/2011-04-09")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-09")) ); } @@ -134,7 +134,7 @@ public class VersionedIntervalTimelineTest { Assert.assertEquals( makeSingle(1), - timeline.remove(new Interval("2011-04-01/2011-04-09"), "2", makeSingle(1)) + timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "2", makeSingle(1)) ); assertValues( Arrays.asList( @@ -142,7 +142,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-04-02/2011-04-03", "1", 2), createExpected("2011-04-03/2011-04-05", "1", 3) ), - timeline.lookup(new Interval("2011-04-01/2011-04-05")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-05")) ); assertValues( @@ -150,7 +150,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-04-02T18/2011-04-03", "1", 2), createExpected("2011-04-03/2011-04-04T01", "1", 3) ), - timeline.lookup(new Interval("2011-04-02T18/2011-04-04T01")) + timeline.lookup(Intervals.of("2011-04-02T18/2011-04-04T01")) ); } @@ -161,21 +161,21 @@ public class VersionedIntervalTimelineTest Collections.singletonList( createExpected("2011-05-01/2011-05-09", "4", 9) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); } @Test public void testMay2() throws Exception { - Assert.assertNotNull(timeline.remove(new Interval("2011-05-01/2011-05-10"), "4", makeSingle(1))); + Assert.assertNotNull(timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(1))); assertValues( Arrays.asList( createExpected("2011-05-01/2011-05-03", "2", 7), createExpected("2011-05-03/2011-05-04", "3", 8), createExpected("2011-05-04/2011-05-05", "2", 7) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); } @@ -184,25 +184,25 @@ public class VersionedIntervalTimelineTest { Assert.assertEquals( makeSingle(9), - timeline.remove(new Interval("2011-05-01/2011-05-10"), "4", makeSingle(9)) + timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(9)) ); Assert.assertEquals( makeSingle(7), - timeline.remove(new Interval("2011-05-01/2011-05-05"), "2", makeSingle(7)) + timeline.remove(Intervals.of("2011-05-01/2011-05-05"), "2", makeSingle(7)) ); assertValues( Arrays.asList( createExpected("2011-05-01/2011-05-02", "1", 6), createExpected("2011-05-03/2011-05-04", "3", 8) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); } @Test public void testInsertInWrongOrder() throws Exception { - DateTime overallStart = new DateTime().minus(Hours.TWO); + DateTime overallStart = DateTimes.nowUtc().minus(Hours.TWO); Assert.assertTrue( "These timestamps have to be at the end AND include now for this test to work.", @@ -241,32 +241,32 @@ public class VersionedIntervalTimelineTest { Assert.assertEquals( new ImmutablePartitionHolder(new PartitionHolder(makeSingle(1))), - timeline.findEntry(new Interval("2011-10-01/2011-10-02"), "1") + timeline.findEntry(Intervals.of("2011-10-01/2011-10-02"), "1") ); Assert.assertEquals( new ImmutablePartitionHolder(new 
PartitionHolder(makeSingle(1))), - timeline.findEntry(new Interval("2011-10-01/2011-10-01T10"), "1") + timeline.findEntry(Intervals.of("2011-10-01/2011-10-01T10"), "1") ); Assert.assertEquals( new ImmutablePartitionHolder(new PartitionHolder(makeSingle(1))), - timeline.findEntry(new Interval("2011-10-01T02/2011-10-02"), "1") + timeline.findEntry(Intervals.of("2011-10-01T02/2011-10-02"), "1") ); Assert.assertEquals( new ImmutablePartitionHolder(new PartitionHolder(makeSingle(1))), - timeline.findEntry(new Interval("2011-10-01T04/2011-10-01T17"), "1") + timeline.findEntry(Intervals.of("2011-10-01T04/2011-10-01T17"), "1") ); Assert.assertEquals( null, - timeline.findEntry(new Interval("2011-10-01T04/2011-10-01T17"), "2") + timeline.findEntry(Intervals.of("2011-10-01T04/2011-10-01T17"), "2") ); Assert.assertEquals( null, - timeline.findEntry(new Interval("2011-10-01T04/2011-10-02T17"), "1") + timeline.findEntry(Intervals.of("2011-10-01T04/2011-10-02T17"), "1") ); } @@ -280,7 +280,7 @@ public class VersionedIntervalTimelineTest Assert.assertEquals( new ImmutablePartitionHolder(new PartitionHolder(makeSingle(1))), - timeline.findEntry(new Interval("2011-01-02T02/2011-01-04"), "1") + timeline.findEntry(Intervals.of("2011-01-02T02/2011-01-04"), "1") ); } @@ -301,7 +301,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-10-04/2011-10-05", "4", 4), createExpected("2011-10-05/2011-10-06", "5", 5) ), - timeline.lookup(new Interval("2011-10-01/2011-10-06")) + timeline.lookup(Intervals.of("2011-10-01/2011-10-06")) ); } @@ -313,14 +313,14 @@ public class VersionedIntervalTimelineTest add("2011-10-06/2011-10-07", "6", IntegerPartitionChunk.make(null, 10, 0, 60)); assertValues( ImmutableList.of(createExpected("2011-10-05/2011-10-06", "5", 5)), - timeline.lookup(new Interval("2011-10-05/2011-10-07")) + timeline.lookup(Intervals.of("2011-10-05/2011-10-07")) ); Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty()); add("2011-10-06/2011-10-07", "6", IntegerPartitionChunk.make(10, 20, 1, 61)); assertValues( ImmutableList.of(createExpected("2011-10-05/2011-10-06", "5", 5)), - timeline.lookup(new Interval("2011-10-05/2011-10-07")) + timeline.lookup(Intervals.of("2011-10-05/2011-10-07")) ); Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty()); @@ -337,7 +337,7 @@ public class VersionedIntervalTimelineTest ) ) ), - timeline.lookup(new Interval("2011-10-05/2011-10-07")) + timeline.lookup(Intervals.of("2011-10-05/2011-10-07")) ); Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty()); } @@ -368,10 +368,10 @@ public class VersionedIntervalTimelineTest testIncompletePartitionDoesNotOvershadow(); final IntegerPartitionChunk chunk = IntegerPartitionChunk.make(null, 10, 0, 60); - Assert.assertEquals(chunk, timeline.remove(new Interval("2011-10-05/2011-10-07"), "6", chunk)); + Assert.assertEquals(chunk, timeline.remove(Intervals.of("2011-10-05/2011-10-07"), "6", chunk)); assertValues( ImmutableList.of(createExpected("2011-10-05/2011-10-06", "5", 5)), - timeline.lookup(new Interval("2011-10-05/2011-10-07")) + timeline.lookup(Intervals.of("2011-10-05/2011-10-07")) ); Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty()); } @@ -384,18 +384,18 @@ public class VersionedIntervalTimelineTest Collections.singletonList( createExpected("2011-05-01/2011-05-09", "5", 10) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + 
timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); Assert.assertEquals( makeSingle(10), - timeline.remove(new Interval("2011-05-01/2011-05-10"), "5", makeSingle(10)) + timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "5", makeSingle(10)) ); assertValues( Collections.singletonList( createExpected("2011-05-01/2011-05-09", "4", 9) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); add("2011-05-01/2011-05-10", "5", 10); @@ -403,18 +403,18 @@ public class VersionedIntervalTimelineTest Collections.singletonList( createExpected("2011-05-01/2011-05-09", "5", 10) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); Assert.assertEquals( makeSingle(9), - timeline.remove(new Interval("2011-05-01/2011-05-10"), "4", makeSingle(9)) + timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(9)) ); assertValues( Collections.singletonList( createExpected("2011-05-01/2011-05-09", "5", 10) ), - timeline.lookup(new Interval("2011-05-01/2011-05-09")) + timeline.lookup(Intervals.of("2011-05-01/2011-05-09")) ); } @@ -454,7 +454,7 @@ public class VersionedIntervalTimelineTest Collections.singletonList( createExpected("2011-01-01/2011-01-10", "2", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-10")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-10")) ); } @@ -475,7 +475,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -496,7 +496,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -515,7 +515,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-05", "1", 1), createExpected("2011-01-05/2011-01-15", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-15")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-15")) ); } @@ -534,7 +534,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-05", "1", 1), createExpected("2011-01-05/2011-01-15", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-15")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-15")) ); } @@ -553,7 +553,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-10", "2", 1), createExpected("2011-01-10/2011-01-15", "1", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-15")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-15")) ); } @@ -572,7 +572,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-10", "2", 1), createExpected("2011-01-10/2011-01-15", "1", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-15")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-15")) ); } @@ -592,7 +592,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -612,7 +612,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", 
"1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -632,7 +632,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -652,7 +652,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-15", "2", 3), createExpected("2011-01-15/2011-01-20", "1", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -674,7 +674,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-03/2011-01-06", "3", 3), createExpected("2011-01-06/2011-01-20", "2", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -696,7 +696,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-03/2011-01-06", "3", 3), createExpected("2011-01-06/2011-01-20", "2", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -718,7 +718,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-03/2011-01-06", "3", 3), createExpected("2011-01-06/2011-01-20", "2", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -741,7 +741,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-06/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -764,7 +764,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-06/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -787,7 +787,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-06/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -811,7 +811,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-13/2011-01-15", "1", 2), createExpected("2011-01-15/2011-01-20", "1", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -835,7 +835,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-13/2011-01-15", "1", 2), createExpected("2011-01-15/2011-01-20", "1", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -855,7 +855,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-20", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -875,7 +875,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-20", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -897,7 +897,7 @@ public class 
VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 2), createExpected("2011-01-15/2011-01-25", "3", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-25")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-25")) ); } @@ -919,7 +919,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "2", 2), createExpected("2011-01-15/2011-01-25", "3", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-25")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-25")) ); } @@ -941,7 +941,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-13", "1", 1), createExpected("2011-01-13/2011-01-20", "2", 2) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -961,7 +961,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01/2011-01-15", "2", 2), createExpected("2011-01-15/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -983,7 +983,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-13/2011-01-17", "2", 3), createExpected("2011-01-17/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1005,7 +1005,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-13/2011-01-17", "2", 3), createExpected("2011-01-17/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1026,7 +1026,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "1", 1), createExpected("2011-01-15/2011-01-20", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1047,7 +1047,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-15", "1", 1), createExpected("2011-01-15/2011-01-20", "2", 3) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1072,7 +1072,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-04/2011-01-05", "3", 3), createExpected("2011-01-05/2011-01-06", "4", 4) ), - timeline.lookup(new Interval("0000-01-01/3000-01-01")) + timeline.lookup(Intervals.of("0000-01-01/3000-01-01")) ); } @@ -1093,7 +1093,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-01T12/2011-01-02", "3", 3), createExpected("2011-01-02/3011-01-03", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/3011-01-03")) + timeline.lookup(Intervals.of("2011-01-01/3011-01-03")) ); } @@ -1175,13 +1175,13 @@ public class VersionedIntervalTimelineTest add("2011-01-01/2011-01-20", "1", 1); add("2011-01-10/2011-01-15", "2", 2); - timeline.remove(new Interval("2011-01-10/2011-01-15"), "2", makeSingle(2)); + timeline.remove(Intervals.of("2011-01-10/2011-01-15"), "2", makeSingle(2)); assertValues( Collections.singletonList( createExpected("2011-01-01/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1194,7 +1194,7 @@ public class VersionedIntervalTimelineTest add("2011-01-10/2011-01-20", "2", 2); add("2011-01-20/2011-01-30", "3", 4); - timeline.remove(new Interval("2011-01-10/2011-01-20"), "2", makeSingle(2)); + 
timeline.remove(Intervals.of("2011-01-10/2011-01-20"), "2", makeSingle(2)); assertValues( Arrays.asList( @@ -1202,7 +1202,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-20/2011-01-30", "3", 4) ), - timeline.lookup(new Interval("2011-01-01/2011-01-30")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-30")) ); } @@ -1215,15 +1215,15 @@ public class VersionedIntervalTimelineTest add("2011-01-02/2011-01-03", "2", 2); add("2011-01-10/2011-01-14", "2", 3); - timeline.remove(new Interval("2011-01-02/2011-01-03"), "2", makeSingle(2)); - timeline.remove(new Interval("2011-01-10/2011-01-14"), "2", makeSingle(3)); + timeline.remove(Intervals.of("2011-01-02/2011-01-03"), "2", makeSingle(2)); + timeline.remove(Intervals.of("2011-01-10/2011-01-14"), "2", makeSingle(3)); assertValues( Collections.singletonList( createExpected("2011-01-01/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1236,7 +1236,7 @@ public class VersionedIntervalTimelineTest add("2011-01-10/2011-01-15", "2", 2); add("2011-01-15/2011-01-20", "2", 3); - timeline.remove(new Interval("2011-01-15/2011-01-20"), "2", makeSingle(3)); + timeline.remove(Intervals.of("2011-01-15/2011-01-20"), "2", makeSingle(3)); assertValues( Arrays.asList( @@ -1244,7 +1244,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-10/2011-01-15", "2", 2), createExpected("2011-01-15/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1255,14 +1255,14 @@ public class VersionedIntervalTimelineTest add("2011-01-01/2011-01-20", "1", 1); add("2011-01-10/2011-01-15", "2", 2); - timeline.remove(new Interval("2011-01-10/2011-01-15"), "2", makeSingle(2)); + timeline.remove(Intervals.of("2011-01-10/2011-01-15"), "2", makeSingle(2)); add("2011-01-01/2011-01-20", "1", 1); assertValues( Collections.singletonList( createExpected("2011-01-01/2011-01-20", "1", 1) ), - timeline.lookup(new Interval("2011-01-01/2011-01-20")) + timeline.lookup(Intervals.of("2011-01-01/2011-01-20")) ); } @@ -1271,11 +1271,11 @@ public class VersionedIntervalTimelineTest { Assert.assertNull( "Don't have it, should be null", - timeline.remove(new Interval("1970-01-01/2025-04-20"), "1", makeSingle(1)) + timeline.remove(Intervals.of("1970-01-01/2025-04-20"), "1", makeSingle(1)) ); Assert.assertNull( "Don't have it, should be null", - timeline.remove(new Interval("2011-04-01/2011-04-09"), "version does not exist", makeSingle(1)) + timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "version does not exist", makeSingle(1)) ); } @@ -1289,7 +1289,7 @@ public class VersionedIntervalTimelineTest add("2011-01-10/2011-01-15", "3", 3); add("2011-01-15/2011-01-20", "4", 4); - timeline.remove(new Interval("2011-01-15/2011-01-20"), "4", makeSingle(4)); + timeline.remove(Intervals.of("2011-01-15/2011-01-20"), "4", makeSingle(4)); assertValues( Arrays.asList( @@ -1297,7 +1297,7 @@ public class VersionedIntervalTimelineTest createExpected("2011-01-05/2011-01-10", "2", 2), createExpected("2011-01-10/2011-01-15", "3", 3) ), - timeline.lookup(new Interval(new DateTime(0), new DateTime(JodaUtils.MAX_INSTANT))) + timeline.lookup(new Interval(DateTimes.EPOCH, DateTimes.MAX)) ); } @@ -1504,7 +1504,7 @@ public class VersionedIntervalTimelineTest add("2011-04-01/2011-04-09", "1", 1); - Assert.assertTrue(timeline.lookup(Interval.parse("1970/1980")).isEmpty()); + 
Assert.assertTrue(timeline.lookup(Intervals.of("1970/1980")).isEmpty()); } // https://github.com/druid-io/druid/issues/3010 @@ -1526,7 +1526,7 @@ public class VersionedIntervalTimelineTest ) ) ), - timeline.lookup(new Interval("2011-04-01/2011-04-02")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-02")) ); add("2011-04-01/2011-04-02", "3", IntegerPartitionChunk.make(null, 1, 0, 110)); @@ -1540,7 +1540,7 @@ public class VersionedIntervalTimelineTest ) ) ), - timeline.lookup(new Interval("2011-04-01/2011-04-02")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-02")) ); assertValues( Sets.newHashSet( @@ -1564,7 +1564,7 @@ public class VersionedIntervalTimelineTest ) ) ), - timeline.lookup(new Interval("2011-04-01/2011-04-02")) + timeline.lookup(Intervals.of("2011-04-01/2011-04-02")) ); } @@ -1579,58 +1579,58 @@ public class VersionedIntervalTimelineTest add("2011-04-15/2011-04-17", "1", new SingleElementPartitionChunk(1)); add("2011-04-17/2011-04-19", "1", new SingleElementPartitionChunk(1)); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-03"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-05"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-06"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-07"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-08"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-30"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-03"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-05"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-06"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-07"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-08"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-30"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "1")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "1")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "1")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "1")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "2")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "2")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "2")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), 
"2")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "1")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "1")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "1")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "1")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "2")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "2")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "2")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "2")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-07"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-08"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-30"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-07"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-08"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-30"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-08"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-30"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-08"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-30"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-08/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-08/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-08/2011-04-30"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-08/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-08/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-08/2011-04-30"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-15"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-17"), "0")); - 
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-19"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-30"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-15"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-17"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-19"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-30"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-16"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-17"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-18"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-19"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-20"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-30"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-16"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-17"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-18"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-19"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-20"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-30"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-19/2011-04-20"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-21/2011-04-22"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-19/2011-04-20"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-21/2011-04-22"), "0")); } @Test @@ -1645,77 +1645,77 @@ public class VersionedIntervalTimelineTest add("2011-04-17/2011-04-21", "11", new SingleElementPartitionChunk(1)); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-03"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-05"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-06"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-07"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-08"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-09"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-10"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-11"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-30"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-03"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-05"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-06"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-07"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-08"), "0")); + 
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-09"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-10"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-11"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-30"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-10"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-11"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-10"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-11"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-10"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-11"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-10"), "12")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-11"), "12")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-10"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-11"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-10"), 
"13")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-11"), "13")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-12"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-15"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-16"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-12"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-15"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-16"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-17"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-18"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-19"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-20"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-21"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-22"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-17"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-18"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-19"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-20"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-21"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-22"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-07"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-08"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-09"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-10"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-11"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-07"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-08"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-09"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-10"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-11"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-12"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-15"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-16"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-12"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-15"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-16"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-17"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-18"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-19"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-20"), "0")); - 
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-21"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-22"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-17"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-18"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-19"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-20"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-21"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-22"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-15"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-16"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-15"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-16"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-17"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-18"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-19"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-20"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-21"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-22"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-17"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-18"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-19"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-20"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-21"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-22"), "0")); - Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-21"), "0")); - Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-21/2011-04-22"), "0")); + Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-21"), "0")); + Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-21/2011-04-22"), "0")); } private Pair>> createExpected( @@ -1738,7 +1738,7 @@ public class VersionedIntervalTimelineTest ) { return Pair.of( - new Interval(intervalString), + Intervals.of(intervalString), Pair.of(version, new PartitionHolder(values)) ); } @@ -1750,17 +1750,17 @@ public class VersionedIntervalTimelineTest private void add(String interval, String version, Integer value) { - add(new Interval(interval), version, value); + add(Intervals.of(interval), version, value); } private void add(Interval interval, String version, Integer value) { - add(new Interval(interval), version, makeSingle(value)); + add(interval, version, makeSingle(value)); } private void add(String interval, String version, PartitionChunk value) { - add(new Interval(interval), version, value); + add(Intervals.of(interval), version, value); } private void add(Interval interval, String version, PartitionChunk value) diff --git a/docs/_graphics/indexing_service.dot b/docs/_graphics/indexing_service.dot index 38505512998..d12c5b16cc9 100644 --- 
a/docs/_graphics/indexing_service.dot +++ b/docs/_graphics/indexing_service.dot @@ -32,10 +32,11 @@ digraph g { peon_12[label = "peon"] peon_13[label = "peon"] - mm1 -> peon_11 [label = "new_task"] - mm1 -> { peon_12; peon_13 } + mm1 -> {peon_11;peon_12} + mm1 -> peon_13 [label = "new_task"] + mm1 -> peon_13:e [label = "new_task_status" dir=back] } - zk_status:new_task:s -> peon_11:e [label = "new_task_status" dir = back] + zk_status:new_task:s -> mm1:e [label = "new_task_status" dir = back] overlord:e -> zk_status:new_task:n [dir=back label="new_task_status"] } diff --git a/docs/content/configuration/index.md b/docs/content/configuration/index.md index f17f6858a01..8ae9cd7a1fc 100644 --- a/docs/content/configuration/index.md +++ b/docs/content/configuration/index.md @@ -71,7 +71,6 @@ The indexing service also uses its own set of paths. These configs can be includ |`druid.zk.paths.indexer.announcementsPath`|Middle managers announce themselves here.|`${druid.zk.paths.indexer.base}/announcements`| |`druid.zk.paths.indexer.tasksPath`|Used to assign tasks to middle managers.|`${druid.zk.paths.indexer.base}/tasks`| |`druid.zk.paths.indexer.statusPath`|Parent path for announcement of task statuses.|`${druid.zk.paths.indexer.base}/status`| -|`druid.zk.paths.indexer.leaderLatchPath`|Used for Overlord leader election.|`${druid.zk.paths.indexer.base}/leaderLatchPath`| If `druid.zk.paths.base` and `druid.zk.paths.indexer.base` are both set, and none of the other `druid.zk.paths.*` or `druid.zk.paths.indexer.*` values are set, then the other properties will be evaluated relative to their respective `base`. For example, if `druid.zk.paths.base` is set to `/druid1` and `druid.zk.paths.indexer.base` is set to `/druid2` then `druid.zk.paths.announcementsPath` will default to `/druid1/announcements` while `druid.zk.paths.indexer.announcementsPath` will default to `/druid2/announcements`. diff --git a/docs/content/configuration/indexing-service.md b/docs/content/configuration/indexing-service.md index 20388653af4..1631378d7cb 100644 --- a/docs/content/configuration/indexing-service.md +++ b/docs/content/configuration/indexing-service.md @@ -156,7 +156,7 @@ A sample worker config spec is shown below: ```json { "selectStrategy": { - "type": "fillCapacityWithAffinity", + "type": "fillCapacity", "affinityConfig": { "affinity": { "datasource1": ["host1:port", "host2:port"], @@ -193,7 +193,7 @@ Issuing a GET request at the same URL will return the current worker config spec |Property|Description|Default| |--------|-----------|-------| -|`selectStrategy`|How to assign tasks to middle managers. Choices are `fillCapacity`, `fillCapacityWithAffinity`, `equalDistribution`, `equalDistributionWithAffinity` and `javascript`.|equalDistribution| +|`selectStrategy`|How to assign tasks to middle managers. Choices are `fillCapacity`, `equalDistribution`, and `javascript`.|equalDistribution| |`autoScaler`|Only used if autoscaling is enabled. See below.|null| To view the audit history of worker config issue a GET request to the URL - @@ -212,48 +212,31 @@ http://:/druid/indexer/v1/worker/history?count= #### Worker Select Strategy -##### Fill Capacity - -Workers are assigned tasks until capacity. - -|Property|Description|Default| -|--------|-----------|-------| -|`type`|`fillCapacity`.|required; must be `fillCapacity`| - -Note that, if `druid.indexer.runner.pendingTasksRunnerNumThreads` is set to n (> 1) then it means to fill n workers upto capacity simultaneously and then moving on. 
- -##### Fill Capacity With Affinity - -An affinity config can be provided. - -|Property|Description|Default| -|--------|-----------|-------| -|`type`|`fillCapacityWithAffinity`.|required; must be `fillCapacityWithAffinity`| -|`affinity`|JSON object mapping a datasource String name to a list of indexing service middle manager host:port String values. Druid doesn't perform DNS resolution, so the 'host' value must match what is configured on the middle manager and what the middle manager announces itself as (examine the Overlord logs to see what your middle manager announces itself as).|{}| - -Tasks will try to be assigned to preferred workers. Fill capacity strategy is used if no preference for a datasource specified. - -Note that, if `druid.indexer.runner.pendingTasksRunnerNumThreads` is set to n (> 1) then it means to fill n preferred workers upto capacity simultaneously and then moving on. +Worker select strategies control how Druid assigns tasks to middleManagers. ##### Equal Distribution -The workers with the least amount of tasks is assigned the task. +Tasks are assigned to the middleManager with the most available capacity at the time the task begins running. This is +useful if you want work evenly distributed across your middleManagers. |Property|Description|Default| |--------|-----------|-------| |`type`|`equalDistribution`.|required; must be `equalDistribution`| +|`affinityConfig`|[Affinity config](#affinity) object|null (no affinity)| -##### Equal Distribution With Affinity +##### Fill Capacity -An affinity config can be provided. +Tasks are assigned to the worker with the most currently-running tasks at the time the task begins running. This is +useful in situations where you are elastically auto-scaling middleManagers, since it will tend to pack some full and +leave others empty. The empty ones can be safely terminated. + +Note that if `druid.indexer.runner.pendingTasksRunnerNumThreads` is set to _N_ > 1, then this strategy will fill _N_ +middleManagers up to capacity simultaneously, rather than a single middleManager. |Property|Description|Default| |--------|-----------|-------| -|`type`|`equalDistributionWithAffinity`.|required; must be `equalDistributionWithAffinity`| -|`affinity`|Exactly same with `fillCapacityWithAffinity` 's affinity.|{}| - -Tasks will try to be assigned to preferred workers. Equal Distribution strategy is used if no preference for a datasource specified. - +|`type`|`fillCapacity`.|required; must be `fillCapacity`| +|`affinityConfig`|[Affinity config](#affinity) object|null (no affinity)| ##### Javascript @@ -263,7 +246,6 @@ It can be used for rapid development of missing features where the worker select If the selection logic is quite complex and cannot be easily tested in javascript environment, its better to write a druid extension module with extending current worker selection strategies written in java. - |Property|Description|Default| |--------|-----------|-------| |`type`|`javascript`.|required; must be `javascript`| @@ -282,6 +264,16 @@ Example: a function that sends batch_index_task to workers 10.0.0.1 and 10.0.0.2 JavaScript-based functionality is disabled by default. Please refer to the Druid JavaScript programming guide for guidelines about using Druid's JavaScript functionality, including instructions on how to enable it. +##### Affinity + +Affinity configs can be provided to the _equalDistribution_ and _fillCapacity_ strategies using the "affinityConfig" +field. If not provided, the default is to not use affinity at all. 
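For illustration, a minimal worker config spec that pairs the `equalDistribution` strategy with an affinity mapping (using the properties described in the table below) might look like the following sketch; the datasource name and host:port values are placeholders rather than part of this patch:

```json
{
  "selectStrategy": {
    "type": "equalDistribution",
    "affinityConfig": {
      "affinity": {
        "datasource1": ["host1:port", "host2:port"]
      },
      "strong": true
    }
  }
}
```

With `"strong": true`, tasks for `datasource1` wait in the pending queue rather than spill onto other middleManagers when the two listed hosts are at capacity.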
+ +|Property|Description|Default| +|--------|-----------|-------| +|`affinity`|JSON object mapping a datasource String name to a list of indexing service middleManager host:port String values. Druid doesn't perform DNS resolution, so the 'host' value must match what is configured on the middleManager and what the middleManager announces itself as (examine the Overlord logs to see what your middleManager announces itself as).|{}| +|`strong`|With weak affinity (the default), tasks for a dataSource may be assigned to other middleManagers if their affinity-mapped middleManagers are not able to run all pending tasks in the queue for that dataSource. With strong affinity, tasks for a dataSource will only ever be assigned to their affinity-mapped middleManagers, and will wait in the pending queue if necessary.|false| + #### Autoscaler Amazon's EC2 is currently the only supported autoscaler. @@ -300,7 +292,7 @@ Middle managers pass their configurations down to their child peons. The middle |Property|Description|Default| |--------|-----------|-------| -|`druid.indexer.runner.allowedPrefixes`|Whitelist of prefixes for configs that can be passed down to child peons.|"com.metamx", "druid", "io.druid", "user.timezone","file.encoding"| +|`druid.indexer.runner.allowedPrefixes`|Whitelist of prefixes for configs that can be passed down to child peons.|"com.metamx", "druid", "io.druid", "user.timezone", "file.encoding", "java.io.tmpdir", "hadoop"| |`druid.indexer.runner.compressZnodes`|Indicates whether or not the middle managers should compress Znodes.|true| |`druid.indexer.runner.classpath`|Java classpath for the peon.|System.getProperty("java.class.path")| |`druid.indexer.runner.javaCommand`|Command required to execute java.|java| diff --git a/docs/content/development/extensions-contrib/graphite.md b/docs/content/development/extensions-contrib/graphite.md index 66150e0ad50..6692d35a693 100644 --- a/docs/content/development/extensions-contrib/graphite.md +++ b/docs/content/development/extensions-contrib/graphite.md @@ -9,7 +9,8 @@ To use this extension, make sure to [include](../../operations/including-extensi ## Introduction This extension emits druid metrics to a graphite carbon server. -Events are sent after been [pickled](http://graphite.readthedocs.org/en/latest/feeding-carbon.html#the-pickle-protocol); the size of the batch is configurable. +Metrics can be sent by using [plaintext](http://graphite.readthedocs.io/en/latest/feeding-carbon.html#the-plaintext-protocol) or [pickle](http://graphite.readthedocs.io/en/latest/feeding-carbon.html#the-pickle-protocol) protocol. +The pickle protocol is more efficient and supports sending batches of metrics (plaintext protocol send only one metric) in one request; batch size is configurable. ## Configuration @@ -19,8 +20,9 @@ All the configuration parameters for graphite emitter are under `druid.emitter.g |--------|-----------|---------|-------| |`druid.emitter.graphite.hostname`|The hostname of the graphite server.|yes|none| |`druid.emitter.graphite.port`|The port of the graphite server.|yes|none| -|`druid.emitter.graphite.batchSize`|Number of events to send as one batch.|no|100| -|`druid.emitter.graphite.eventConverter`| Filter and converter of druid events to graphite event(please see next section). 
|yes|none| +|`druid.emitter.graphite.batchSize`|Number of events to send as one batch (only for pickle protocol)|no|100| +|`druid.emitter.graphite.protocol`|Graphite protocol; available protocols: pickle, plaintext.|no|pickle| +|`druid.emitter.graphite.eventConverter`| Filter and converter of druid events to graphite event (please see next section).|yes|none| |`druid.emitter.graphite.flushPeriod` | Queue flushing period in milliseconds. |no|1 minute| |`druid.emitter.graphite.maxQueueSize`| Maximum size of the queue used to buffer events. |no|`MAX_INT`| |`druid.emitter.graphite.alertEmitters`| List of emitters where alerts will be forwarded to. |no| empty list (no forwarding)| @@ -52,10 +54,15 @@ The path will be in the form `.[].[.[].[].` You can omit the hostname by setting `ignoreHostname=true` -`druid.SERVICE_NAME.dataSourceName.queryType.query.time` +`druid.SERVICE_NAME.dataSourceName.queryType.query/time` You can omit the service name by setting `ignoreServiceName=true` -`druid.HOSTNAME.dataSourceName.queryType.query.time` +`druid.HOSTNAME.dataSourceName.queryType.query/time` + +Elements in metric name by default are separated by "/", so graphite will create all metrics on one level. If you want to have metrics in the tree structure, you have to set `replaceSlashWithDot=true` +Original: `druid.HOSTNAME.dataSourceName.queryType.query/time` +Changed: `druid.HOSTNAME.dataSourceName.queryType.query.time` + ```json @@ -70,7 +77,7 @@ Same as for the `all` converter user has control of `.[", "fields": [ , , ... ], - "byRow": # (optional, defaults to false) + "byRow": # (optional, defaults to false), + "round": # (optional, defaults to false) } ``` Each individual element of the "fields" list can be a String or [DimensionSpec](../querying/dimensionspecs.html). A String dimension in the fields list is equivalent to a DefaultDimensionSpec (no transformations). +The HyperLogLog algorithm generates decimal estimates with some error. "round" can be set to true to round off estimated +values to whole numbers. Note that even with rounding, the cardinality is still an estimate. The "round" field only +affects query-time behavior, and is ignored at ingestion-time. + #### Cardinality by value When setting `byRow` to `false` (the default) it computes the cardinality of the set composed of the union of all dimension values for all the given dimensions. @@ -315,12 +320,17 @@ Uses [HyperLogLog](http://algo.inria.fr/flajolet/Publications/FlFuGaMe07.pdf) to "type" : "hyperUnique", "name" : , "fieldName" : , - "isInputHyperUnique" : false + "isInputHyperUnique" : false, + "round" : false } ``` -isInputHyperUnique can be set to true to index pre-computed HLL (Base64 encoded output from druid-hll is expected). -The isInputHyperUnique field only affects ingestion-time behavior, and is ignored at query time. +"isInputHyperUnique" can be set to true to index pre-computed HLL (Base64 encoded output from druid-hll is expected). +The "isInputHyperUnique" field only affects ingestion-time behavior, and is ignored at query-time. + +The HyperLogLog algorithm generates decimal estimates with some error. "round" can be set to true to round off estimated +values to whole numbers. Note that even with rounding, the cardinality is still an estimate. The "round" field only +affects query-time behavior, and is ignored at ingestion-time. For more approximate aggregators, please see [theta sketches](../development/extensions-core/datasketches-aggregators.html). 
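As a sketch of how the new `round` option can be used (the dimension, field, and output names below are placeholders, not part of this patch), a query's aggregations list could request rounded estimates from both aggregators at once:

```json
"aggregations": [
  {
    "type": "cardinality",
    "name": "distinct_users",
    "fields": ["user_id"],
    "byRow": false,
    "round": true
  },
  {
    "type": "hyperUnique",
    "name": "unique_users",
    "fieldName": "user_id_hll",
    "round": true
  }
]
```

Because `round` only affects query-time behavior, the same ingestion spec can serve both rounded and unrounded queries.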
diff --git a/docs/content/querying/post-aggregations.md b/docs/content/querying/post-aggregations.md index 799aa046eb4..f450a5077e0 100644 --- a/docs/content/querying/post-aggregations.md +++ b/docs/content/querying/post-aggregations.md @@ -36,16 +36,26 @@ postAggregation : { } ``` -### Field accessor post-aggregator +### Field accessor post-aggregators -This returns the value produced by the specified [aggregator](../querying/aggregations.html). +These post-aggregators return the value produced by the specified [aggregator](../querying/aggregations.html). `fieldName` refers to the output name of the aggregator given in the [aggregations](../querying/aggregations.html) portion of the query. +For complex aggregators, like "cardinality" and "hyperUnique", the `type` of the post-aggregator determines what +the post-aggregator will return. Use type "fieldAccess" to return the raw aggregation object, or use type +"finalizingFieldAccess" to return a finalized value, such as an estimated cardinality. ```json { "type" : "fieldAccess", "name": , "fieldName" : } ``` +or + +```json +{ "type" : "finalizingFieldAccess", "name": , "fieldName" : } +``` + + ### Constant post-aggregator The constant post-aggregator always returns the specified value. @@ -107,7 +117,11 @@ JavaScript-based functionality is disabled by default. Please refer to the Druid The hyperUniqueCardinality post aggregator is used to wrap a hyperUnique object such that it can be used in post aggregations. ```json -{ "type" : "hyperUniqueCardinality", "name": , "fieldName" : } +{ + "type" : "hyperUniqueCardinality", + "name": , + "fieldName" : +} ``` It can be used in a sample calculation as so: @@ -128,6 +142,10 @@ It can be used in a sample calculation as so: }] ``` +This post-aggregator will inherit the rounding behavior of the aggregator it references. Note that this inheritance +is only effective if you directly reference an aggregator. Going through another post-aggregator, for example, will +cause the user-specified rounding behavior to get lost and default to "no rounding". + ## Example Usage In this example, let’s calculate a simple percentage using post aggregators. Let’s imagine our data set has a metric called "total". 
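To make the difference between the two accessor types concrete, here is a hedged sketch (aggregator and output names are placeholders, not part of this patch) that finalizes a rounded hyperUnique estimate:

```json
"aggregations": [
  { "type": "hyperUnique", "name": "unique_users", "fieldName": "user_id_hll", "round": true }
],
"postAggregations": [
  { "type": "finalizingFieldAccess", "name": "unique_users_estimate", "fieldName": "unique_users" }
]
```

Here `finalizingFieldAccess` returns the finalized, rounded estimate, while a plain `fieldAccess` on the same aggregator would return the raw HyperLogLog object for use by other post-aggregators. Because the accessor references the aggregator directly, it inherits the aggregator's `round` setting; routing through an intermediate post-aggregator would lose that rounding behavior, as noted above.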
diff --git a/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java b/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java index 06e97ddea4d..eeb4048a9b4 100644 --- a/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java +++ b/extensions-contrib/ambari-metrics-emitter/src/test/java/io/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java @@ -22,6 +22,7 @@ package io.druid.emitter.ambari.metrics; import com.google.common.collect.Maps; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; @@ -44,7 +45,7 @@ public class WhiteListBasedDruidToTimelineEventConverterTest new DefaultObjectMapper() ); private ServiceMetricEvent event; - private final DateTime createdTime = new DateTime(); + private final DateTime createdTime = DateTimes.nowUtc(); private final String hostname = "testHost:8080"; private final String serviceName = "historical"; private final String defaultNamespace = prefix + "." + serviceName; diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java index 17428a545f6..3003103449e 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentKillerTest.java @@ -21,11 +21,11 @@ package io.druid.storage.azure; import com.google.common.collect.ImmutableMap; import com.microsoft.azure.storage.StorageException; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Before; import org.junit.Test; @@ -44,7 +44,7 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", containerName, "blobPath", blobPath), null, diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java index 5a5eec038ce..b01e6b638bd 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPullerTest.java @@ -22,11 +22,11 @@ package io.druid.storage.azure; import com.google.common.collect.ImmutableMap; import com.microsoft.azure.storage.StorageException; import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import 
io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Before; import org.junit.Test; @@ -50,7 +50,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport private static final String blobPath = "/path/to/storage/index.zip"; private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", containerName, "blobPath", blobPath), null, diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java index 0f54dedf08a..af76f357142 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java @@ -26,12 +26,12 @@ import com.google.common.collect.Maps; import com.google.common.io.Files; import com.microsoft.azure.storage.StorageException; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -55,7 +55,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport private static final String blobPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", containerName, "blobPath", blobPath), null, @@ -94,7 +94,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport DataSegment segmentToPush = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), diff --git a/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java b/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java index 3014fcd5ad0..b257efb1282 100644 --- a/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java +++ b/extensions-contrib/cloudfiles-extensions/src/test/java/io/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java @@ -23,13 +23,13 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; import org.jclouds.io.Payload; import org.jclouds.openstack.swift.v1.features.ObjectApi; import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -74,7 +74,7 @@ public class CloudFilesDataSegmentPusherTest DataSegment segmentToPush = new DataSegment( "foo", - new Interval("2015/2016"), + 
Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), diff --git a/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java b/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java index a6578b9de2e..350283cfe15 100644 --- a/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java +++ b/extensions-contrib/distinctcount/src/main/java/io/druid/query/aggregation/distinctcount/DistinctCountAggregatorFactory.java @@ -29,6 +29,8 @@ import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorUtil; import io.druid.query.aggregation.BufferAggregator; import io.druid.query.aggregation.LongSumAggregatorFactory; +import io.druid.query.aggregation.LongSumAggregateCombiner; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.DimensionSelector; @@ -119,6 +121,13 @@ public class DistinctCountAggregatorFactory extends AggregatorFactory return ((Number) lhs).longValue() + ((Number) rhs).longValue(); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + // This is likely wrong as well as combine(), see https://github.com/druid-io/druid/pull/2602#issuecomment-321224202 + return new LongSumAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java index eeb6dfbf85e..14e70157819 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java @@ -22,6 +22,7 @@ package io.druid.query.aggregation.distinctcount; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -61,7 +62,7 @@ public class DistinctCountTimeseriesQueryTest String visitor_id = "visitor_id"; String client_type = "client_type"; - DateTime time = new DateTime("2016-03-04T00:00:00.000Z"); + DateTime time = DateTimes.of("2016-03-04T00:00:00.000Z"); long timestamp = time.getMillis(); index.add( new MapBasedInputRow( diff --git a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java index 190d79ed9bd..3e6b6a6f5ae 100644 --- a/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/io/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java @@ -22,9 +22,9 @@ package io.druid.query.aggregation.distinctcount; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; 
import com.google.common.collect.Lists; - import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.QueryRunnerTestHelper; @@ -79,7 +79,7 @@ public class DistinctCountTopNQueryTest String visitor_id = "visitor_id"; String client_type = "client_type"; - DateTime time = new DateTime("2016-03-04T00:00:00.000Z"); + DateTime time = DateTimes.of("2016-03-04T00:00:00.000Z"); long timestamp = time.getMillis(); index.add( new MapBasedInputRow( diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java index 31dac78e5f3..3e74a87e689 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentKillerTest.java @@ -20,11 +20,11 @@ package io.druid.storage.google; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Before; import org.junit.Test; @@ -39,7 +39,7 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("bucket", bucket, "path", indexPath), null, diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java index a6d3facfd9c..1504a20333f 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPullerTest.java @@ -21,11 +21,11 @@ package io.druid.storage.google; import com.google.common.collect.ImmutableMap; import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Test; import java.io.File; @@ -42,7 +42,7 @@ public class GoogleDataSegmentPullerTest extends EasyMockSupport private static final String path = "/path/to/storage/index.zip"; private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("bucket", bucket, "path", path), null, diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java index 83d35601ff1..6c845d433c7 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java +++ 
b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleDataSegmentPusherTest.java @@ -25,11 +25,11 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -50,7 +50,7 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport private static final String path = "prefix/test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; private static final DataSegment dataSegment = new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("bucket", bucket, "path", path), null, @@ -87,7 +87,7 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport DataSegment segmentToPush = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), diff --git a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java index 11873618dfb..156718f02fb 100644 --- a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java +++ b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitter.java @@ -19,6 +19,8 @@ package io.druid.emitter.graphite; +import com.codahale.metrics.graphite.Graphite; +import com.codahale.metrics.graphite.GraphiteSender; import com.codahale.metrics.graphite.PickledGraphite; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.metamx.emitter.core.Emitter; @@ -136,17 +138,32 @@ public class GraphiteEmitter implements Emitter private class ConsumerRunnable implements Runnable { - @Override - public void run() + private final GraphiteSender graphite; + + public ConsumerRunnable() { - try (PickledGraphite pickledGraphite = new PickledGraphite( + if (graphiteEmitterConfig.getProtocol().equals(GraphiteEmitterConfig.PLAINTEXT_PROTOCOL)) { + graphite = new Graphite( + graphiteEmitterConfig.getHostname(), + graphiteEmitterConfig.getPort() + ); + } else { + graphite = new PickledGraphite( graphiteEmitterConfig.getHostname(), graphiteEmitterConfig.getPort(), graphiteEmitterConfig.getBatchSize() - )) { - if (!pickledGraphite.isConnected()) { + ); + } + log.info("Using %s protocol.", graphiteEmitterConfig.getProtocol()); + } + + @Override + public void run() + { + try { + if (!graphite.isConnected()) { log.info("trying to connect to graphite server"); - pickledGraphite.connect(); + graphite.connect(); } while (eventsQueue.size() > 0 && !exec.isShutdown()) { try { @@ -161,7 +178,7 @@ public class GraphiteEmitter implements Emitter graphiteEvent.getValue(), graphiteEvent.getTimestamp() ); - pickledGraphite.send( + graphite.send( graphiteEvent.getEventPath(), graphiteEvent.getValue(), graphiteEvent.getTimestamp() @@ -176,9 +193,9 @@ public class GraphiteEmitter implements Emitter } else if (e instanceof SocketException) { // This is antagonistic to general Closeable contract in Java, // it is needed to allow re-connection in case of the socket is closed due long period of inactivity - 
pickledGraphite.close(); + graphite.close(); log.warn("Trying to re-connect to graphite server"); - pickledGraphite.connect(); + graphite.connect(); } } } @@ -219,8 +236,17 @@ public class GraphiteEmitter implements Emitter } protected static String sanitize(String namespace) + { + return sanitize(namespace, false); + } + + protected static String sanitize(String namespace, Boolean replaceSlashToDot) { Pattern DOT_OR_WHITESPACE = Pattern.compile("[\\s]+|[.]+"); - return DOT_OR_WHITESPACE.matcher(namespace).replaceAll("_"); + String sanitizedNamespace = DOT_OR_WHITESPACE.matcher(namespace).replaceAll("_"); + if (replaceSlashToDot) { + sanitizedNamespace = sanitizedNamespace.replace("/", "."); + } + return sanitizedNamespace; } } diff --git a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterConfig.java b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterConfig.java index 1df015b7a65..3758279964b 100644 --- a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterConfig.java +++ b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterConfig.java @@ -29,6 +29,8 @@ import java.util.List; public class GraphiteEmitterConfig { + public final static String PLAINTEXT_PROTOCOL = "plaintext"; + public final static String PICKLE_PROTOCOL = "pickle"; private final static int DEFAULT_BATCH_SIZE = 100; private static final Long DEFAULT_FLUSH_PERIOD = (long) (60 * 1000); // flush every one minute private final static long DEFAULT_GET_TIMEOUT = 1000; // default wait for get operations on the queue 1 sec @@ -40,18 +42,17 @@ public class GraphiteEmitterConfig @JsonProperty final private int batchSize; @JsonProperty + final private String protocol; + @JsonProperty final private Long flushPeriod; @JsonProperty final private Integer maxQueueSize; @JsonProperty("eventConverter") final private DruidToGraphiteEventConverter druidToGraphiteEventConverter; - @JsonProperty final private List alertEmitters; - @JsonProperty final private Long emitWaitTime; - //waiting up to the specified wait time if necessary for an event to become available. 
@JsonProperty final private Long waitForEventTime; @@ -74,6 +75,9 @@ public class GraphiteEmitterConfig if (getBatchSize() != that.getBatchSize()) { return false; } + if (!getProtocol().equals(that.getProtocol())) { + return false; + } if (!getHostname().equals(that.getHostname())) { return false; } @@ -104,6 +108,7 @@ public class GraphiteEmitterConfig int result = getHostname().hashCode(); result = 31 * result + getPort(); result = 31 * result + getBatchSize(); + result = 31 * result + getProtocol().hashCode(); result = 31 * result + getFlushPeriod().hashCode(); result = 31 * result + getMaxQueueSize().hashCode(); result = 31 * result + getDruidToGraphiteEventConverter().hashCode(); @@ -118,6 +123,7 @@ public class GraphiteEmitterConfig @JsonProperty("hostname") String hostname, @JsonProperty("port") Integer port, @JsonProperty("batchSize") Integer batchSize, + @JsonProperty("protocol") String protocol, @JsonProperty("flushPeriod") Long flushPeriod, @JsonProperty("maxQueueSize") Integer maxQueueSize, @JsonProperty("eventConverter") DruidToGraphiteEventConverter druidToGraphiteEventConverter, @@ -138,6 +144,7 @@ public class GraphiteEmitterConfig this.hostname = Preconditions.checkNotNull(hostname, "hostname can not be null"); this.port = Preconditions.checkNotNull(port, "port can not be null"); this.batchSize = (batchSize == null) ? DEFAULT_BATCH_SIZE : batchSize; + this.protocol = (protocol == null) ? PICKLE_PROTOCOL : protocol; } @JsonProperty @@ -158,6 +165,12 @@ public class GraphiteEmitterConfig return batchSize; } + @JsonProperty + public String getProtocol() + { + return protocol; + } + @JsonProperty public Integer getMaxQueueSize() { diff --git a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/SendAllGraphiteEventConverter.java b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/SendAllGraphiteEventConverter.java index c443b220d12..dc26014e1b3 100644 --- a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/SendAllGraphiteEventConverter.java +++ b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/SendAllGraphiteEventConverter.java @@ -53,6 +53,9 @@ public class SendAllGraphiteEventConverter implements DruidToGraphiteEventConver @JsonProperty private final String namespacePrefix; + @JsonProperty + private final boolean replaceSlashWithDot; + @JsonProperty public String getNamespacePrefix() { @@ -71,15 +74,23 @@ public class SendAllGraphiteEventConverter implements DruidToGraphiteEventConver return ignoreHostname; } + @JsonProperty + public boolean replaceSlashWithDot() + { + return replaceSlashWithDot; + } + @JsonCreator public SendAllGraphiteEventConverter( @JsonProperty("namespacePrefix") String namespacePrefix, @JsonProperty("ignoreHostname") Boolean ignoreHostname, - @JsonProperty("ignoreServiceName") Boolean ignoreServiceName + @JsonProperty("ignoreServiceName") Boolean ignoreServiceName, + @JsonProperty("replaceSlashWithDot") Boolean replaceSlashWithDot ) { this.ignoreHostname = ignoreHostname == null ? false : ignoreHostname; this.ignoreServiceName = ignoreServiceName == null ? false : ignoreServiceName; + this.replaceSlashWithDot = replaceSlashWithDot == null ? 
false : replaceSlashWithDot; this.namespacePrefix = Preconditions.checkNotNull(namespacePrefix, "namespace prefix can not be null"); } @@ -100,7 +111,7 @@ public class SendAllGraphiteEventConverter implements DruidToGraphiteEventConver metricPathBuilder.add(GraphiteEmitter.sanitize(String.valueOf(serviceMetricEvent.getUserDims() .get(dimName)))); } - metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getMetric())); + metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getMetric(), this.replaceSlashWithDot())); return new GraphiteEvent( Joiner.on(".").join(metricPathBuilder.build()), @@ -127,6 +138,9 @@ public class SendAllGraphiteEventConverter implements DruidToGraphiteEventConver if (isIgnoreServiceName() != that.isIgnoreServiceName()) { return false; } + if (replaceSlashWithDot() != that.replaceSlashWithDot()) { + return false; + } return getNamespacePrefix().equals(that.getNamespacePrefix()); } @@ -136,6 +150,7 @@ public class SendAllGraphiteEventConverter implements DruidToGraphiteEventConver { int result = (isIgnoreHostname() ? 1 : 0); result = 31 * result + (isIgnoreServiceName() ? 1 : 0); + result = 31 * result + (replaceSlashWithDot() ? 1 : 0); result = 31 * result + getNamespacePrefix().hashCode(); return result; } diff --git a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java index 75250ac909a..ac53033ca6a 100644 --- a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java +++ b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/WhiteListBasedConverter.java @@ -69,6 +69,9 @@ public class WhiteListBasedConverter implements DruidToGraphiteEventConverter @JsonProperty private final String namespacePrefix; + @JsonProperty + private final boolean replaceSlashWithDot; + @JsonProperty private final String mapPath; @@ -79,6 +82,7 @@ public class WhiteListBasedConverter implements DruidToGraphiteEventConverter @JsonProperty("namespacePrefix") String namespacePrefix, @JsonProperty("ignoreHostname") Boolean ignoreHostname, @JsonProperty("ignoreServiceName") Boolean ignoreServiceName, + @JsonProperty("replaceSlashWithDot") Boolean replaceSlashWithDot, @JsonProperty("mapPath") String mapPath, @JacksonInject ObjectMapper mapper ) @@ -88,6 +92,7 @@ public class WhiteListBasedConverter implements DruidToGraphiteEventConverter this.whiteListDimsMapper = readMap(this.mapPath); this.ignoreHostname = ignoreHostname == null ? false : ignoreHostname; this.ignoreServiceName = ignoreServiceName == null ? false : ignoreServiceName; + this.replaceSlashWithDot = replaceSlashWithDot == null ? 
false : replaceSlashWithDot; this.namespacePrefix = Preconditions.checkNotNull(namespacePrefix, "namespace prefix can not be null"); } @@ -109,6 +114,12 @@ public class WhiteListBasedConverter implements DruidToGraphiteEventConverter return namespacePrefix; } + @JsonProperty + public boolean replaceSlashWithDot() + { + return replaceSlashWithDot; + } + public ImmutableSortedMap> getWhiteListDimsMapper() { return whiteListDimsMapper; @@ -200,7 +211,7 @@ public class WhiteListBasedConverter implements DruidToGraphiteEventConverter metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getHost())); } metricPathBuilder.addAll(this.getOrderedDimValues(serviceMetricEvent)); - metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getMetric())); + metricPathBuilder.add(GraphiteEmitter.sanitize(serviceMetricEvent.getMetric(), this.replaceSlashWithDot())); final GraphiteEvent graphiteEvent = new GraphiteEvent( Joiner.on(".").join(metricPathBuilder.build()), @@ -228,6 +239,9 @@ public class WhiteListBasedConverter implements DruidToGraphiteEventConverter if (isIgnoreServiceName() != that.isIgnoreServiceName()) { return false; } + if (replaceSlashWithDot() != that.replaceSlashWithDot()) { + return false; + } if (!getNamespacePrefix().equals(that.getNamespacePrefix())) { return false; } @@ -240,6 +254,7 @@ public class WhiteListBasedConverter implements DruidToGraphiteEventConverter { int result = (isIgnoreHostname() ? 1 : 0); result = 31 * result + (isIgnoreServiceName() ? 1 : 0); + result = 31 * result + (replaceSlashWithDot() ? 1 : 0); result = 31 * result + getNamespacePrefix().hashCode(); result = 31 * result + (mapPath != null ? mapPath.hashCode() : 0); return result; diff --git a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/DruidToWhiteListBasedConverterTest.java b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/DruidToWhiteListBasedConverterTest.java index f6e1fcb0a15..4a6a5287dfd 100644 --- a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/DruidToWhiteListBasedConverterTest.java +++ b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/DruidToWhiteListBasedConverterTest.java @@ -31,4 +31,11 @@ public class DruidToWhiteListBasedConverterTest String test = "host name.yahoo.com:8080"; Assert.assertEquals("host_name_yahoo_com:8080", GraphiteEmitter.sanitize(test)); } + + @Test + public void testSanitizeAndReplaceSlashWithDot() + { + String test = "query/cache/delta/hitRate"; + Assert.assertEquals("query.cache.delta.hitRate", GraphiteEmitter.sanitize(test, true)); + } } diff --git a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/GraphiteEmitterConfigTest.java b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/GraphiteEmitterConfigTest.java index 020ca61824f..43d36f6d427 100644 --- a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/GraphiteEmitterConfigTest.java +++ b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/GraphiteEmitterConfigTest.java @@ -49,9 +49,10 @@ public class GraphiteEmitterConfigTest "hostname", 8080, 1000, + GraphiteEmitterConfig.PICKLE_PROTOCOL, 1000L, 100, - new SendAllGraphiteEventConverter("prefix", true, true), + new SendAllGraphiteEventConverter("prefix", true, true, false), Collections.EMPTY_LIST, null, null @@ -66,7 +67,12 @@ public class GraphiteEmitterConfigTest @Test public void testSerDeserDruidToGraphiteEventConverter() throws 
IOException { - SendAllGraphiteEventConverter sendAllGraphiteEventConverter = new SendAllGraphiteEventConverter("prefix", true, true); + SendAllGraphiteEventConverter sendAllGraphiteEventConverter = new SendAllGraphiteEventConverter( + "prefix", + true, + true, + false + ); String noopGraphiteEventConverterString = mapper.writeValueAsString(sendAllGraphiteEventConverter); DruidToGraphiteEventConverter druidToGraphiteEventConverter = mapper.reader(DruidToGraphiteEventConverter.class) .readValue(noopGraphiteEventConverterString); @@ -76,6 +82,7 @@ public class GraphiteEmitterConfigTest "prefix", true, true, + false, "", new DefaultObjectMapper() ); diff --git a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java index 4a93f9ec5d3..11908fb1bb2 100644 --- a/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java +++ b/extensions-contrib/graphite-emitter/src/test/java/io/druid/emitter/graphite/WhiteListBasedConverterTest.java @@ -22,6 +22,7 @@ package io.druid.emitter.graphite; import com.google.common.collect.Maps; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import org.easymock.EasyMock; @@ -40,11 +41,12 @@ public class WhiteListBasedConverterTest prefix, false, false, + false, null, new DefaultObjectMapper() ); private ServiceMetricEvent event; - private DateTime createdTime = new DateTime(); + private DateTime createdTime = DateTimes.nowUtc(); private String hostname = "testHost.yahoo.com:8080"; private String serviceName = "historical"; private String defaultNamespace = prefix + "." + serviceName + "." 
+ GraphiteEmitter.sanitize(hostname); diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java index 9b0939219b3..b0cb778c302 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java @@ -18,9 +18,10 @@ */ package io.druid.data.input.orc; -import io.druid.java.util.common.StringUtils; import io.druid.data.input.MapBasedInputRow; import io.druid.indexer.HadoopDruidIndexerConfig; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector; @@ -41,7 +42,6 @@ import org.apache.orc.CompressionKind; import org.apache.orc.OrcFile; import org.apache.orc.TypeDescription; import org.apache.orc.Writer; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -101,7 +101,7 @@ public class DruidOrcInputFormatTest MapBasedInputRow row = (MapBasedInputRow) parser.parse(data); Assert.assertTrue(row.getEvent().keySet().size() == 4); - Assert.assertEquals(new DateTime(timestamp), row.getTimestamp()); + Assert.assertEquals(DateTimes.of(timestamp), row.getTimestamp()); Assert.assertEquals(parser.getParseSpec().getDimensionsSpec().getDimensionNames(), row.getDimensions()); Assert.assertEquals(col1, row.getEvent().get("col1")); Assert.assertEquals(Arrays.asList(col2), row.getDimension("col2")); diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java index 93d57e55ef6..cc53a6db4a7 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcHadoopInputRowParserTest.java @@ -36,6 +36,7 @@ import io.druid.data.input.impl.TimestampSpec; import io.druid.guice.GuiceInjectors; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import org.apache.hadoop.hive.ql.io.orc.OrcStruct; import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable; import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector; @@ -43,7 +44,6 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -167,7 +167,7 @@ public class OrcHadoopInputRowParserTest oi.setStructFieldData(struct, oi.getStructFieldRef("col6"), null); final InputRow row = parser.parse(struct); - Assert.assertEquals("timestamp", new DateTime("2000-01-01"), row.getTimestamp()); + Assert.assertEquals("timestamp", DateTimes.of("2000-01-01"), row.getTimestamp()); Assert.assertEquals("col1", "foo", row.getRaw("col1")); Assert.assertEquals("col2", ImmutableList.of("foo", "bar"), row.getRaw("col2")); Assert.assertEquals("col3", 1.0f, row.getRaw("col3")); diff --git 
a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java index ff301b55b88..0cbac888e92 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java @@ -25,7 +25,6 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; -import io.druid.java.util.common.StringUtils; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; @@ -38,6 +37,8 @@ import io.druid.indexer.HadoopyShardSpec; import io.druid.indexer.IndexGeneratorJob; import io.druid.indexer.JobHelper; import io.druid.indexer.Jobby; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -117,7 +118,7 @@ public class OrcIndexGeneratorJobTest "2014102212,i.example.com,963", "2014102212,j.example.com,333" ); - private final Interval interval = new Interval("2014-10-22T00:00:00Z/P1D"); + private final Interval interval = Intervals.of("2014-10-22T00:00:00Z/P1D"); private File dataRoot; private File outputRoot; private Integer[][][] shardInfoForEachSegment = new Integer[][][]{{ diff --git a/extensions-contrib/parquet-extensions/src/main/java/io/druid/data/input/parquet/ParquetHadoopInputRowParser.java b/extensions-contrib/parquet-extensions/src/main/java/io/druid/data/input/parquet/ParquetHadoopInputRowParser.java index 2b5b705ca29..a1d3b3a2fa6 100755 --- a/extensions-contrib/parquet-extensions/src/main/java/io/druid/data/input/parquet/ParquetHadoopInputRowParser.java +++ b/extensions-contrib/parquet-extensions/src/main/java/io/druid/data/input/parquet/ParquetHadoopInputRowParser.java @@ -29,6 +29,7 @@ import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.ParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import org.apache.avro.LogicalType; import org.apache.avro.LogicalTypes; import org.apache.avro.Schema; @@ -92,7 +93,7 @@ public class ParquetHadoopInputRowParser implements InputRowParser make() { - final LongColumnSelector timestampColumnSelector = cursor.makeLongColumnSelector(Column.TIME_COLUMN_NAME); + final LongColumnSelector timestampColumnSelector = + cursor.getColumnSelectorFactory().makeLongColumnSelector(Column.TIME_COLUMN_NAME); final List> selectorPlusList = Arrays.asList( DimensionHandlerUtils.createColumnSelectorPluses( STRATEGY_FACTORY, Lists.newArrayList(dims), - cursor + cursor.getColumnSelectorFactory() ) ); final Map metSelectors = Maps.newHashMap(); for (String metric : metrics) { - final ObjectColumnSelector metricSelector = cursor.makeObjectColumnSelector(metric); + final ObjectColumnSelector metricSelector = + cursor.getColumnSelectorFactory().makeObjectColumnSelector(metric); metSelectors.put(metric, metricSelector); } final int batchSize = query.getBatchSize(); diff --git a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryRunnerFactory.java 
b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryRunnerFactory.java index e520f981a0e..8d124367c07 100644 --- a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryRunnerFactory.java +++ b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryRunnerFactory.java @@ -20,7 +20,7 @@ package io.druid.query.scan; import com.google.common.base.Function; import com.google.inject.Inject; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; diff --git a/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java b/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java index 0a4a557a8c8..f4f121d2ea1 100644 --- a/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java +++ b/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/MultiSegmentScanQueryTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.io.CharSource; import com.google.common.util.concurrent.MoreExecutors; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.MergeSequence; import io.druid.java.util.common.guava.Sequence; @@ -43,7 +44,6 @@ import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.AfterClass; import org.junit.Assert; @@ -145,7 +145,7 @@ public class MultiSegmentScanQueryTest private static IncrementalIndex newIndex(String minTimeStamp, int maxRowCount) { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime(minTimeStamp).getMillis()) + .withMinTimestamp(DateTimes.of(minTimeStamp).getMillis()) .withQueryGranularity(Granularities.HOUR) .withMetrics(TestIndex.METRIC_AGGS) .build(); diff --git a/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java b/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java index c5e03915ac5..ca625ecb36b 100644 --- a/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java +++ b/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java @@ -25,7 +25,9 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.ObjectArrays; import com.google.common.collect.Sets; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequences; import io.druid.query.DefaultGenericQueryMetricsFactory; import io.druid.query.QueryPlus; @@ -39,8 +41,6 @@ import io.druid.query.filter.SelectorDimFilter; import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.spec.LegacySegmentSpec; import io.druid.query.spec.QuerySegmentSpec; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -94,7 +94,7 @@ public class ScanQueryRunnerTest }; public static final 
QuerySegmentSpec I_0112_0114 = new LegacySegmentSpec( - new Interval("2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z") + Intervals.of("2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z") ); public static final String[] V_0112_0114 = ObjectArrays.concat(V_0112, V_0113, String.class); @@ -506,7 +506,7 @@ public class ScanQueryRunnerTest event.put( specs[0], specs.length == 1 || specs[1].equals("STRING") ? values[i] : - specs[1].equals("TIME") ? new DateTime(values[i]) : + specs[1].equals("TIME") ? DateTimes.of(values[i]) : specs[1].equals("FLOAT") ? Float.valueOf(values[i]) : specs[1].equals("DOUBLE") ? Double.valueOf(values[i]) : specs[1].equals("LONG") ? Long.valueOf(values[i]) : diff --git a/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQuerySpecTest.java b/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQuerySpecTest.java index 64d92b71547..8567b62f56f 100644 --- a/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQuerySpecTest.java +++ b/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQuerySpecTest.java @@ -21,10 +21,10 @@ package io.druid.query.scan; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.TableDataSource; import io.druid.query.spec.LegacySegmentSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -58,7 +58,7 @@ public class ScanQuerySpecTest ScanQuery query = new ScanQuery( new TableDataSource(QueryRunnerTestHelper.dataSource), - new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")), + new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")), null, 0, 3, diff --git a/extensions-contrib/statsd-emitter/src/test/java/DimensionConverterTest.java b/extensions-contrib/statsd-emitter/src/test/java/DimensionConverterTest.java index 9ede6077260..b4c8fa33243 100644 --- a/extensions-contrib/statsd-emitter/src/test/java/DimensionConverterTest.java +++ b/extensions-contrib/statsd-emitter/src/test/java/DimensionConverterTest.java @@ -22,7 +22,7 @@ import com.google.common.collect.ImmutableList; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.emitter.statsd.DimensionConverter; import io.druid.emitter.statsd.StatsDMetric; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.junit.Test; import static org.junit.Assert.assertEquals; @@ -46,7 +46,7 @@ public class DimensionConverterTest .setDimension("remoteAddress", "194.0.90.2") .setDimension("id", "ID") .setDimension("context", "{context}") - .build(new DateTime(), "query/time", 10) + .build(DateTimes.nowUtc(), "query/time", 10) .build("broker", "brokerHost1"); ImmutableList.Builder actual = new ImmutableList.Builder<>(); diff --git a/extensions-contrib/statsd-emitter/src/test/java/StatsDEmitterTest.java b/extensions-contrib/statsd-emitter/src/test/java/StatsDEmitterTest.java index a6286e46504..0f5694038d9 100644 --- a/extensions-contrib/statsd-emitter/src/test/java/StatsDEmitterTest.java +++ b/extensions-contrib/statsd-emitter/src/test/java/StatsDEmitterTest.java @@ -22,14 +22,13 @@ import com.metamx.emitter.service.ServiceMetricEvent; import com.timgroup.statsd.StatsDClient; import io.druid.emitter.statsd.StatsDEmitter; import io.druid.emitter.statsd.StatsDEmitterConfig; +import io.druid.java.util.common.DateTimes; +import org.junit.Test; import static org.easymock.EasyMock.createMock; import static 
org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; -import org.joda.time.DateTime; -import org.junit.Test; - /** */ public class StatsDEmitterTest @@ -47,7 +46,7 @@ public class StatsDEmitterTest replay(client); emitter.emit(new ServiceMetricEvent.Builder() .setDimension("dataSource", "data-source") - .build(new DateTime(), "query/cache/total/hitRate", 0.54) + .build(DateTimes.nowUtc(), "query/cache/total/hitRate", 0.54) .build("broker", "brokerHost1") ); verify(client); @@ -75,7 +74,7 @@ public class StatsDEmitterTest .setDimension("remoteAddress", "194.0.90.2") .setDimension("id", "ID") .setDimension("context", "{context}") - .build(new DateTime(), "query/time", 10) + .build(DateTimes.nowUtc(), "query/time", 10) .build("broker", "brokerHost1") ); verify(client); @@ -103,7 +102,7 @@ public class StatsDEmitterTest .setDimension("remoteAddress", "194.0.90.2") .setDimension("id", "ID") .setDimension("context", "{context}") - .build(new DateTime(), "query/time", 10) + .build(DateTimes.nowUtc(), "query/time", 10) .build("broker", "brokerHost1") ); verify(client); diff --git a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregator.java b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregator.java index ecd4921fcea..78209b1acd6 100644 --- a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregator.java +++ b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregator.java @@ -28,9 +28,13 @@ public class TimestampAggregator implements Aggregator { static final Comparator COMPARATOR = LongMaxAggregator.COMPARATOR; - static long combineValues(Object lhs, Object rhs) + static Object combineValues(Comparator comparator, Object lhs, Object rhs) { - return Math.max(((Number) lhs).longValue(), ((Number) rhs).longValue()); + if (comparator.compare(((Number) lhs).longValue(), ((Number) rhs).longValue()) > 0) { + return lhs; + } else { + return rhs; + } } private final ObjectColumnSelector selector; diff --git a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java index 4466aa92a51..7b9fe52f86a 100644 --- a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java +++ b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregatorFactory.java @@ -21,9 +21,12 @@ package io.druid.query.aggregation; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.primitives.Longs; -import io.druid.java.util.common.StringUtils; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.StringUtils; import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ColumnValueSelector; +import io.druid.segment.ObjectColumnSelector; import org.joda.time.DateTime; import java.nio.ByteBuffer; @@ -82,7 +85,49 @@ public class TimestampAggregatorFactory extends AggregatorFactory @Override public Object combine(Object lhs, Object rhs) { - return TimestampAggregator.combineValues(lhs, rhs); + return TimestampAggregator.combineValues(comparator, lhs, rhs); + } + + @Override + public AggregateCombiner makeAggregateCombiner() + { + // TimestampAggregatorFactory.combine() delegates to TimestampAggregator.combineValues() and it doesn't check + 
// for nulls, so this AggregateCombiner neither. + return new LongAggregateCombiner() + { + private long result; + + @Override + public void reset(ColumnValueSelector selector) + { + result = getTimestamp(selector); + } + + private long getTimestamp(ColumnValueSelector selector) + { + if (selector instanceof ObjectColumnSelector) { + Object input = ((ObjectColumnSelector) selector).get(); + return convertLong(timestampSpec, input); + } else { + return selector.getLong(); + } + } + + @Override + public void fold(ColumnValueSelector selector) + { + long other = getTimestamp(selector); + if (comparator.compare(result, other) <= 0) { + result = other; + } + } + + @Override + public long getLong() + { + return result; + } + }; } @Override @@ -116,7 +161,7 @@ public class TimestampAggregatorFactory extends AggregatorFactory @Override public Object finalizeComputation(Object object) { - return new DateTime((long) object); + return DateTimes.utc((long) object); } @Override diff --git a/extensions-contrib/virtual-columns/src/main/java/io/druid/segment/MapVirtualColumn.java b/extensions-contrib/virtual-columns/src/main/java/io/druid/segment/MapVirtualColumn.java index ce019e2246f..ccfa85154e5 100644 --- a/extensions-contrib/virtual-columns/src/main/java/io/druid/segment/MapVirtualColumn.java +++ b/extensions-contrib/virtual-columns/src/main/java/io/druid/segment/MapVirtualColumn.java @@ -165,25 +165,25 @@ public class MapVirtualColumn implements VirtualColumn public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec, ColumnSelectorFactory factory) { // Could probably do something useful here if the column name is dot-style. But for now just return nothing. - return null; + return dimensionSpec.decorate(DimensionSelectorUtils.constantSelector(null, dimensionSpec.getExtractionFn())); } @Override public FloatColumnSelector makeFloatColumnSelector(String columnName, ColumnSelectorFactory factory) { - return null; + return ZeroFloatColumnSelector.instance(); } @Override public LongColumnSelector makeLongColumnSelector(String columnName, ColumnSelectorFactory factory) { - return null; + return ZeroLongColumnSelector.instance(); } @Override public DoubleColumnSelector makeDoubleColumnSelector(String columnName, ColumnSelectorFactory factory) { - return null; + return ZeroDoubleColumnSelector.instance(); } @Override diff --git a/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java b/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java index bc5a3629895..d930b862a99 100644 --- a/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java +++ b/extensions-contrib/virtual-columns/src/test/java/io/druid/segment/MapVirtualColumnTest.java @@ -30,6 +30,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryPlus; @@ -46,7 +47,6 @@ import io.druid.query.select.SelectQueryRunnerFactory; import io.druid.query.select.SelectResultValue; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -85,7 +85,7 @@ public class 
MapVirtualColumnTest ); final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis()) + .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) .build(); final IncrementalIndex index = new IncrementalIndex.Builder() .setIndexSchema(schema) diff --git a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java index 607d6b71dc5..f062a3acd95 100644 --- a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java +++ b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java @@ -39,6 +39,7 @@ import org.apache.avro.generic.GenericRecord; import org.apache.avro.io.DatumWriter; import org.apache.avro.io.EncoderFactory; import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; import org.junit.Before; import org.junit.Test; import org.schemarepo.InMemoryRepository; @@ -75,7 +76,7 @@ public class AvroStreamInputRowParserTest public static final float SOME_FLOAT_VALUE = 0.23555f; public static final int SOME_INT_VALUE = 1; public static final long SOME_LONG_VALUE = 679865987569912369L; - public static final DateTime DATE_TIME = new DateTime(2015, 10, 25, 19, 30); + public static final DateTime DATE_TIME = new DateTime(2015, 10, 25, 19, 30, ISOChronology.getInstanceUTC()); public static final List DIMENSIONS = Arrays.asList(EVENT_TYPE, ID, SOME_OTHER_ID, IS_VALID); public static final TimeAndDimsParseSpec PARSE_SPEC = new TimeAndDimsParseSpec( new TimestampSpec("timestamp", "millis", null), diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java index f0924140992..0bf8ee09e5e 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchAggregatorFactory.java @@ -22,15 +22,21 @@ package io.druid.query.aggregation.datasketches.theta; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.primitives.Ints; +import com.yahoo.sketches.Family; import com.yahoo.sketches.Util; import com.yahoo.sketches.theta.SetOperation; +import com.yahoo.sketches.theta.Union; import io.druid.java.util.common.StringUtils; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.BufferAggregator; +import io.druid.query.aggregation.ObjectAggregateCombiner; import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ColumnValueSelector; import io.druid.segment.ObjectColumnSelector; +import javax.annotation.Nullable; import java.nio.ByteBuffer; import java.util.Collections; import java.util.Comparator; @@ -98,6 +104,47 @@ public abstract class SketchAggregatorFactory extends AggregatorFactory return SketchHolder.combine(lhs, rhs, size); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new ObjectAggregateCombiner() + { + private final Union union = (Union) SetOperation.builder().build(size, 
Family.UNION); + private final SketchHolder combined = SketchHolder.of(union); + + @Override + public void reset(ColumnValueSelector selector) + { + union.reset(); + fold(selector); + } + + @Override + public void fold(ColumnValueSelector selector) + { + @SuppressWarnings("unchecked") + SketchHolder other = ((ObjectColumnSelector) selector).get(); + // SketchAggregatorFactory.combine() delegates to SketchHolder.combine() and it doesn't check for nulls, so we + // neither. + other.updateUnion(union); + combined.invalidateCache(); + } + + @Override + public Class classOfObject() + { + return SketchHolder.class; + } + + @Nullable + @Override + public SketchHolder get() + { + return combined; + } + }; + } + @Override @JsonProperty public String getName() diff --git a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchHolder.java b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchHolder.java index 891d3975404..33a1779b9e0 100644 --- a/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchHolder.java +++ b/extensions-core/datasketches/src/main/java/io/druid/query/aggregation/datasketches/theta/SketchHolder.java @@ -171,7 +171,7 @@ public class SketchHolder return result; } - public static Object combine(Object o1, Object o2, int nomEntries) + public static SketchHolder combine(Object o1, Object o2, int nomEntries) { SketchHolder holder1 = (SketchHolder) o1; SketchHolder holder2 = (SketchHolder) o2; @@ -194,7 +194,7 @@ public class SketchHolder } } - private void invalidateCache() + void invalidateCache() { cachedEstimate = null; cachedSketch = null; diff --git a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java index b4b10c0e585..2583229d6ef 100644 --- a/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java +++ b/extensions-core/datasketches/src/test/java/io/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java @@ -26,6 +26,7 @@ import com.google.common.collect.Lists; import com.google.common.io.Files; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -221,7 +222,7 @@ public class SketchAggregationWithSimpleDataTest Sequences.toList(seq, Lists.newArrayList()) ); - Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), result.getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-10-20T00:00:00.000Z"), result.getTimestamp()); Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketch_count"), 0.01); Assert.assertEquals(50.0, result.getValue().getDoubleMetric("sketchEstimatePostAgg"), 0.01); @@ -249,7 +250,7 @@ public class SketchAggregationWithSimpleDataTest Sequences.toList(seq, Lists.newArrayList()) ); - Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), result.getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-10-20T00:00:00.000Z"), result.getTimestamp()); DimensionAndMetricValueExtractor value = Iterables.getOnlyElement(result.getValue().getValue()); Assert.assertEquals(38.0, 
value.getDoubleMetric("sketch_count"), 0.01); @@ -277,7 +278,7 @@ public class SketchAggregationWithSimpleDataTest ); Result result = (Result) Iterables.getOnlyElement(Sequences.toList(seq, Lists.newArrayList())); - Assert.assertEquals(new DateTime("2014-10-20T00:00:00.000Z"), result.getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-10-20T00:00:00.000Z"), result.getTimestamp()); Assert.assertEquals(100, result.getValue().getEvents().size()); Assert.assertEquals("AgMDAAAazJMCAAAAAACAPzz9j7pWTMdROWGf15uY1nI=", result.getValue().getEvents().get(0).getEvent().get("pty_country")); } diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java index 76806fb6e41..47fe6bb830f 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java @@ -48,7 +48,6 @@ import java.io.OutputStream; import java.io.Reader; import java.io.Writer; import java.net.URI; -import java.util.ArrayList; import java.util.concurrent.Callable; /** @@ -194,7 +193,6 @@ public class HdfsDataSegmentPuller implements DataSegmentPuller, URIDataPuller } final RemoteIterator children = fs.listFiles(path, false); - final ArrayList localChildren = new ArrayList<>(); final FileUtils.FileCopyResult result = new FileUtils.FileCopyResult(); while (children.hasNext()) { final LocatedFileStatus child = children.next(); diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java index cc0e19929ae..70b5e0c5906 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.IOE; +import io.druid.java.util.common.Intervals; import io.druid.storage.hdfs.HdfsDataSegmentFinder; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NumberedShardSpec; @@ -36,7 +37,6 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.MiniDFSCluster; -import org.joda.time.Interval; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; @@ -56,80 +56,62 @@ public class HdfsDataSegmentFinderTest private static final ObjectMapper mapper = new DefaultObjectMapper(); private static final String DESCRIPTOR_JSON = "descriptor.json"; private static final String INDEX_ZIP = "index.zip"; - private static final DataSegment SEGMENT_1 = DataSegment.builder() - .dataSource("wikipedia") - .interval( - new Interval( - "2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z" - ) - ) - .version("2015-10-21T22:07:57.074Z") - .loadSpec( - ImmutableMap.of( - "type", - "hdfs", - "path", - "hdfs://abc.com:1234/somewhere/index.zip" - ) - ) - .dimensions(ImmutableList.of("language", "page")) - .metrics(ImmutableList.of("count")) - .build(); + private static final DataSegment SEGMENT_1 = DataSegment + .builder() + .dataSource("wikipedia") + 
.interval(Intervals.of("2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z")) + .version("2015-10-21T22:07:57.074Z") + .loadSpec( + ImmutableMap.of( + "type", + "hdfs", + "path", + "hdfs://abc.com:1234/somewhere/index.zip" + ) + ) + .dimensions(ImmutableList.of("language", "page")) + .metrics(ImmutableList.of("count")) + .build(); - private static final DataSegment SEGMENT_2 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z" - ) - ) - .build(); + private static final DataSegment SEGMENT_2 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z")) + .build(); - private static final DataSegment SEGMENT_3 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .version("2015-10-22T22:07:57.074Z") - .build(); + private static final DataSegment SEGMENT_3 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .version("2015-10-22T22:07:57.074Z") + .build(); - private static final DataSegment SEGMENT_4_0 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .shardSpec(new NumberedShardSpec(0, 2)) - .build(); + private static final DataSegment SEGMENT_4_0 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .shardSpec(new NumberedShardSpec(0, 2)) + .build(); - private static final DataSegment SEGMENT_4_1 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .shardSpec(new NumberedShardSpec(1, 2)) - .build(); + private static final DataSegment SEGMENT_4_1 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .shardSpec(new NumberedShardSpec(1, 2)) + .build(); - private static final DataSegment SEGMENT_5 = DataSegment.builder() - .dataSource("wikipedia") - .interval( - new Interval( - "2013-09-03T00:00:00.000Z/2013-09-04T00:00:00.000Z" - ) - ) - .version("2015-10-21T22:07:57.074Z") - .loadSpec( - ImmutableMap.of( - "type", - "hdfs", - "path", - "hdfs://abc.com:1234/somewhere/1_index.zip" - ) - ) - .dimensions(ImmutableList.of("language", "page")) - .metrics(ImmutableList.of("count")) - .build(); + private static final DataSegment SEGMENT_5 = DataSegment + .builder() + .dataSource("wikipedia") + .interval(Intervals.of("2013-09-03T00:00:00.000Z/2013-09-04T00:00:00.000Z")) + .version("2015-10-21T22:07:57.074Z") + .loadSpec( + ImmutableMap.of( + "type", + "hdfs", + "path", + "hdfs://abc.com:1234/somewhere/1_index.zip" + ) + ) + .dimensions(ImmutableList.of("language", "page")) + .metrics(ImmutableList.of("count")) + .build(); private static MiniDFSCluster miniCluster; private static File hdfsTmpDir; diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java index d9118bf2292..bdf499dcd90 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java @@ -21,6 +21,7 @@ package io.druid.storage.hdfs; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import 
io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; @@ -28,7 +29,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -183,7 +183,7 @@ public class HdfsDataSegmentKillerTest { return new DataSegment( "dataSource", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "hdfs", diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java index e12f0ed516c..f5baebb9339 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java @@ -20,7 +20,6 @@ package io.druid.storage.hdfs; import com.fasterxml.jackson.core.JsonParser; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.InjectableValues; @@ -42,6 +41,7 @@ import io.druid.indexer.HadoopIngestionSpec; import io.druid.indexer.JobHelper; import io.druid.jackson.DefaultObjectMapper; import io.druid.jackson.GranularityModule; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.segment.loading.LocalDataSegmentPusher; import io.druid.segment.loading.LocalDataSegmentPusherConfig; @@ -57,6 +57,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.TaskType; import org.joda.time.DateTime; import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -147,7 +148,7 @@ public class HdfsDataSegmentPusherTest DataSegment segmentToPush = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), @@ -230,7 +231,7 @@ public class HdfsDataSegmentPusherTest for (int i = 0; i < numberOfSegments; i++) { segments[i] = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), @@ -337,11 +338,10 @@ public class HdfsDataSegmentPusherTest Interval.class, new StdDeserializer(Interval.class) { @Override - public Interval deserialize( - JsonParser jsonParser, DeserializationContext deserializationContext - ) throws IOException, JsonProcessingException + public Interval deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) + throws IOException { - return new Interval(jsonParser.getText()); + return Intervals.of(jsonParser.getText()); } } ); @@ -353,7 +353,7 @@ public class HdfsDataSegmentPusherTest public void shouldNotHaveColonsInHdfsStorageDir() throws Exception { - Interval interval = new Interval("2011-10-01/2011-10-02"); + Interval interval = Intervals.of("2011-10-01/2011-10-02"); ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( @@ -414,7 +414,7 @@ public class HdfsDataSegmentPusherTest ) ); - Bucket bucket = new Bucket(4711, new DateTime(2012, 07, 10, 5, 30), 
4712); + Bucket bucket = new Bucket(4711, new DateTime(2012, 07, 10, 5, 30, ISOChronology.getInstanceUTC()), 4712); Path path = JobHelper.makeFileNamePath( new Path(cfg.getSchema().getIOConfig().getSegmentOutputPath()), new DistributedFileSystem(), @@ -524,7 +524,7 @@ public class HdfsDataSegmentPusherTest ) ); - Bucket bucket = new Bucket(4711, new DateTime(2012, 07, 10, 5, 30), 4712); + Bucket bucket = new Bucket(4711, new DateTime(2012, 07, 10, 5, 30, ISOChronology.getInstanceUTC()), 4712); Path path = JobHelper.makeFileNamePath( new Path(cfg.getSchema().getIOConfig().getSegmentOutputPath()), new LocalFileSystem(), diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogram.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogram.java index 494b18681ec..61845127213 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogram.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogram.java @@ -534,9 +534,11 @@ public class ApproximateHistogram */ public ApproximateHistogram copy(ApproximateHistogram h) { - this.size = h.size; - this.positions = new float[size]; - this.bins = new long[size]; + if (h.size > this.size) { + this.size = h.size; + this.positions = new float[size]; + this.bins = new long[size]; + } System.arraycopy(h.positions, 0, this.positions, 0, h.binCount); System.arraycopy(h.bins, 0, this.bins, 0, h.binCount); diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java index 3601aa5da8a..fcc4951d19e 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregator.java @@ -36,7 +36,7 @@ public class ApproximateHistogramAggregator implements Aggregator } }; - static Object combineHistograms(Object lhs, Object rhs) + static ApproximateHistogram combineHistograms(Object lhs, Object rhs) { return ((ApproximateHistogram) lhs).foldFast((ApproximateHistogram) rhs); } diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java index 7bfa9f05c10..40dffb786b2 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogramAggregatorFactory.java @@ -26,16 +26,20 @@ import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Preconditions; import com.google.common.primitives.Floats; import com.google.common.primitives.Ints; - import io.druid.java.util.common.StringUtils; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; import io.druid.query.aggregation.AggregatorUtil; import io.druid.query.aggregation.BufferAggregator; +import io.druid.query.aggregation.ObjectAggregateCombiner; 
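// Editor's note -- illustrative sketch only, not part of this patch. The copy() change above
// matters for the AggregateCombiner added below: reset() reuses a single "combined" histogram and
// copies the first input into it, so with the new size check the positions/bins arrays are only
// reallocated when an incoming histogram is larger than the one already held.
ApproximateHistogram combined = new ApproximateHistogram();
ApproximateHistogram first = new ApproximateHistogram();      // stands in for the first row's value
ApproximateHistogram other = new ApproximateHistogram();      // stands in for a later row's value
combined.copy(first);        // what the combiner's reset(selector) does
combined.foldFast(other);    // what the combiner's fold(selector) does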
import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ColumnValueSelector; +import io.druid.segment.ObjectColumnSelector; import org.apache.commons.codec.binary.Base64; +import javax.annotation.Nullable; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collections; @@ -111,6 +115,46 @@ public class ApproximateHistogramAggregatorFactory extends AggregatorFactory return ApproximateHistogramAggregator.combineHistograms(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + // ApproximateHistogramAggregatorFactory.combine() delegates to ApproximateHistogramAggregator.combineHistograms() + // and it doesn't check for nulls, so this AggregateCombiner neither. + return new ObjectAggregateCombiner() + { + private final ApproximateHistogram combined = new ApproximateHistogram(); + + @Override + public void reset(ColumnValueSelector selector) + { + @SuppressWarnings("unchecked") + ApproximateHistogram first = ((ObjectColumnSelector) selector).get(); + combined.copy(first); + } + + @Override + public void fold(ColumnValueSelector selector) + { + @SuppressWarnings("unchecked") + ApproximateHistogram other = ((ObjectColumnSelector) selector).get(); + combined.foldFast(other); + } + + @Override + public Class classOfObject() + { + return ApproximateHistogram.class; + } + + @Nullable + @Override + public ApproximateHistogram get() + { + return combined; + } + }; + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java index d0578bb55e3..8ecda69327c 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java @@ -183,7 +183,7 @@ public class QuantileSqlAggregator implements SqlAggregator } } else { final ExpressionVirtualColumn virtualColumn = input.toVirtualColumn( - String.format("%s:v", name), + StringUtils.format("%s:v", name), ValueType.FLOAT, plannerContext.getExprMacroTable() ); diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java index e42b0f4a7ac..b48ea4b6708 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.collections.StupidPool; +import io.druid.java.util.common.DateTimes; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; @@ -40,7 +41,6 @@ import io.druid.query.topn.TopNQueryQueryToolChest; import io.druid.query.topn.TopNQueryRunnerFactory; import io.druid.query.topn.TopNResultValue; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -146,7 +146,7 @@ public class ApproximateHistogramTopNQueryTest 
List> expectedResults = Collections.singletonList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java index 807c7b434f1..2fed81cabde 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregatorTest.java @@ -132,7 +132,8 @@ public class QuantileSqlAggregatorTest CalciteTests.createMockQueryLifecycleFactory(walker), operatorTable, CalciteTests.createExprMacroTable(), - plannerConfig + plannerConfig, + CalciteTests.getJsonMapper() ); } diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java index 68450cc5066..f81515ba18e 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java @@ -55,6 +55,7 @@ import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.task.AbstractTask; import io.druid.indexing.common.task.RealtimeIndexTask; import io.druid.indexing.common.task.TaskResource; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequence; @@ -266,7 +267,7 @@ public class KafkaIndexTask extends AbstractTask implements ChatHandler public TaskStatus run(final TaskToolbox toolbox) throws Exception { log.info("Starting up!"); - startTime = DateTime.now(); + startTime = DateTimes.nowUtc(); mapper = toolbox.getObjectMapper(); status = Status.STARTING; @@ -612,10 +613,10 @@ public class KafkaIndexTask extends AbstractTask implements ChatHandler if (chatHandlerProvider.isPresent()) { chatHandlerProvider.get().unregister(getId()); } - } - toolbox.getDruidNodeAnnouncer().unannounce(discoveryDruidNode); - toolbox.getDataSegmentServerAnnouncer().unannounce(); + toolbox.getDruidNodeAnnouncer().unannounce(discoveryDruidNode); + toolbox.getDataSegmentServerAnnouncer().unannounce(); + } return success(); } diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java index eedeed801e7..d3c225095ef 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java @@ -65,6 +65,7 @@ import io.druid.indexing.overlord.TaskRunnerWorkItem; import io.druid.indexing.overlord.TaskStorage; import io.druid.indexing.overlord.supervisor.Supervisor; import io.druid.indexing.overlord.supervisor.SupervisorReport; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; @@ -349,7 +350,7 @@ public class KafkaSupervisor 
implements Supervisor } } ); - firstRunTime = DateTime.now().plus(ioConfig.getStartDelay()); + firstRunTime = DateTimes.nowUtc().plus(ioConfig.getStartDelay()); scheduledExec.scheduleAtFixedRate( buildRunTask(), ioConfig.getStartDelay().getMillis(), @@ -666,7 +667,7 @@ public class KafkaSupervisor implements Supervisor if (taskInfoProvider.getTaskLocation(entry.getKey()).equals(TaskLocation.unknown())) { killTask(entry.getKey()); } else { - entry.getValue().startTime = new DateTime(0); + entry.getValue().startTime = DateTimes.EPOCH; } } } @@ -969,7 +970,7 @@ public class KafkaSupervisor implements Supervisor TaskGroup newTaskGroup = new TaskGroup(ImmutableMap.copyOf(startingPartitions), Optional.absent(), Optional.absent()); newTaskGroup.tasks.put(taskId, new TaskData()); - newTaskGroup.completionTimeout = DateTime.now().plus(ioConfig.getCompletionTimeout()); + newTaskGroup.completionTimeout = DateTimes.nowUtc().plus(ioConfig.getCompletionTimeout()); taskGroupList.add(newTaskGroup); } @@ -1052,7 +1053,7 @@ public class KafkaSupervisor implements Supervisor TaskGroup group = entry.getValue(); // find the longest running task from this group - DateTime earliestTaskStart = DateTime.now(); + DateTime earliestTaskStart = DateTimes.nowUtc(); for (TaskData taskData : group.tasks.values()) { if (earliestTaskStart.isAfter(taskData.startTime)) { earliestTaskStart = taskData.startTime; @@ -1075,7 +1076,7 @@ public class KafkaSupervisor implements Supervisor if (endOffsets != null) { // set a timeout and put this group in pendingCompletionTaskGroups so that it can be monitored for completion - group.completionTimeout = DateTime.now().plus(ioConfig.getCompletionTimeout()); + group.completionTimeout = DateTimes.nowUtc().plus(ioConfig.getCompletionTimeout()); pendingCompletionTaskGroups.putIfAbsent(groupId, Lists.newCopyOnWriteArrayList()); pendingCompletionTaskGroups.get(groupId).add(group); @@ -1362,11 +1363,11 @@ public class KafkaSupervisor implements Supervisor log.info("Creating new task group [%d] for partitions %s", groupId, partitionGroups.get(groupId).keySet()); Optional minimumMessageTime = (ioConfig.getLateMessageRejectionPeriod().isPresent() ? Optional.of( - DateTime.now().minus(ioConfig.getLateMessageRejectionPeriod().get()) + DateTimes.nowUtc().minus(ioConfig.getLateMessageRejectionPeriod().get()) ) : Optional.absent()); Optional maximumMessageTime = (ioConfig.getEarlyMessageRejectionPeriod().isPresent() ? 
Optional.of( - DateTime.now().plus(ioConfig.getEarlyMessageRejectionPeriod().get()) + DateTimes.nowUtc().plus(ioConfig.getEarlyMessageRejectionPeriod().get()) ) : Optional.absent()); taskGroups.put(groupId, new TaskGroup(generateStartingOffsetsForPartitionGroup(groupId), minimumMessageTime, maximumMessageTime)); @@ -1626,7 +1627,7 @@ public class KafkaSupervisor implements Supervisor Map partitionLag = getLagPerPartition(getHighestCurrentOffsets()); KafkaSupervisorReport report = new KafkaSupervisorReport( dataSource, - DateTime.now(), + DateTimes.nowUtc(), ioConfig.getTopic(), numPartitions, ioConfig.getReplicas(), @@ -1648,7 +1649,7 @@ public class KafkaSupervisor implements Supervisor Long remainingSeconds = null; if (startTime != null) { remainingSeconds = Math.max( - 0, ioConfig.getTaskDuration().getMillis() - (DateTime.now().getMillis() - startTime.getMillis()) + 0, ioConfig.getTaskDuration().getMillis() - (System.currentTimeMillis() - startTime.getMillis()) ) / 1000; } @@ -1674,7 +1675,7 @@ public class KafkaSupervisor implements Supervisor Map currentOffsets = entry.getValue().currentOffsets; Long remainingSeconds = null; if (taskGroup.completionTimeout != null) { - remainingSeconds = Math.max(0, taskGroup.completionTimeout.getMillis() - DateTime.now().getMillis()) + remainingSeconds = Math.max(0, taskGroup.completionTimeout.getMillis() - System.currentTimeMillis()) / 1000; } @@ -1822,7 +1823,7 @@ public class KafkaSupervisor implements Supervisor try { updateCurrentOffsets(); updateLatestOffsetsFromKafka(); - offsetsLastUpdated = DateTime.now(); + offsetsLastUpdated = DateTimes.nowUtc(); } catch (Exception e) { log.warn(e, "Exception while getting current/latest offsets"); diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIOConfigTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIOConfigTest.java index c1c75754787..49a9b90033d 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIOConfigTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIOConfigTest.java @@ -24,9 +24,9 @@ import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.segment.indexing.IOConfig; import org.hamcrest.CoreMatchers; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -111,8 +111,8 @@ public class KafkaIOConfigTest Assert.assertEquals(ImmutableMap.of("bootstrap.servers", "localhost:9092"), config.getConsumerProperties()); Assert.assertEquals(false, config.isUseTransaction()); Assert.assertEquals(true, config.isPauseAfterRead()); - Assert.assertEquals(new DateTime("2016-05-31T12:00Z"), config.getMinimumMessageTime().get()); - Assert.assertEquals(new DateTime("2016-05-31T14:00Z"), config.getMaximumMessageTime().get()); + Assert.assertEquals(DateTimes.of("2016-05-31T12:00Z"), config.getMinimumMessageTime().get()); + Assert.assertEquals(DateTimes.of("2016-05-31T14:00Z"), config.getMaximumMessageTime().get()); Assert.assertTrue("skipOffsetGaps", config.isSkipOffsetGaps()); } diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java 
b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java index e27e29e4339..70084eb8983 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java @@ -35,6 +35,7 @@ import io.druid.indexing.common.TaskInfoProvider; import io.druid.indexing.common.TaskLocation; import io.druid.indexing.common.TaskStatus; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; import org.easymock.Capture; @@ -346,7 +347,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport public void testGetStartTime() throws Exception { client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2); - DateTime now = DateTime.now(); + DateTime now = DateTimes.nowUtc(); Capture captured = Capture.newInstance(); expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) @@ -789,7 +790,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport @Test public void testGetStartTimeAsync() throws Exception { - final DateTime now = DateTime.now(); + final DateTime now = DateTimes.nowUtc(); final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java index cbcaca7ff81..ec02dc372ca 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -78,7 +78,9 @@ import io.druid.indexing.test.TestDataSegmentAnnouncer; import io.druid.indexing.test.TestDataSegmentKiller; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; @@ -128,7 +130,6 @@ import org.apache.curator.test.TestingCluster; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerRecord; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.Period; import org.junit.After; @@ -420,7 +421,7 @@ public class KafkaIndexTaskTest kafkaServer.consumerProperties(), true, false, - new DateTime("2010"), + DateTimes.of("2010"), null, false ), @@ -477,7 +478,7 @@ public class KafkaIndexTaskTest true, false, null, - new DateTime("2010"), + DateTimes.of("2010"), false ), null, @@ -1663,7 +1664,7 @@ public class KafkaIndexTaskTest return FluentIterable.from( metadataStorageCoordinator.getUsedSegmentsForInterval( DATA_SCHEMA.getDataSource(), - new Interval("0000/3000") + Intervals.of("0000/3000") ) ).transform( new Function() @@ -1759,7 +1760,7 @@ public class KafkaIndexTaskTest private SegmentDescriptor SD(final Task task, final String intervalString, final int 
partitionNum) { - final Interval interval = new Interval(intervalString); + final Interval interval = Intervals.of(intervalString); return new SegmentDescriptor(interval, getLock(task, interval).getVersion(), partitionNum); } } diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index b57814ca9c5..9ed12b7cbff 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -26,7 +26,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; -import io.druid.java.util.common.StringUtils; import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.JSONParseSpec; @@ -57,7 +56,9 @@ import io.druid.indexing.overlord.TaskRunnerListener; import io.druid.indexing.overlord.TaskRunnerWorkItem; import io.druid.indexing.overlord.TaskStorage; import io.druid.indexing.overlord.supervisor.SupervisorReport; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -607,7 +608,7 @@ public class KafkaSupervisorTest extends EasyMockSupport expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); expect(taskClient.getStatusAsync(anyString())).andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)) .anyTimes(); - expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTime.now())).anyTimes(); + expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTimes.nowUtc())).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -696,7 +697,7 @@ public class KafkaSupervisorTest extends EasyMockSupport expect(taskStorage.getTask("id5")).andReturn(Optional.of(id3)).anyTimes(); expect(taskClient.getStatusAsync(anyString())).andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)) .anyTimes(); - expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTime.now())).anyTimes(); + expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTimes.nowUtc())).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -728,7 +729,7 @@ public class KafkaSupervisorTest extends EasyMockSupport expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(taskClient.getStatusAsync(anyString())).andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)) .anyTimes(); - expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTime.now())).anyTimes(); + expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTimes.nowUtc())).anyTimes(); 
expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -790,8 +791,8 @@ public class KafkaSupervisorTest extends EasyMockSupport supervisor = getSupervisor(2, 1, true, "PT1H", null, null, false); addSomeEvents(1); - DateTime now = DateTime.now(); - DateTime maxi = DateTime.now().plusMinutes(60); + DateTime now = DateTimes.nowUtc(); + DateTime maxi = now.plusMinutes(60); Task id1 = createKafkaIndexTask( "id1", DATASOURCE, @@ -876,7 +877,7 @@ public class KafkaSupervisorTest extends EasyMockSupport expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); expect(taskClient.getStatusAsync(anyString())).andReturn(Futures.immediateFuture(KafkaIndexTask.Status.NOT_STARTED)) .anyTimes(); - expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTime.now())).anyTimes(); + expect(taskClient.getStartTimeAsync(anyString())).andReturn(Futures.immediateFuture(DateTimes.nowUtc())).anyTimes(); expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null @@ -976,10 +977,10 @@ public class KafkaSupervisorTest extends EasyMockSupport .andReturn(Futures.immediateFuture(KafkaIndexTask.Status.READING)) .anyTimes(); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture(DateTime.now().minusMinutes(2))) - .andReturn(Futures.immediateFuture(DateTime.now())); + .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) - .andReturn(Futures.immediateFuture(DateTime.now())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) .times(2); expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 10L, 1, 20L, 2, 30L))) @@ -1202,7 +1203,7 @@ public class KafkaSupervisorTest extends EasyMockSupport { final TaskLocation location1 = new TaskLocation("testHost", 1234, -1); final TaskLocation location2 = new TaskLocation("testHost2", 145, -1); - final DateTime startTime = new DateTime(); + final DateTime startTime = DateTimes.nowUtc(); supervisor = getSupervisor(1, 1, true, "PT1H", null, null, false); addSomeEvents(6); @@ -1383,10 +1384,10 @@ public class KafkaSupervisorTest extends EasyMockSupport .andReturn(Futures.immediateFuture(KafkaIndexTask.Status.READING)) .anyTimes(); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture(DateTime.now().minusMinutes(2))) - .andReturn(Futures.immediateFuture(DateTime.now())); + .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) - .andReturn(Futures.immediateFuture(DateTime.now())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) .times(2); expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) .andReturn(Futures.>immediateFailedFuture(new RuntimeException())).times(2); @@ -1450,10 +1451,10 @@ public class KafkaSupervisorTest extends EasyMockSupport .andReturn(Futures.immediateFuture(KafkaIndexTask.Status.READING)) .anyTimes(); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture(DateTime.now().minusMinutes(2))) - 
.andReturn(Futures.immediateFuture(DateTime.now())); + .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) - .andReturn(Futures.immediateFuture(DateTime.now())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) .times(2); expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 10L, 1, 20L, 2, 30L))) @@ -1508,7 +1509,7 @@ public class KafkaSupervisorTest extends EasyMockSupport { final TaskLocation location1 = new TaskLocation("testHost", 1234, -1); final TaskLocation location2 = new TaskLocation("testHost2", 145, -1); - final DateTime startTime = new DateTime(); + final DateTime startTime = DateTimes.nowUtc(); supervisor = getSupervisor(2, 1, true, "PT1H", null, null, false); addSomeEvents(1); @@ -1697,7 +1698,7 @@ public class KafkaSupervisorTest extends EasyMockSupport { final TaskLocation location1 = new TaskLocation("testHost", 1234, -1); final TaskLocation location2 = new TaskLocation("testHost2", 145, -1); - final DateTime startTime = new DateTime(); + final DateTime startTime = DateTimes.nowUtc(); supervisor = getSupervisor(2, 1, true, "PT1H", null, null, false); addSomeEvents(1); diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java index 445237d2f31..5e36feeac68 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java @@ -26,7 +26,6 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; @@ -38,6 +37,7 @@ import io.druid.guice.annotations.Json; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.UOE; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.parsers.CSVParser; import io.druid.java.util.common.parsers.DelimitedParser; import io.druid.java.util.common.parsers.JSONParser; @@ -591,10 +591,6 @@ public class UriExtractionNamespace implements ExtractionNamespace @JsonTypeName("simpleJson") public static class ObjectMapperFlatDataParser implements FlatDataParser { - private static final TypeReference> MAP_STRING_STRING = new TypeReference>() - { - }; - private final Parser parser; @JsonCreator @@ -612,7 +608,7 @@ public class UriExtractionNamespace implements ExtractionNamespace public Map parse(String input) { try { - return jsonFactory.createParser(input).readValueAs(MAP_STRING_STRING); + return jsonFactory.createParser(input).readValueAs(JacksonUtils.TYPE_REFERENCE_MAP_STRING_STRING); } catch (IOException e) { throw Throwables.propagate(e); diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java 
b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java index aa274985474..5a7f5755326 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java @@ -19,7 +19,7 @@ package io.druid.server.lookup.namespace; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; diff --git a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java index 74e613d5977..80adc7a3e91 100644 --- a/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/io/druid/query/lookup/NamespaceLookupExtractorFactoryTest.java @@ -19,7 +19,6 @@ package io.druid.query.lookup; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.BeanProperty; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.InjectableValues; @@ -37,6 +36,7 @@ import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Self; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.ISE; import io.druid.query.lookup.namespace.ExtractionNamespace; import io.druid.query.lookup.namespace.UriExtractionNamespace; @@ -462,9 +462,7 @@ public class NamespaceLookupExtractorFactoryTest Assert.assertFalse(namespaceLookupExtractorFactory.replaces(mapper.readValue(str, LookupExtractorFactory.class))); final Map map = new HashMap<>(mapper.>readValue( str, - new TypeReference>() - { - } + JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT )); map.put("firstCacheTimeout", "1"); Assert.assertTrue(namespaceLookupExtractorFactory.replaces(mapper.convertValue(map, LookupExtractorFactory.class))); diff --git a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java index 19a95087ca1..863c0619bc9 100644 --- a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java +++ b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtobufInputRowParserTest.java @@ -32,6 +32,7 @@ import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.TimestampSpec; import io.druid.java.util.common.parsers.ParseException; import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; import org.junit.Before; import org.junit.Test; @@ -116,7 +117,7 @@ public class ProtobufInputRowParserTest ProtobufInputRowParser parser = new ProtobufInputRowParser(parseSpec, "prototest.desc", "ProtoTestEvent"); //create binary of proto test event - DateTime dateTime = new DateTime(2012, 07, 12, 9, 30); + DateTime dateTime = new DateTime(2012, 07, 12, 9, 30, ISOChronology.getInstanceUTC()); ProtoTestEventWrapper.ProtoTestEvent event = 
ProtoTestEventWrapper.ProtoTestEvent.newBuilder() .setDescription("description") .setEventType(ProtoTestEventWrapper.ProtoTestEvent.EventCategory.CATEGORY_ONE) diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java index d8b1450bf96..75a4d2f0f13 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentArchiverTest.java @@ -27,11 +27,11 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import org.easymock.EasyMock; import org.jets3t.service.impl.rest.httpclient.RestS3Service; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; @@ -73,7 +73,7 @@ public class S3DataSegmentArchiverTest .binaryVersion(1) .dataSource("dataSource") .dimensions(ImmutableList.of()) - .interval(Interval.parse("2015/2016")) + .interval(Intervals.of("2015/2016")) .version("version") .loadSpec(ImmutableMap.of( "type", diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java index 00e01700089..5d0e4bd4b00 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java @@ -31,6 +31,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NumberedShardSpec; @@ -42,7 +43,6 @@ import org.jets3t.service.StorageObjectsChunk; import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Object; import org.jets3t.service.model.StorageObject; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; @@ -62,62 +62,47 @@ public class S3DataSegmentFinderTest { private static final ObjectMapper mapper = new DefaultObjectMapper(); - private static final DataSegment SEGMENT_1 = DataSegment.builder() - .dataSource("wikipedia") - .interval( - new Interval( - "2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z" - ) - ) - .version("2015-10-21T22:07:57.074Z") - .loadSpec( - ImmutableMap.of( - "type", - "s3_zip", - "bucket", - "bucket1", - "key", - "abc/somewhere/index.zip" - ) - ) - .dimensions(ImmutableList.of("language", "page")) - .metrics(ImmutableList.of("count")) - .build(); + private static final DataSegment SEGMENT_1 = DataSegment + .builder() + .dataSource("wikipedia") + .interval(Intervals.of("2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z")) + .version("2015-10-21T22:07:57.074Z") + .loadSpec( + ImmutableMap.of( + "type", + "s3_zip", + "bucket", + "bucket1", + "key", + "abc/somewhere/index.zip" + ) + ) + .dimensions(ImmutableList.of("language", "page")) + 
.metrics(ImmutableList.of("count")) + .build(); - private static final DataSegment SEGMENT_2 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z" - ) - ) - .build(); + private static final DataSegment SEGMENT_2 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z")) + .build(); - private static final DataSegment SEGMENT_3 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .version("2015-10-22T22:07:57.074Z") - .build(); + private static final DataSegment SEGMENT_3 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .version("2015-10-22T22:07:57.074Z") + .build(); - private static final DataSegment SEGMENT_4_0 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .shardSpec(new NumberedShardSpec(0, 2)) - .build(); + private static final DataSegment SEGMENT_4_0 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .shardSpec(new NumberedShardSpec(0, 2)) + .build(); - private static final DataSegment SEGMENT_4_1 = DataSegment.builder(SEGMENT_1) - .interval( - new Interval( - "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" - ) - ) - .shardSpec(new NumberedShardSpec(1, 2)) - .build(); + private static final DataSegment SEGMENT_4_1 = DataSegment + .builder(SEGMENT_1) + .interval(Intervals.of("2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z")) + .shardSpec(new NumberedShardSpec(1, 2)) + .build(); @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder(); diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java index 4b9653219c8..fbd676f700b 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; - +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; @@ -33,7 +33,6 @@ import org.jets3t.service.ServiceException; import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Object; import org.jets3t.service.model.StorageObject; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -44,7 +43,7 @@ public class S3DataSegmentMoverTest { private static final DataSegment sourceSegment = new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), + Intervals.of("2013-01-01/2013-01-02"), "1", ImmutableMap.of( "key", @@ -119,7 +118,7 @@ public class S3DataSegmentMoverTest S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig()); mover.move(new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), + Intervals.of("2013-01-01/2013-01-02"), "1", ImmutableMap.of( "key", @@ -142,7 +141,7 @@ public class S3DataSegmentMoverTest S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, 
new S3DataSegmentPusherConfig()); mover.move(new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), + Intervals.of("2013-01-01/2013-01-02"), "1", ImmutableMap.of( "key", diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java index a8bcbde4e8c..32818b17e48 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.io.Files; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.apache.commons.io.IOUtils; @@ -32,7 +33,6 @@ import org.easymock.EasyMock; import org.easymock.IAnswer; import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Object; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -103,7 +103,7 @@ public class S3DataSegmentPusherTest DataSegment segmentToPush = new DataSegment( "foo", - new Interval("2015/2016"), + Intervals.of("2015/2016"), "0", Maps.newHashMap(), Lists.newArrayList(), diff --git a/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java b/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java index 9705ee90954..9de19dbbd3e 100644 --- a/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java +++ b/extensions-core/simple-client-sslcontext/src/main/java/io/druid/https/SSLContextModule.java @@ -26,6 +26,7 @@ import io.druid.guice.JsonConfigProvider; import io.druid.guice.annotations.Client; import io.druid.guice.annotations.Global; import io.druid.initialization.DruidModule; +import io.druid.server.router.Router; import javax.net.ssl.SSLContext; import java.util.List; @@ -46,5 +47,6 @@ public class SSLContextModule implements DruidModule binder.bind(SSLContext.class).toProvider(SSLContextProvider.class); binder.bind(SSLContext.class).annotatedWith(Global.class).toProvider(SSLContextProvider.class); binder.bind(SSLContext.class).annotatedWith(Client.class).toProvider(SSLContextProvider.class); + binder.bind(SSLContext.class).annotatedWith(Router.class).toProvider(SSLContextProvider.class); } } diff --git a/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java b/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java index 3c388ad8c3a..3d048cf835a 100644 --- a/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java +++ b/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java @@ -74,29 +74,29 @@ public class VarianceAggregatorCollector } }; + void fold(VarianceAggregatorCollector other) + { + if (other.count == 0) { + return; + } + if (this.count == 0) { + this.nvariance = other.nvariance; + this.count = other.count; + this.sum = other.sum; + return; + } + final double ratio = this.count / (double) other.count; + final double t = this.sum / ratio - other.sum; + + this.nvariance += other.nvariance + (ratio / (this.count + 
other.count) * t * t);
+    this.count += other.count;
+    this.sum += other.sum;
+  }
+
   static Object combineValues(Object lhs, Object rhs)
   {
-    final VarianceAggregatorCollector holder1 = (VarianceAggregatorCollector) lhs;
-    final VarianceAggregatorCollector holder2 = (VarianceAggregatorCollector) rhs;
-
-    if (holder2.count == 0) {
-      return holder1;
-    }
-    if (holder1.count == 0) {
-      holder1.nvariance = holder2.nvariance;
-      holder1.count = holder2.count;
-      holder1.sum = holder2.sum;
-      return holder1;
-    }
-
-    final double ratio = holder1.count / (double) holder2.count;
-    final double t = holder1.sum / ratio - holder2.sum;
-
-    holder1.nvariance += holder2.nvariance + (ratio / (holder1.count + holder2.count) * t * t);
-    holder1.count += holder2.count;
-    holder1.sum += holder2.sum;
-
-    return holder1;
+    ((VarianceAggregatorCollector) lhs).fold((VarianceAggregatorCollector) rhs);
+    return lhs;
   }

   static int getMaxIntermediateSize()
@@ -120,6 +120,13 @@ public class VarianceAggregatorCollector
     nvariance = 0;
   }

+  void copyFrom(VarianceAggregatorCollector other)
+  {
+    this.count = other.count;
+    this.sum = other.sum;
+    this.nvariance = other.nvariance;
+  }
+
   public VarianceAggregatorCollector(long count, double sum, double nvariance)
   {
     this.count = count;
diff --git a/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorFactory.java b/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorFactory.java
index 4af69154384..de27f1e0c10 100644
--- a/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorFactory.java
+++ b/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorFactory.java
@@ -30,9 +30,12 @@ import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.AggregatorFactoryNotMergeableException;
 import io.druid.query.aggregation.AggregatorUtil;
 import io.druid.query.aggregation.BufferAggregator;
+import io.druid.query.aggregation.AggregateCombiner;
 import io.druid.query.aggregation.NoopAggregator;
 import io.druid.query.aggregation.NoopBufferAggregator;
+import io.druid.query.aggregation.ObjectAggregateCombiner;
 import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.ColumnValueSelector;
 import io.druid.segment.ObjectColumnSelector;

 import org.apache.commons.codec.binary.Base64;
@@ -135,6 +138,51 @@ public class VarianceAggregatorFactory extends AggregatorFactory
     );
   }

+  @Override
+  public Object combine(Object lhs, Object rhs)
+  {
+    return VarianceAggregatorCollector.combineValues(lhs, rhs);
+  }
+
+  @Override
+  public AggregateCombiner makeAggregateCombiner()
+  {
+    // VarianceAggregatorFactory.combine() delegates to VarianceAggregatorCollector.combineValues() and it doesn't
+    // check for nulls, so this AggregateCombiner doesn't either.
+ return new ObjectAggregateCombiner() + { + private final VarianceAggregatorCollector combined = new VarianceAggregatorCollector(); + + @Override + public void reset(ColumnValueSelector selector) + { + @SuppressWarnings("unchecked") + VarianceAggregatorCollector first = ((ObjectColumnSelector) selector).get(); + combined.copyFrom(first); + } + + @Override + public void fold(ColumnValueSelector selector) + { + @SuppressWarnings("unchecked") + VarianceAggregatorCollector other = ((ObjectColumnSelector) selector).get(); + combined.fold(other); + } + + @Override + public Class classOfObject() + { + return VarianceAggregatorCollector.class; + } + + @Override + public VarianceAggregatorCollector get() + { + return combined; + } + }; + } + @Override public AggregatorFactory getCombiningFactory() { @@ -163,12 +211,6 @@ public class VarianceAggregatorFactory extends AggregatorFactory return VarianceAggregatorCollector.COMPARATOR; } - @Override - public Object combine(Object lhs, Object rhs) - { - return VarianceAggregatorCollector.combineValues(lhs, rhs); - } - @Override public Object finalizeComputation(Object object) { diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTestHelper.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTestHelper.java index 3799d03d06d..48c75c4fda3 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTestHelper.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTestHelper.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; @@ -99,7 +100,7 @@ public class VarianceTestHelper extends QueryRunnerTestHelper for (int i = 0; i < values.length; i++) { theVals.put(names[i], values[i]); } - DateTime ts = new DateTime(timestamp); + DateTime ts = DateTimes.of(timestamp); return new MapBasedRow(ts, theVals); } } diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java index 707e1364ce4..aeafee083ba 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java @@ -20,6 +20,7 @@ package io.druid.query.aggregation.variance; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryPlus; @@ -31,7 +32,6 @@ import io.druid.query.timeseries.TimeseriesQuery; import io.druid.query.timeseries.TimeseriesQueryRunnerTest; import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -79,7 +79,7 @@ public class VarianceTimeseriesQueryTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( VarianceTestHelper.of( "rows", 13L, 
@@ -92,7 +92,7 @@ public class VarianceTimeseriesQueryTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( VarianceTestHelper.of( "rows", 13L, diff --git a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java index 170592aac50..415e0aa3022 100644 --- a/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java +++ b/extensions-core/stats/src/test/java/io/druid/query/aggregation/variance/VarianceTopNQueryTest.java @@ -22,6 +22,7 @@ package io.druid.query.aggregation.variance; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; @@ -38,7 +39,6 @@ import io.druid.query.topn.TopNQueryQueryToolChest; import io.druid.query.topn.TopNQueryRunnerTest; import io.druid.query.topn.TopNResultValue; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -92,7 +92,7 @@ public class VarianceTopNQueryTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() diff --git a/hll/src/main/java/io/druid/hll/HyperLogLogCollector.java b/hll/src/main/java/io/druid/hll/HyperLogLogCollector.java index 3b2a283c005..1c30f2227c9 100644 --- a/hll/src/main/java/io/druid/hll/HyperLogLogCollector.java +++ b/hll/src/main/java/io/druid/hll/HyperLogLogCollector.java @@ -24,6 +24,7 @@ import com.google.common.primitives.UnsignedBytes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import javax.annotation.Nullable; import java.nio.ByteBuffer; /** @@ -358,7 +359,7 @@ public abstract class HyperLogLogCollector implements Comparable maybeInterval = config.getGranularitySpec() - .bucketInterval(new DateTime(inputRow.getTimestampFromEpoch())); + .bucketInterval(DateTimes.utc(inputRow.getTimestampFromEpoch())); if (!maybeInterval.isPresent()) { throw new ISE("WTF?! No bucket found for timestamp: %s", inputRow.getTimestampFromEpoch()); @@ -324,7 +325,7 @@ public class DetermineHashedPartitionsJob implements Jobby HyperLogLogCollector.makeCollector(ByteBuffer.wrap(value.getBytes(), 0, value.getLength())) ); } - Optional intervalOptional = config.getGranularitySpec().bucketInterval(new DateTime(key.get())); + Optional intervalOptional = config.getGranularitySpec().bucketInterval(DateTimes.utc(key.get())); if (!intervalOptional.isPresent()) { throw new ISE("WTF?! 
No bucket found for timestamp: %s", key.get()); @@ -343,7 +344,7 @@ public class DetermineHashedPartitionsJob implements Jobby } ).writeValue( out, - new Double(aggregate.estimateCardinality()).longValue() + aggregate.estimateCardinalityRound() ); } finally { diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java index a1182969f33..a1848dba554 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java @@ -37,6 +37,7 @@ import io.druid.collections.CombiningIterable; import io.druid.data.input.InputRow; import io.druid.data.input.Rows; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; @@ -67,6 +68,7 @@ import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import org.joda.time.DateTime; import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import java.io.IOException; import java.io.OutputStream; @@ -322,7 +324,7 @@ public class DeterminePartitionsJob implements Jobby { final List timeAndDims = HadoopDruidIndexerConfig.JSON_MAPPER.readValue(key.getBytes(), List.class); - final DateTime timestamp = new DateTime(timeAndDims.get(0)); + final DateTime timestamp = new DateTime(timeAndDims.get(0), ISOChronology.getInstanceUTC()); final Map> dims = (Map>) timeAndDims.get(1); helper.emitDimValueCounts(context, timestamp, dims); @@ -359,7 +361,7 @@ public class DeterminePartitionsJob implements Jobby for (final String dim : inputRow.getDimensions()) { dims.put(dim, inputRow.getDimension(dim)); } - helper.emitDimValueCounts(context, new DateTime(inputRow.getTimestampFromEpoch()), dims); + helper.emitDimValueCounts(context, DateTimes.utc(inputRow.getTimestampFromEpoch()), dims); } } @@ -566,7 +568,7 @@ public class DeterminePartitionsJob implements Jobby { final ByteBuffer groupKey = ByteBuffer.wrap(keyBytes.getGroupKey()); groupKey.position(4); // Skip partition - final DateTime bucket = new DateTime(groupKey.getLong()); + final DateTime bucket = DateTimes.utc(groupKey.getLong()); final PeekingIterator iterator = Iterators.peekingIterator(combinedIterable.iterator()); log.info( diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java index 6b718528b92..54baa985245 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java @@ -22,7 +22,6 @@ package io.druid.indexer; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Function; import com.google.common.base.Joiner; @@ -37,7 +36,6 @@ import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; -import io.druid.common.utils.JodaUtils; import io.druid.data.input.InputRow; import 
io.druid.data.input.impl.InputRowParser; import io.druid.guice.GuiceInjectors; @@ -46,6 +44,9 @@ import io.druid.guice.annotations.Self; import io.druid.indexer.partitions.PartitionsSpec; import io.druid.indexer.path.PathSpec; import io.druid.initialization.Initialization; +import io.druid.java.util.common.jackson.JacksonUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.FunctionalIterable; @@ -159,10 +160,8 @@ public class HadoopDruidIndexerConfig { try { return fromMap( - (Map) HadoopDruidIndexerConfig.JSON_MAPPER.readValue( - file, new TypeReference>() - { - } + HadoopDruidIndexerConfig.JSON_MAPPER.readValue( + file, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ) ); } @@ -177,10 +176,8 @@ public class HadoopDruidIndexerConfig // This is a map to try and prevent dependency screwbally-ness try { return fromMap( - (Map) HadoopDruidIndexerConfig.JSON_MAPPER.readValue( - str, new TypeReference>() - { - } + HadoopDruidIndexerConfig.JSON_MAPPER.readValue( + str, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ) ); } @@ -198,10 +195,8 @@ public class HadoopDruidIndexerConfig Reader reader = new InputStreamReader(fs.open(pt), StandardCharsets.UTF_8); return fromMap( - (Map) HadoopDruidIndexerConfig.JSON_MAPPER.readValue( - reader, new TypeReference>() - { - } + HadoopDruidIndexerConfig.JSON_MAPPER.readValue( + reader, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ) ); } @@ -410,9 +405,7 @@ public class HadoopDruidIndexerConfig public Optional getBucket(InputRow inputRow) { final Optional timeBucket = schema.getDataSchema().getGranularitySpec().bucketInterval( - new DateTime( - inputRow.getTimestampFromEpoch() - ) + DateTimes.utc(inputRow.getTimestampFromEpoch()) ); if (!timeBucket.isPresent()) { return Optional.absent(); @@ -562,8 +555,11 @@ public class HadoopDruidIndexerConfig public void addJobProperties(Job job) { - Configuration conf = job.getConfiguration(); + addJobProperties(job.getConfiguration()); + } + public void addJobProperties(Configuration conf) + { for (final Map.Entry entry : schema.getTuningConfig().getJobProperties().entrySet()) { conf.set(entry.getKey(), entry.getValue()); } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java index 5e20467dd0c..d50549c1ee9 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerMapper.java @@ -22,13 +22,13 @@ package io.druid.indexer; import io.druid.data.input.InputRow; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.StringInputRowParser; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.RE; import io.druid.java.util.common.logger.Logger; import io.druid.java.util.common.parsers.ParseException; import io.druid.segment.indexing.granularity.GranularitySpec; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Mapper; -import org.joda.time.DateTime; import java.io.IOException; @@ -82,7 +82,7 @@ public abstract class HadoopDruidIndexerMapper extends Mapper< } if (!granularitySpec.bucketIntervals().isPresent() - || granularitySpec.bucketInterval(new DateTime(inputRow.getTimestampFromEpoch())) + || 
granularitySpec.bucketInterval(DateTimes.utc(inputRow.getTimestampFromEpoch())) .isPresent()) { innerMap(inputRow, value, context, reportParseExceptions); } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIOPeon.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIOPeon.java deleted file mode 100644 index 7bb19e6c364..00000000000 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIOPeon.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.indexer; - -import io.druid.segment.data.IOPeon; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.mapreduce.JobContext; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -/** - */ -class HadoopIOPeon implements IOPeon -{ - private final JobContext job; - private final Path baseDir; - private final boolean overwriteFiles; - - public HadoopIOPeon(JobContext job, Path baseDir, final boolean overwriteFiles) - { - this.job = job; - this.baseDir = baseDir; - this.overwriteFiles = overwriteFiles; - } - - @Override - public OutputStream makeOutputStream(String filename) throws IOException - { - return Utils.makePathAndOutputStream(job, new Path(baseDir, filename), overwriteFiles); - } - - @Override - public InputStream makeInputStream(String filename) throws IOException - { - return Utils.openInputStream(job, new Path(baseDir, filename)); - } - - @Override - public void close() throws IOException - { - throw new UnsupportedOperationException(); - } - - @Override - public File getFile(String filename) - { - throw new UnsupportedOperationException(); - } -} diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java index cc25ac18b2d..aeb72c033f8 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopTuningConfig.java @@ -27,9 +27,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.indexer.partitions.HashedPartitionsSpec; import io.druid.indexer.partitions.PartitionsSpec; +import io.druid.java.util.common.DateTimes; import io.druid.segment.IndexSpec; import io.druid.segment.indexing.TuningConfig; -import org.joda.time.DateTime; import java.util.List; import java.util.Map; @@ -50,7 +50,7 @@ public class HadoopTuningConfig implements TuningConfig { return new HadoopTuningConfig( null, - new DateTime().toString(), + DateTimes.nowUtc().toString(), DEFAULT_PARTITIONS_SPEC, DEFAULT_SHARD_SPECS, DEFAULT_INDEX_SPEC, @@ -115,7 +115,7 @@ public class HadoopTuningConfig implements TuningConfig ) { 
this.workingPath = workingPath; - this.version = version == null ? new DateTime().toString() : version; + this.version = version == null ? DateTimes.nowUtc().toString() : version; this.partitionsSpec = partitionsSpec == null ? DEFAULT_PARTITIONS_SPEC : partitionsSpec; this.shardSpecs = shardSpecs == null ? DEFAULT_SHARD_SPECS : shardSpecs; this.indexSpec = indexSpec == null ? DEFAULT_INDEX_SPEC : indexSpec; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java index 4336d1c3c93..f7194e892eb 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java @@ -101,6 +101,8 @@ public class IndexGeneratorJob implements Jobby public static List getPublishedSegments(HadoopDruidIndexerConfig config) { final Configuration conf = JobHelper.injectSystemProperties(new Configuration()); + config.addJobProperties(conf); + final ObjectMapper jsonMapper = HadoopDruidIndexerConfig.JSON_MAPPER; ImmutableList.Builder publishedSegmentsBuilder = ImmutableList.builder(); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java index cd713880943..f3c00682f2a 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java @@ -27,6 +27,7 @@ import com.google.common.io.ByteStreams; import com.google.common.io.Files; import com.google.common.io.OutputSupplier; import io.druid.indexer.updater.HadoopDruidConverterConfig; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.IAE; import io.druid.java.util.common.IOE; @@ -51,7 +52,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Progressable; -import org.joda.time.DateTime; import java.io.BufferedOutputStream; import java.io.File; @@ -376,7 +376,9 @@ public class JobHelper Path workingPath = config.makeIntermediatePath(); log.info("Deleting path[%s]", workingPath); try { - workingPath.getFileSystem(injectSystemProperties(new Configuration())).delete(workingPath, true); + Configuration conf = injectSystemProperties(new Configuration()); + config.addJobProperties(conf); + workingPath.getFileSystem(conf).delete(workingPath, true); } catch (IOException e) { log.error(e, "Failed to cleanup path[%s]", workingPath); @@ -622,10 +624,10 @@ public class JobHelper log.info( "File[%s / %s / %sB] existed, but wasn't the same as [%s / %s / %sB]", finalIndexZipFile.getPath(), - new DateTime(finalIndexZipFile.getModificationTime()), + DateTimes.utc(finalIndexZipFile.getModificationTime()), finalIndexZipFile.getLen(), zipFile.getPath(), - new DateTime(zipFile.getModificationTime()), + DateTimes.utc(zipFile.getModificationTime()), zipFile.getLen() ); outputFS.delete(finalIndexZipFilePath, false); @@ -634,7 +636,7 @@ public class JobHelper log.info( "File[%s / %s / %sB] existed and will be kept", finalIndexZipFile.getPath(), - new DateTime(finalIndexZipFile.getModificationTime()), + DateTimes.utc(finalIndexZipFile.getModificationTime()), finalIndexZipFile.getLen() ); needRename = false; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/Utils.java b/indexing-hadoop/src/main/java/io/druid/indexer/Utils.java index 
0036ad93c1a..3f3523e7404 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/Utils.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/Utils.java @@ -19,9 +19,9 @@ package io.druid.indexer; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.ISE; import org.apache.hadoop.fs.FileSystem; @@ -110,9 +110,7 @@ public class Utils return jsonMapper.readValue( fs.open(statsPath), - new TypeReference>() - { - } + JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java index 63d10450c2c..87e671f5878 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.filter.DimFilter; import io.druid.timeline.DataSegment; diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java index 6d9a6b91b92..1e789980122 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java @@ -24,10 +24,11 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; -import io.druid.java.util.common.StringUtils; -import io.druid.java.util.common.granularity.Granularity; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.hadoop.FSSpideringIterator; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Comparators; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import org.apache.hadoop.fs.FileStatus; @@ -88,7 +89,7 @@ public class GranularUnprocessedPathSpec extends GranularityPathSpec Set bucketsToRun = Sets.newTreeSet(Comparators.intervals()); for (Map.Entry entry : inputModifiedTimes.entrySet()) { - DateTime timeBucket = new DateTime(entry.getKey()); + DateTime timeBucket = DateTimes.utc(entry.getKey()); long mTime = entry.getValue(); String bucketOutput = StringUtils.format( diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java index ee99b0512a3..0dc0550f4cb 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularityPathSpec.java @@ -168,7 +168,7 @@ public class GranularityPathSpec implements PathSpec end = inputInterval.getEndMillis(); makeNew = true; } - return makeNew ? 
new Interval(start, end) : interval; + return makeNew ? new Interval(start, end, interval.getChronology()) : interval; } } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java index c0d3d3cff17..0ce67728a4d 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopDruidConverterConfig.java @@ -21,7 +21,6 @@ package io.druid.indexer.updater; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; @@ -34,6 +33,7 @@ import io.druid.guice.GuiceInjectors; import io.druid.guice.JsonConfigProvider; import io.druid.guice.annotations.Self; import io.druid.initialization.Initialization; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.segment.IndexIO; import io.druid.segment.IndexMerger; import io.druid.segment.IndexSpec; @@ -81,18 +81,14 @@ public class HadoopDruidConverterConfig DATA_SEGMENT_PUSHER = injector.getInstance(DataSegmentPusher.class); } - private static final TypeReference> mapTypeReference = new TypeReference>() - { - }; - public static HadoopDruidConverterConfig fromString(final String string) throws IOException { - return fromMap(jsonMapper.>readValue(string, mapTypeReference)); + return fromMap(jsonMapper.readValue(string, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)); } public static HadoopDruidConverterConfig fromFile(final File file) throws IOException { - return fromMap(jsonMapper.>readValue(file, mapTypeReference)); + return fromMap(jsonMapper.readValue(file, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT)); } public static HadoopDruidConverterConfig fromMap(final Map map) diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java index bfee0b37673..86cfc9ca59e 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java @@ -35,6 +35,7 @@ import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.indexer.hadoop.WindowedDataSegment; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; @@ -71,8 +72,8 @@ public class BatchDeltaIngestionTest private static final ObjectMapper MAPPER; private static final IndexIO INDEX_IO; - private static final Interval INTERVAL_FULL = new Interval("2014-10-22T00:00:00Z/P1D"); - private static final Interval INTERVAL_PARTIAL = new Interval("2014-10-22T00:00:00Z/PT2H"); + private static final Interval INTERVAL_FULL = Intervals.of("2014-10-22T00:00:00Z/P1D"); + private static final Interval INTERVAL_PARTIAL = Intervals.of("2014-10-22T00:00:00Z/PT2H"); private static final DataSegment SEGMENT; static { @@ -424,7 +425,7 @@ public class BatchDeltaIngestionTest Assert.assertEquals(expected.get("visited_sum"), actual.getLongMetric("visited_sum")); Assert.assertEquals( (Double) 
expected.get("unique_hosts"), - (Double) HyperUniquesAggregatorFactory.estimateCardinality(actual.getRaw("unique_hosts")), + (Double) HyperUniquesAggregatorFactory.estimateCardinality(actual.getRaw("unique_hosts"), false), 0.001 ); } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java index ec026eeaede..e021abf9370 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/BucketTest.java @@ -21,10 +21,12 @@ package io.druid.indexer; import com.google.common.primitives.Bytes; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import org.hamcrest.number.OrderingComparison; import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -39,7 +41,7 @@ public class BucketTest @Before public void setUp() { - time = new DateTime(2014, 11, 24, 10, 30); + time = new DateTime(2014, 11, 24, 10, 30, ISOChronology.getInstanceUTC()); shardNum = 1; partitionNum = 1; bucket = new Bucket(shardNum, time, partitionNum); @@ -80,10 +82,12 @@ public class BucketTest bucket.equals(new Bucket(shardNum, time, partitionNum + 1))); Assert.assertFalse("Objects do not have the same shardNum", bucket.equals(new Bucket(shardNum + 1, time, partitionNum))); - Assert.assertFalse("Objects do not have the same time", bucket.equals(new Bucket(shardNum, new DateTime(), partitionNum))); + Assert.assertFalse( + "Objects do not have the same time", + bucket.equals(new Bucket(shardNum, DateTimes.nowUtc(), partitionNum)) + ); Assert.assertFalse("Object do have NULL time", bucket.equals(new Bucket(shardNum, null, partitionNum))); Assert.assertTrue("Objects must be the same", bucket.equals(new Bucket(shardNum, time, partitionNum))); - } @Test public void testHashCode() diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java index db2fe76e5c4..31219d64bb3 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DetermineHashedPartitionsJobTest.java @@ -27,12 +27,12 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.indexer.partitions.HashedPartitionsSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -147,7 +147,7 @@ public class DetermineHashedPartitionsJobTest new UniformGranularitySpec( Granularities.DAY, Granularities.NONE, - ImmutableList.of(new Interval(interval)) + ImmutableList.of(Intervals.of(interval)) ), HadoopDruidIndexerConfig.JSON_MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java index a802d3ca037..af7dfba194c 100644 --- 
a/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/DeterminePartitionsJobTest.java @@ -27,6 +27,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -34,7 +35,6 @@ import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.timeline.partition.SingleDimensionShardSpec; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Test; @@ -237,7 +237,7 @@ public class DeterminePartitionsJobTest ), new AggregatorFactory[]{new LongSumAggregatorFactory("visited_num", "visited_num")}, new UniformGranularitySpec( - Granularities.DAY, Granularities.NONE, ImmutableList.of(new Interval(interval)) + Granularities.DAY, Granularities.NONE, ImmutableList.of(Intervals.of(interval)) ), HadoopDruidIndexerConfig.JSON_MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java index 3c1ffdc1fdd..f91f9b6d5de 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopDruidIndexerConfigTest.java @@ -26,14 +26,14 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.DataSchema; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.timeline.partition.HashBasedNumberedShardSpec; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -71,7 +71,7 @@ public class HadoopDruidIndexerConfigTest new UniformGranularitySpec( Granularities.MINUTE, Granularities.MINUTE, - ImmutableList.of(new Interval("2010-01-01/P1D")) + ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), jsonMapper ), @@ -80,7 +80,7 @@ public class HadoopDruidIndexerConfigTest null, null, null, - ImmutableMap.of(new DateTime("2010-01-01T01:00:00").getMillis(), specs), + ImmutableMap.of(DateTimes.of("2010-01-01T01:00:00").getMillis(), specs), null, null, false, @@ -110,9 +110,9 @@ public class HadoopDruidIndexerConfigTest "dim2", "4" ); - final long timestamp = new DateTime("2010-01-01T01:00:01").getMillis(); + final long timestamp = DateTimes.of("2010-01-01T01:00:01").getMillis(); final Bucket expectedBucket = config.getBucket(new MapBasedInputRow(timestamp, dims, values)).get(); - final long nextBucketTimestamp = Granularities.MINUTE.bucketEnd(new DateTime(timestamp)).getMillis(); + final long nextBucketTimestamp = Granularities.MINUTE.bucketEnd(DateTimes.utc(timestamp)).getMillis(); // check that all rows having same set of dims and 
truncated timestamp hash to same bucket for (int i = 0; timestamp + i < nextBucketTimestamp; i++) { Assert.assertEquals( @@ -134,7 +134,7 @@ public class HadoopDruidIndexerConfigTest new UniformGranularitySpec( Granularities.MINUTE, Granularities.MINUTE, - ImmutableList.of(new Interval("2010-01-01/P1D")) + ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), jsonMapper ), @@ -143,12 +143,12 @@ public class HadoopDruidIndexerConfigTest null, null, null, - ImmutableMap.>of(new DateTime("2010-01-01T01:00:00").getMillis(), + ImmutableMap.>of(DateTimes.of("2010-01-01T01:00:00").getMillis(), Lists.newArrayList(new HadoopyShardSpec( NoneShardSpec.instance(), 1 )), - new DateTime("2010-01-01T02:00:00").getMillis(), + DateTimes.of("2010-01-01T02:00:00").getMillis(), Lists.newArrayList(new HadoopyShardSpec( NoneShardSpec.instance(), 2 @@ -183,10 +183,10 @@ public class HadoopDruidIndexerConfigTest "dim2", "4" ); - final long ts1 = new DateTime("2010-01-01T01:00:01").getMillis(); + final long ts1 = DateTimes.of("2010-01-01T01:00:01").getMillis(); Assert.assertEquals(config.getBucket(new MapBasedInputRow(ts1, dims, values)).get().getShardNum(), 1); - final long ts2 = new DateTime("2010-01-01T02:00:01").getMillis(); + final long ts2 = DateTimes.of("2010-01-01T02:00:01").getMillis(); Assert.assertEquals(config.getBucket(new MapBasedInputRow(ts2, dims, values)).get().getShardNum(), 2); } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIOPeonTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIOPeonTest.java deleted file mode 100644 index 2522487e176..00000000000 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIOPeonTest.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.indexer; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.mapred.JobContext; -import org.easymock.EasyMock; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; - -import java.io.IOException; -public class HadoopIOPeonTest -{ - final String TMP_FILE_NAME = "test_file"; - JobContext mockJobContext; - Configuration jobConfig; - boolean overwritesFiles = true; - HadoopIOPeon ioPeon; - - @Rule - public TemporaryFolder tmpFolder = new TemporaryFolder(); - - @Before public void setUp() throws IOException - { - jobConfig = new Configuration(); - mockJobContext = EasyMock.createMock(JobContext.class); - EasyMock.expect(mockJobContext.getConfiguration()).andReturn(jobConfig).anyTimes(); - EasyMock.replay(mockJobContext); - - ioPeon = new HadoopIOPeon(mockJobContext, new Path(tmpFolder.newFile().getParent()), overwritesFiles); - } - - @After public void tearDown() - { - jobConfig = null; - mockJobContext = null; - tmpFolder.delete(); - } - - @Test public void testMakeOutputStream() throws IOException - { - Assert.assertNotNull(ioPeon.makeOutputStream(TMP_FILE_NAME)); - } - - @Test public void testMakeInputStream() throws IOException - { - Assert.assertNotNull(ioPeon.makeInputStream(tmpFolder.newFile(TMP_FILE_NAME).getName())); - } - - @Test(expected = UnsupportedOperationException.class) public void testClose() throws IOException - { - ioPeon.close(); - } -} diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java index 1750bae6b8a..097402384af 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecTest.java @@ -30,12 +30,12 @@ import io.druid.indexer.partitions.PartitionsSpec; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; import io.druid.indexer.updater.MetadataStorageUpdaterJobSpec; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.segment.indexing.granularity.UniformGranularitySpec; import org.joda.time.DateTimeZone; -import org.joda.time.Interval; import org.joda.time.Period; import org.junit.Assert; import org.junit.Test; @@ -78,7 +78,7 @@ public class HadoopIngestionSpecTest Assert.assertEquals( "getIntervals", - Lists.newArrayList(new Interval("2012-01-01/P1D")), + Lists.newArrayList(Intervals.of("2012-01-01/P1D")), granularitySpec.getIntervals().get() ); diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java index 9154410875c..6899ccb3954 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java @@ -32,6 +32,7 @@ import io.druid.indexer.path.PathSpec; import io.druid.indexer.path.StaticPathSpec; import io.druid.indexer.path.UsedSegmentLister; import io.druid.jackson.DefaultObjectMapper; 
+import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.indexing.DataSchema; @@ -51,8 +52,8 @@ import java.util.Map; public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest { private final String testDatasource = "test"; - private final Interval testDatasourceInterval = new Interval("1970/3000"); - private final Interval testDatasourceIntervalPartial = new Interval("2050/3000"); + private final Interval testDatasourceInterval = Intervals.of("1970/3000"); + private final Interval testDatasourceIntervalPartial = Intervals.of("2050/3000"); private final ObjectMapper jsonMapper; public HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest() @@ -65,7 +66,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest private static final DataSegment SEGMENT = new DataSegment( "test1", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", @@ -233,9 +234,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of( - new Interval("2010-01-01/P1D") - ) + ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), jsonMapper ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java index 0815581fbf3..4e9a1afbb9d 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorCombinerTest.java @@ -22,13 +22,15 @@ package io.druid.indexer; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import io.druid.java.util.common.StringUtils; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -40,8 +42,6 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.mapreduce.Reducer; import org.easymock.Capture; import org.easymock.EasyMock; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -80,7 +80,7 @@ public class IndexGeneratorCombinerTest new HyperUniquesAggregatorFactory("unique_hosts", "host") }, new UniformGranularitySpec( - Granularities.DAY, Granularities.NONE, ImmutableList.of(Interval.parse("2010/2011")) + Granularities.DAY, Granularities.NONE, ImmutableList.of(Intervals.of("2010/2011")) ), HadoopDruidIndexerConfig.JSON_MAPPER ), @@ -136,7 +136,7 @@ public class IndexGeneratorCombinerTest { long timestamp = System.currentTimeMillis(); - Bucket bucket = new Bucket(0, new DateTime(timestamp), 0); + Bucket bucket = new Bucket(0, DateTimes.utc(timestamp), 0); SortableBytes keySortableBytes = new SortableBytes( bucket.toGroupKey(), new byte[0] @@ -187,7 +187,14 @@ public class IndexGeneratorCombinerTest 
Assert.assertEquals(ImmutableList.of(), capturedRow.getDimension("host")); Assert.assertEquals(Arrays.asList("bar", "foo"), capturedRow.getDimension("keywords")); Assert.assertEquals(15, capturedRow.getLongMetric("visited_sum")); - Assert.assertEquals(2.0, (Double) HyperUniquesAggregatorFactory.estimateCardinality(capturedRow.getRaw("unique_hosts")), 0.001); + Assert.assertEquals( + 2.0, + (Double) HyperUniquesAggregatorFactory.estimateCardinality( + capturedRow.getRaw("unique_hosts"), + false + ), + 0.001 + ); } @Test @@ -195,7 +202,7 @@ public class IndexGeneratorCombinerTest { long timestamp = System.currentTimeMillis(); - Bucket bucket = new Bucket(0, new DateTime(timestamp), 0); + Bucket bucket = new Bucket(0, DateTimes.utc(timestamp), 0); SortableBytes keySortableBytes = new SortableBytes( bucket.toGroupKey(), new byte[0] @@ -250,13 +257,21 @@ public class IndexGeneratorCombinerTest Assert.assertEquals(Collections.singletonList("host1"), capturedRow1.getDimension("host")); Assert.assertEquals(Arrays.asList("bar", "foo"), capturedRow1.getDimension("keywords")); Assert.assertEquals(10, capturedRow1.getLongMetric("visited_sum")); - Assert.assertEquals(1.0, (Double) HyperUniquesAggregatorFactory.estimateCardinality(capturedRow1.getRaw("unique_hosts")), 0.001); + Assert.assertEquals( + 1.0, + (Double) HyperUniquesAggregatorFactory.estimateCardinality(capturedRow1.getRaw("unique_hosts"), false), + 0.001 + ); InputRow capturedRow2 = InputRowSerde.fromBytes(captureVal2.getValue().getBytes(), aggregators); Assert.assertEquals(Arrays.asList("host", "keywords"), capturedRow2.getDimensions()); Assert.assertEquals(Collections.singletonList("host2"), capturedRow2.getDimension("host")); Assert.assertEquals(Arrays.asList("bar", "foo"), capturedRow2.getDimension("keywords")); Assert.assertEquals(5, capturedRow2.getLongMetric("visited_sum")); - Assert.assertEquals(1.0, (Double) HyperUniquesAggregatorFactory.estimateCardinality(capturedRow2.getRaw("unique_hosts")), 0.001); + Assert.assertEquals( + 1.0, + (Double) HyperUniquesAggregatorFactory.estimateCardinality(capturedRow2.getRaw("unique_hosts"), false), + 0.001 + ); } } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java index 412b5718c0b..f70d7f1e1e3 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java @@ -31,6 +31,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.RE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; @@ -423,7 +424,7 @@ public class IndexGeneratorJobTest this.useCombiner = useCombiner; this.partitionType = partitionType; this.shardInfoForEachSegment = shardInfoForEachSegment; - this.interval = new Interval(interval); + this.interval = Intervals.of(interval); this.data = data; this.inputFormatName = inputFormatName; this.inputRowParser = inputRowParser; diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java index 1fcef910095..4ab2e6bba37 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java +++ 
b/indexing-hadoop/src/test/java/io/druid/indexer/JobHelperTest.java @@ -25,6 +25,7 @@ import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -60,7 +61,7 @@ public class JobHelperTest private HadoopDruidIndexerConfig config; private File tmpDir; private File dataFile; - private Interval interval = new Interval("2014-10-22T00:00:00Z/P1D"); + private Interval interval = Intervals.of("2014-10-22T00:00:00Z/P1D"); @Before public void setup() throws Exception @@ -155,7 +156,7 @@ public class JobHelperTest { DataSegment segment = new DataSegment( "test1", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "google", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java index e935af6bab5..adbeeea4486 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java @@ -22,6 +22,7 @@ package io.druid.indexer.hadoop; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import io.druid.java.util.common.Intervals; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.TestHelper; import io.druid.timeline.DataSegment; @@ -40,7 +41,7 @@ public class DatasourceIngestionSpecTest @Test public void testSingleIntervalSerde() throws Exception { - Interval interval = Interval.parse("2014/2015"); + Interval interval = Intervals.of("2014/2015"); DatasourceIngestionSpec expected = new DatasourceIngestionSpec( "test", @@ -74,7 +75,7 @@ public class DatasourceIngestionSpecTest DatasourceIngestionSpec.class ); - List intervals = ImmutableList.of(Interval.parse("2014/2015"), Interval.parse("2016/2017")); + List intervals = ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2016/2017")); DatasourceIngestionSpec expected = new DatasourceIngestionSpec( "test", @@ -119,7 +120,7 @@ public class DatasourceIngestionSpecTest ImmutableList.of( new DataSegment( "test", - Interval.parse("2014/2017"), + Intervals.of("2014/2017"), "v0", null, null, @@ -152,7 +153,7 @@ public class DatasourceIngestionSpecTest DatasourceIngestionSpec actual = MAPPER.readValue(jsonStr, DatasourceIngestionSpec.class); Assert.assertEquals( - new DatasourceIngestionSpec("test", Interval.parse("2014/2015"), null, null, null, null, null, false), + new DatasourceIngestionSpec("test", Intervals.of("2014/2015"), null, null, null, null, null, false), actual ); } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java index c36f1343d4a..6177cf76927 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java @@ -29,6 +29,7 @@ import com.google.common.collect.Sets; import com.google.common.io.Files; import io.druid.indexer.JobHelper; import 
io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.apache.hadoop.fs.BlockLocation; @@ -42,7 +43,6 @@ import org.apache.hadoop.mapred.TextInputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -74,7 +74,7 @@ public class DatasourceInputFormatTest WindowedDataSegment.of( new DataSegment( "test1", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", @@ -90,7 +90,7 @@ public class DatasourceInputFormatTest WindowedDataSegment.of( new DataSegment( "test2", - Interval.parse("2050/3000"), + Intervals.of("2050/3000"), "ver", ImmutableMap.of( "type", "hdfs", @@ -106,7 +106,7 @@ public class DatasourceInputFormatTest WindowedDataSegment.of( new DataSegment( "test3", - Interval.parse("2030/3000"), + Intervals.of("2030/3000"), "ver", ImmutableMap.of( "type", "hdfs", @@ -287,7 +287,7 @@ public class DatasourceInputFormatTest WindowedDataSegment.of( new DataSegment( "test1", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java index dcbb3d6483a..0140ede1545 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputSplitTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.ByteArrayDataOutput; import com.google.common.io.ByteStreams; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.Interval; @@ -39,13 +40,13 @@ public class DatasourceInputSplitTest @Test public void testSerde() throws Exception { - Interval interval = Interval.parse("2000/3000"); + Interval interval = Intervals.of("2000/3000"); DatasourceInputSplit expected = new DatasourceInputSplit( Lists.newArrayList( new WindowedDataSegment( new DataSegment( "test", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java index 0919320616e..50aa49fa827 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java @@ -130,7 +130,7 @@ public class DatasourceRecordReaderTest Assert.assertEquals(expected.get("visited_sum"), actual.getLongMetric("visited_sum")); Assert.assertEquals( (Double) expected.get("unique_hosts"), - (Double) HyperUniquesAggregatorFactory.estimateCardinality(actual.getRaw("unique_hosts")), + (Double) HyperUniquesAggregatorFactory.estimateCardinality(actual.getRaw("unique_hosts"), false), 0.001 ); } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java index 
48d23bc8539..f350bb7ff61 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/WindowedDataSegmentTest.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.Interval; @@ -36,7 +37,7 @@ public class WindowedDataSegmentTest private static final ObjectMapper MAPPER = new DefaultObjectMapper(); private static final DataSegment SEGMENT = new DataSegment( "test1", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", @@ -65,7 +66,7 @@ public class WindowedDataSegmentTest @Test public void testSerdePartialWindow() throws IOException { - final Interval partialInterval = new Interval("2500/3000"); + final Interval partialInterval = Intervals.of("2500/3000"); final WindowedDataSegment windowedDataSegment = new WindowedDataSegment(SEGMENT, partialInterval); final WindowedDataSegment roundTrip = MAPPER.readValue( MAPPER.writeValueAsBytes(windowedDataSegment), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java index d0a74653b7d..fd993255738 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java @@ -44,6 +44,7 @@ import io.druid.indexer.hadoop.WindowedDataSegment; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -56,7 +57,6 @@ import io.druid.timeline.partition.NoneShardSpec; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Job; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -74,7 +74,7 @@ public class DatasourcePathSpecTest { this.ingestionSpec = new DatasourceIngestionSpec( "test", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), null, null, null, @@ -87,7 +87,7 @@ public class DatasourcePathSpecTest WindowedDataSegment.of( new DataSegment( ingestionSpec.getDataSource(), - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", @@ -103,7 +103,7 @@ public class DatasourcePathSpecTest WindowedDataSegment.of( new DataSegment( ingestionSpec.getDataSource(), - Interval.parse("2050/3000"), + Intervals.of("2050/3000"), "ver", ImmutableMap.of( "type", "hdfs", @@ -278,7 +278,7 @@ public class DatasourcePathSpecTest new LongSumAggregatorFactory("visited_sum", "visited") }, new UniformGranularitySpec( - Granularities.DAY, Granularities.NONE, ImmutableList.of(Interval.parse("2000/3000")) + Granularities.DAY, Granularities.NONE, ImmutableList.of(Intervals.of("2000/3000")) ), HadoopDruidIndexerConfig.JSON_MAPPER ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java 
b/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java index 6317f1be111..9e3a0447530 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java @@ -29,6 +29,7 @@ import io.druid.indexer.HadoopIOConfig; import io.druid.indexer.HadoopIngestionSpec; import io.druid.indexer.HadoopTuningConfig; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -40,7 +41,6 @@ import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; import org.apache.hadoop.security.UserGroupInformation; import org.joda.time.DateTimeZone; -import org.joda.time.Interval; import org.joda.time.Period; import org.junit.After; import org.junit.Assert; @@ -151,7 +151,7 @@ public class GranularityPathSpecTest new UniformGranularitySpec( Granularities.DAY, Granularities.MINUTE, - ImmutableList.of(new Interval("2015-11-06T00:00Z/2015-11-07T00:00Z")) + ImmutableList.of(Intervals.of("2015-11-06T00:00Z/2015-11-07T00:00Z")) ), jsonMapper ), @@ -202,7 +202,7 @@ public class GranularityPathSpecTest new UniformGranularitySpec( Granularities.DAY, Granularities.ALL, - ImmutableList.of(new Interval("2015-01-01T11Z/2015-01-02T05Z")) + ImmutableList.of(Intervals.of("2015-01-01T11Z/2015-01-02T05Z")) ), jsonMapper ), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java index b25e91d4f43..61350ac9a88 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopConverterJobTest.java @@ -42,6 +42,7 @@ import io.druid.indexer.JobHelper; import io.druid.indexer.Jobby; import io.druid.indexer.SQLMetadataStorageUpdaterJobHandler; import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.metadata.MetadataSegmentManagerConfig; import io.druid.metadata.MetadataStorageConnectorConfig; @@ -103,7 +104,7 @@ public class HadoopConverterJobTest private Supplier metadataStorageTablesConfigSupplier; private DerbyConnector connector; - private final Interval interval = Interval.parse("2011-01-01T00:00:00.000Z/2011-05-01T00:00:00.000Z"); + private final Interval interval = Intervals.of("2011-01-01T00:00:00.000Z/2011-05-01T00:00:00.000Z"); @After public void tearDown() diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java index 2c50969f59e..e2bbcb76a8d 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/updater/HadoopDruidConverterConfigTest.java @@ -23,9 +23,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.segment.IndexSpec; import io.druid.timeline.DataSegment; -import org.joda.time.Interval; 
import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -44,7 +44,7 @@ public class HadoopDruidConverterConfigTest { final HadoopDruidConverterConfig config = new HadoopDruidConverterConfig( "datasource", - Interval.parse("2000/2010"), + Intervals.of("2000/2010"), new IndexSpec(), ImmutableList.of(), true, diff --git a/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java b/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java index 6ee38cbda84..0cec2c8be06 100644 --- a/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java +++ b/indexing-service/src/main/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java @@ -23,7 +23,7 @@ import com.google.common.base.Function; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Sets; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.segment.realtime.appenderator.SegmentIdentifier; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java index 7c0ac1680a9..829a59222c7 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java @@ -19,7 +19,6 @@ package io.druid.indexing.common.actions; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.base.Throwables; @@ -33,6 +32,7 @@ import io.druid.indexing.common.RetryPolicy; import io.druid.indexing.common.RetryPolicyFactory; import io.druid.indexing.common.task.Task; import io.druid.java.util.common.IOE; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.ISE; import io.druid.java.util.common.logger.Logger; import org.jboss.netty.channel.ChannelException; @@ -114,9 +114,7 @@ public class RemoteTaskActionClient implements TaskActionClient if (response.getStatus().getCode() / 100 == 2) { final Map responseDict = jsonMapper.readValue( response.getContent(), - new TypeReference>() - { - } + JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); return jsonMapper.convertValue(responseDict.get("result"), taskAction.getReturnTypeReference()); } else { diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java index 6e4e52c6b5d..727afbe8315 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/SegmentListUsedAction.java @@ -25,7 +25,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.indexing.common.task.Task; import io.druid.timeline.DataSegment; import org.joda.time.Interval; diff --git 
a/indexing-service/src/main/java/io/druid/indexing/common/task/AbstractTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/AbstractTask.java index 67bf7227d3e..442204edd6b 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/AbstractTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/AbstractTask.java @@ -28,9 +28,9 @@ import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.LockListAction; +import io.druid.java.util.common.DateTimes; import io.druid.query.Query; import io.druid.query.QueryRunner; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.io.IOException; @@ -86,7 +86,7 @@ public abstract class AbstractTask implements Task dataSource, interval.getStart(), interval.getEnd(), - new DateTime().toString() + DateTimes.nowUtc().toString() ); } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java index 8cbb8c434d3..a16a6547d11 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/AppendTask.java @@ -27,6 +27,7 @@ import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.indexing.common.TaskToolbox; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Comparators; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.IndexMerger; @@ -84,7 +85,7 @@ public class AppendTask extends MergeTaskBase final Iterable segmentsToMerge = Iterables.concat( Iterables.transform( - timeline.lookup(new Interval("1000-01-01/3000-01-01")), + timeline.lookup(Intervals.of("1000-01-01/3000-01-01")), new Function, Iterable>() { @Override diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java index b223bdabcf1..5ab8251d7f5 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java @@ -27,12 +27,12 @@ import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; - import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentInsertAction; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.logger.Logger; @@ -40,7 +40,6 @@ import io.druid.segment.IndexIO; import io.druid.segment.IndexSpec; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.io.File; @@ -120,7 +119,7 @@ public class ConvertSegmentTask extends AbstractFixedIntervalTask { Preconditions.checkNotNull(dataSource, "dataSource"); Preconditions.checkNotNull(interval, "interval"); - return joinId(TYPE, dataSource, 
interval.getStart(), interval.getEnd(), new DateTime()); + return joinId(TYPE, dataSource, interval.getStart(), interval.getEnd(), DateTimes.nowUtc()); } @JsonCreator diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java index 275bf7b31da..ae79eb64ae0 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java @@ -29,7 +29,6 @@ import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import io.druid.common.utils.JodaUtils; import io.druid.indexer.HadoopDruidDetermineConfigurationJob; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.HadoopDruidIndexerJob; @@ -43,10 +42,11 @@ import io.druid.indexing.common.actions.LockAcquireAction; import io.druid.indexing.common.actions.LockTryAcquireAction; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.hadoop.OverlordActionBasedUsedSegmentLister; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.util.List; @@ -93,7 +93,7 @@ public class HadoopIndexTask extends HadoopTask ) { super( - id != null ? id : StringUtils.format("index_hadoop_%s_%s", getTheDataSource(spec), new DateTime()), + id != null ? id : StringUtils.format("index_hadoop_%s_%s", getTheDataSource(spec), DateTimes.nowUtc()), getTheDataSource(spec), hadoopDependencyCoordinates == null ? (hadoopCoordinates == null ? null : ImmutableList.of(hadoopCoordinates)) diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java index 1477d91b530..3aa0e5c2444 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java @@ -36,7 +36,8 @@ import com.google.common.collect.Iterables; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import com.google.common.util.concurrent.ListenableFuture; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.data.input.Committer; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; @@ -116,7 +117,11 @@ public class IndexTask extends AbstractTask private static String makeId(String id, IndexIngestionSpec ingestionSchema) { - return id != null ? 
id : StringUtils.format("index_%s_%s", makeDataSource(ingestionSchema), new DateTime()); + if (id != null) { + return id; + } else { + return StringUtils.format("index_%s_%s", makeDataSource(ingestionSchema), DateTimes.nowUtc()); + } } private static String makeGroupId(IndexIngestionSpec ingestionSchema) @@ -372,7 +377,7 @@ public class IndexTask extends AbstractTask final int numShards; if (determineNumPartitions) { - final long numRows = new Double(collector.estimateCardinality()).longValue(); + final long numRows = collector.estimateCardinalityRound(); numShards = (int) Math.ceil((double) numRows / tuningConfig.getTargetPartitionSize()); log.info("Estimated [%,d] rows of data for interval [%s], creating [%,d] shards", numRows, interval, numShards); } else { diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java index f6d9b3e860c..13c2660aec8 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java @@ -42,6 +42,7 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.segment.IndexIO; @@ -75,7 +76,7 @@ public abstract class MergeTaskBase extends AbstractFixedIntervalTask super( // _not_ the version, just something uniqueish id != null ? id : StringUtils.format( - "merge_%s_%s", computeProcessingID(dataSource, segments), new DateTime().toString() + "merge_%s_%s", computeProcessingID(dataSource, segments), DateTimes.nowUtc().toString() ), dataSource, computeMergedInterval(segments), diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java index 487b8e401ee..43ca99bce8a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java @@ -27,10 +27,10 @@ import io.druid.data.input.FirehoseFactory; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; -import org.joda.time.DateTime; import java.util.Map; import java.util.UUID; @@ -74,7 +74,7 @@ public class NoopTask extends AbstractTask ) { super( - id == null ? StringUtils.format("noop_%s_%s", new DateTime(), UUID.randomUUID().toString()) : id, + id == null ? 
StringUtils.format("noop_%s_%s", DateTimes.nowUtc(), UUID.randomUUID().toString()) : id, "none", context ); diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java index 7e641da3f84..fe2a92f2b9d 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java @@ -40,6 +40,7 @@ import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.LockAcquireAction; import io.druid.indexing.common.actions.LockReleaseAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.query.DruidMetrics; @@ -91,7 +92,7 @@ public class RealtimeIndexTask extends AbstractTask return makeTaskId( fireDepartment.getDataSchema().getDataSource(), fireDepartment.getTuningConfig().getShardSpec().getPartitionNum(), - new DateTime(), + DateTimes.nowUtc(), random.nextInt() ); } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/SameIntervalMergeTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/SameIntervalMergeTask.java index 3a112e83ebb..b47249eb113 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/SameIntervalMergeTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/SameIntervalMergeTask.java @@ -25,10 +25,10 @@ import com.google.common.base.Preconditions; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUsedAction; +import io.druid.java.util.common.DateTimes; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.IndexSpec; import io.druid.timeline.DataSegment; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.util.List; @@ -102,7 +102,7 @@ public class SameIntervalMergeTask extends AbstractFixedIntervalTask dataSource, interval.getStart(), interval.getEnd(), - new DateTime().toString() + DateTimes.nowUtc().toString() ); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java index 9e17bfe9fd9..e4034354bcc 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java @@ -53,6 +53,7 @@ import io.druid.indexing.common.tasklogs.LogUtils; import io.druid.indexing.overlord.autoscaling.ScalingStats; import io.druid.indexing.overlord.config.ForkingTaskRunnerConfig; import io.druid.indexing.worker.config.WorkerConfig; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IOE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.Closer; @@ -560,7 +561,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer } } - final DateTime start = new DateTime(); + final DateTime start = DateTimes.nowUtc(); final long timeout = new Interval(start, taskConfig.getGracefulShutdownTimeout()).toDurationMillis(); // Things should be terminating now. Wait for it to happen so logs can be uploaded and all that good stuff. 
diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java b/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java index 0906fef89a1..f41b1d7590c 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java @@ -35,6 +35,7 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.actions.TaskAction; import io.druid.indexing.common.config.TaskStorageConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.EntryExistsException; import org.joda.time.DateTime; @@ -84,7 +85,7 @@ public class HeapMemoryTaskStorage implements TaskStorage } log.info("Inserting task %s with status: %s", task.getId(), status); - tasks.put(task.getId(), new TaskStuff(task, status, new DateTime())); + tasks.put(task.getId(), new TaskStuff(task, status, DateTimes.nowUtc())); } finally { giant.unlock(); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java b/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java index 72dcd3ec34d..d8c1a3f7dfb 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java @@ -34,6 +34,7 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.actions.TaskAction; import io.druid.indexing.common.config.TaskStorageConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.lifecycle.LifecycleStart; @@ -134,7 +135,7 @@ public class MetadataTaskStorage implements TaskStorage try { handler.insert( task.getId(), - new DateTime(), + DateTimes.nowUtc(), task.getDataSource(), task, status.isRunnable(), @@ -213,7 +214,7 @@ public class MetadataTaskStorage implements TaskStorage @Override public List getRecentlyFinishedTaskStatuses() { - final DateTime start = new DateTime().minus(config.getRecentlyFinishedThreshold()); + final DateTime start = DateTimes.nowUtc().minus(config.getRecentlyFinishedThreshold()); return ImmutableList.copyOf( Iterables.filter( diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java index 5cbc4a5b346..ec096a5ff1b 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java @@ -64,6 +64,7 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.overlord.setup.WorkerSelectStrategy; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.RE; @@ -84,7 +85,6 @@ import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; -import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Period; @@ 
-753,7 +753,7 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer } ZkWorker assignedWorker = null; - Optional immutableZkWorker = null; + final ImmutableWorkerInfo immutableZkWorker; try { synchronized (workersWithUnacknowledgedTask) { immutableZkWorker = strategy.findWorkerForTask( @@ -787,10 +787,10 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer task ); - if (immutableZkWorker.isPresent() && - workersWithUnacknowledgedTask.putIfAbsent(immutableZkWorker.get().getWorker().getHost(), task.getId()) - == null) { - assignedWorker = zkWorkers.get(immutableZkWorker.get().getWorker().getHost()); + if (immutableZkWorker != null && + workersWithUnacknowledgedTask.putIfAbsent(immutableZkWorker.getWorker().getHost(), task.getId()) + == null) { + assignedWorker = zkWorkers.get(immutableZkWorker.getWorker().getHost()); } } @@ -1181,7 +1181,7 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer taskStatus.getStatusCode() ); // Worker is done with this task - zkWorker.setLastCompletedTaskTime(new DateTime()); + zkWorker.setLastCompletedTaskTime(DateTimes.nowUtc()); } else { log.info("Workerless task[%s] completed with status[%s]", taskStatus.getId(), taskStatus.getStatusCode()); } @@ -1206,7 +1206,7 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer synchronized (blackListedWorkers) { if (zkWorker.getContinuouslyFailedTasksCount() > config.getMaxRetriesBeforeBlacklist() && blackListedWorkers.size() <= zkWorkers.size() * (config.getMaxPercentageBlacklistWorkers() / 100.0) - 1) { - zkWorker.setBlacklistedUntil(DateTime.now().plus(config.getWorkerBlackListBackoffTime())); + zkWorker.setBlacklistedUntil(DateTimes.nowUtc().plus(config.getWorkerBlackListBackoffTime())); if (blackListedWorkers.add(zkWorker)) { log.info( "Blacklisting [%s] until [%s] after [%,d] failed tasks in a row.", diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkQueue.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkQueue.java index 334b72b46aa..29ab3a7aa65 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkQueue.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunnerWorkQueue.java @@ -19,7 +19,7 @@ package io.druid.indexing.overlord; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import java.util.concurrent.ConcurrentSkipListMap; @@ -30,6 +30,6 @@ public class RemoteTaskRunnerWorkQueue extends ConcurrentSkipListMap dsLockbox = dsRunning.navigableKeySet(); final Iterable searchIntervals = Iterables.concat( // Single interval that starts at or before ours - Collections.singletonList(dsLockbox.floor(new Interval(interval.getStart(), new DateTime(JodaUtils.MAX_INSTANT)))), + Collections.singletonList(dsLockbox.floor(new Interval(interval.getStart(), DateTimes.MAX))), // All intervals that start somewhere between our start instant (exclusive) and end instant (exclusive) dsLockbox.subSet( - new Interval(interval.getStart(), new DateTime(JodaUtils.MAX_INSTANT)), + new Interval(interval.getStart(), DateTimes.MAX), false, new Interval(interval.getEnd(), interval.getEnd()), false diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java index f46b627ebb6..989b97b0a2a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java +++ 
b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskMaster.java @@ -24,7 +24,9 @@ import com.google.common.base.Throwables; import com.google.inject.Inject; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; +import io.druid.client.indexing.IndexingService; import io.druid.curator.discovery.ServiceAnnouncer; +import io.druid.discovery.DruidLeaderSelector; import io.druid.guice.annotations.Self; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.actions.TaskActionClientFactory; @@ -38,15 +40,8 @@ import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.server.DruidNode; import io.druid.server.coordinator.CoordinatorOverlordServiceConfig; -import io.druid.server.initialization.IndexerZkConfig; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.leader.LeaderSelector; -import org.apache.curator.framework.recipes.leader.LeaderSelectorListener; -import org.apache.curator.framework.recipes.leader.Participant; -import org.apache.curator.framework.state.ConnectionState; import java.util.concurrent.atomic.AtomicReference; -import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; /** @@ -54,15 +49,15 @@ import java.util.concurrent.locks.ReentrantLock; */ public class TaskMaster { - private final LeaderSelector leaderSelector; + private final DruidLeaderSelector overlordLeaderSelector; + private final DruidLeaderSelector.Listener leadershipListener; + private final ReentrantLock giant = new ReentrantLock(true); - private final Condition mayBeStopped = giant.newCondition(); private final TaskActionClientFactory taskActionClientFactory; private final SupervisorManager supervisorManager; private final AtomicReference leaderLifecycleRef = new AtomicReference<>(null); - private volatile boolean leading = false; private volatile TaskRunner taskRunner; private volatile TaskQueue taskQueue; @@ -75,115 +70,99 @@ public class TaskMaster final TaskStorage taskStorage, final TaskActionClientFactory taskActionClientFactory, @Self final DruidNode selfNode, - final IndexerZkConfig zkPaths, final TaskRunnerFactory runnerFactory, - final CuratorFramework curator, final ServiceAnnouncer serviceAnnouncer, final CoordinatorOverlordServiceConfig coordinatorOverlordServiceConfig, final ServiceEmitter emitter, final SupervisorManager supervisorManager, - final OverlordHelperManager overlordHelperManager - ) + final OverlordHelperManager overlordHelperManager, + @IndexingService final DruidLeaderSelector overlordLeaderSelector + ) { this.supervisorManager = supervisorManager; this.taskActionClientFactory = taskActionClientFactory; + this.overlordLeaderSelector = overlordLeaderSelector; + final DruidNode node = coordinatorOverlordServiceConfig.getOverlordService() == null ? selfNode : selfNode.withService(coordinatorOverlordServiceConfig.getOverlordService()); - this.leaderSelector = new LeaderSelector( - curator, - zkPaths.getLeaderLatchPath(), - new LeaderSelectorListener() - { - @Override - public void takeLeadership(CuratorFramework client) throws Exception - { - giant.lock(); + this.leadershipListener = new DruidLeaderSelector.Listener() + { + @Override + public void becomeLeader() + { + giant.lock(); - try { - // Make sure the previous leadership cycle is really, really over. - stopLeading(); + // I AM THE MASTER OF THE UNIVERSE. 
+ log.info("By the power of Grayskull, I have the power!"); - // I AM THE MASTER OF THE UNIVERSE. - log.info("By the power of Grayskull, I have the power!"); - taskLockbox.syncFromStorage(); - taskRunner = runnerFactory.build(); - taskQueue = new TaskQueue( - taskQueueConfig, - taskStorage, - taskRunner, - taskActionClientFactory, - taskLockbox, - emitter - ); + try { + taskLockbox.syncFromStorage(); + taskRunner = runnerFactory.build(); + taskQueue = new TaskQueue( + taskQueueConfig, + taskStorage, + taskRunner, + taskActionClientFactory, + taskLockbox, + emitter + ); - // Sensible order to start stuff: - final Lifecycle leaderLifecycle = new Lifecycle(); - if (leaderLifecycleRef.getAndSet(leaderLifecycle) != null) { - log.makeAlert("TaskMaster set a new Lifecycle without the old one being cleared! Race condition") - .emit(); - } + // Sensible order to start stuff: + final Lifecycle leaderLifecycle = new Lifecycle(); + if (leaderLifecycleRef.getAndSet(leaderLifecycle) != null) { + log.makeAlert("TaskMaster set a new Lifecycle without the old one being cleared! Race condition") + .emit(); + } - leaderLifecycle.addManagedInstance(taskRunner); - leaderLifecycle.addManagedInstance(taskQueue); - leaderLifecycle.addManagedInstance(supervisorManager); - leaderLifecycle.addManagedInstance(overlordHelperManager); + leaderLifecycle.addManagedInstance(taskRunner); + leaderLifecycle.addManagedInstance(taskQueue); + leaderLifecycle.addManagedInstance(supervisorManager); + leaderLifecycle.addManagedInstance(overlordHelperManager); - leaderLifecycle.addHandler( - new Lifecycle.Handler() - { - @Override - public void start() throws Exception - { - serviceAnnouncer.announce(node); - } + leaderLifecycle.addHandler( + new Lifecycle.Handler() + { + @Override + public void start() throws Exception + { + serviceAnnouncer.announce(node); + } - @Override - public void stop() - { - serviceAnnouncer.unannounce(node); - } - } - ); - try { - leaderLifecycle.start(); - leading = true; - while (leading && !Thread.currentThread().isInterrupted()) { - mayBeStopped.await(); + @Override + public void stop() + { + serviceAnnouncer.unannounce(node); } } - catch (InterruptedException e) { - log.debug("Interrupted while waiting"); - // Suppress so we can bow out gracefully - } - finally { - log.info("Bowing out!"); - stopLeading(); - } - } - catch (Exception e) { - log.makeAlert(e, "Failed to lead").emit(); - throw Throwables.propagate(e); - } - finally { - giant.unlock(); - } - } + ); - @Override - public void stateChanged(CuratorFramework client, ConnectionState newState) - { - if (newState == ConnectionState.LOST || newState == ConnectionState.SUSPENDED) { - // disconnected from zk. 
assume leadership is gone - stopLeading(); - } + leaderLifecycle.start(); + } + catch (Exception e) { + throw Throwables.propagate(e); + } + finally { + giant.unlock(); + } + } + + @Override + public void stopBeingLeader() + { + giant.lock(); + try { + final Lifecycle leaderLifecycle = leaderLifecycleRef.getAndSet(null); + if (leaderLifecycle != null) { + leaderLifecycle.stop(); } } - ); - - leaderSelector.setId(node.getHostAndPortToUse()); - leaderSelector.autoRequeue(); + finally { + giant.unlock(); + } + } + }; } /** @@ -195,7 +174,7 @@ public class TaskMaster giant.lock(); try { - leaderSelector.start(); + overlordLeaderSelector.registerListener(leadershipListener); } finally { giant.unlock(); @@ -212,30 +191,7 @@ public class TaskMaster giant.lock(); try { - leaderSelector.close(); - stopLeading(); - } - finally { - giant.unlock(); - } - } - - /** - * Relinquish leadership. May be called multiple times, even when not currently the leader. - */ - private void stopLeading() - { - giant.lock(); - - try { - if (leading) { - leading = false; - mayBeStopped.signalAll(); - final Lifecycle leaderLifecycle = leaderLifecycleRef.getAndSet(null); - if (leaderLifecycle != null) { - leaderLifecycle.stop(); - } - } + overlordLeaderSelector.unregisterListener(); } finally { giant.unlock(); @@ -244,27 +200,17 @@ public class TaskMaster public boolean isLeader() { - return leading; + return overlordLeaderSelector.isLeader(); } public String getCurrentLeader() { - try { - final Participant leader = leaderSelector.getLeader(); - if (leader != null && leader.isLeader()) { - return leader.getId(); - } else { - return null; - } - } - catch (Exception e) { - throw Throwables.propagate(e); - } + return overlordLeaderSelector.getCurrentLeader(); } public Optional getTaskRunner() { - if (leading) { + if (overlordLeaderSelector.isLeader()) { return Optional.of(taskRunner); } else { return Optional.absent(); @@ -273,7 +219,7 @@ public class TaskMaster public Optional getTaskQueue() { - if (leading) { + if (overlordLeaderSelector.isLeader()) { return Optional.of(taskQueue); } else { return Optional.absent(); @@ -282,7 +228,7 @@ public class TaskMaster public Optional getTaskActionClient(Task task) { - if (leading) { + if (overlordLeaderSelector.isLeader()) { return Optional.of(taskActionClientFactory.create(task)); } else { return Optional.absent(); @@ -291,7 +237,7 @@ public class TaskMaster public Optional getScalingStats() { - if (leading) { + if (overlordLeaderSelector.isLeader()) { return taskRunner.getScalingStats(); } else { return Optional.absent(); @@ -300,7 +246,7 @@ public class TaskMaster public Optional getSupervisorManager() { - if (leading) { + if (overlordLeaderSelector.isLeader()) { return Optional.of(supervisorManager); } else { return Optional.absent(); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerWorkItem.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerWorkItem.java index b6cd2604457..f3f3e3e066f 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerWorkItem.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskRunnerWorkItem.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.util.concurrent.ListenableFuture; import io.druid.indexing.common.TaskLocation; import io.druid.indexing.common.TaskStatus; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; /** @@ -36,12 +37,14 @@ public abstract class TaskRunnerWorkItem 
private final DateTime createdTime; private final DateTime queueInsertionTime; - public TaskRunnerWorkItem( - String taskId, - ListenableFuture result - ) + public TaskRunnerWorkItem(String taskId, ListenableFuture result) { - this(taskId, result, new DateTime(), new DateTime()); + this(taskId, result, DateTimes.nowUtc()); + } + + private TaskRunnerWorkItem(String taskId, ListenableFuture result, DateTime createdTime) + { + this(taskId, result, createdTime, createdTime); } public TaskRunnerWorkItem( diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java index 694d382471a..907704e2d77 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ThreadPoolTaskRunner.java @@ -42,6 +42,7 @@ import io.druid.indexing.common.TaskToolboxFactory; import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.autoscaling.ScalingStats; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.lifecycle.LifecycleStop; @@ -51,7 +52,6 @@ import io.druid.query.QueryRunner; import io.druid.query.QuerySegmentWalker; import io.druid.query.SegmentDescriptor; import io.druid.server.DruidNode; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.util.Collection; @@ -179,7 +179,7 @@ public class ThreadPoolTaskRunner implements TaskRunner, QuerySegmentWalker try { task.stopGracefully(); final TaskStatus taskStatus = item.getResult().get( - new Interval(new DateTime(start), taskConfig.getGracefulShutdownTimeout()).toDurationMillis(), + new Interval(DateTimes.utc(start), taskConfig.getGracefulShutdownTimeout()).toDurationMillis(), TimeUnit.MILLISECONDS ); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java index 1ce283254fe..73e7d0ddbfa 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java @@ -29,6 +29,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.DateTimes; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.PathChildrenCache; import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; @@ -51,9 +52,8 @@ public class ZkWorker implements Closeable private final Function cacheConverter; private AtomicReference worker; - private AtomicReference lastCompletedTaskTime = new AtomicReference<>(new DateTime()); + private AtomicReference lastCompletedTaskTime = new AtomicReference<>(DateTimes.nowUtc()); private AtomicReference blacklistedUntil = new AtomicReference<>(); - private AtomicInteger continuouslyFailedTasksCount = new AtomicInteger(0); public ZkWorker(Worker worker, PathChildrenCache statusCache, final ObjectMapper jsonMapper) diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerProvisioningStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerProvisioningStrategy.java index 
89915853e37..ce53188416b 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerProvisioningStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/AbstractWorkerProvisioningStrategy.java @@ -22,8 +22,8 @@ package io.druid.indexing.overlord.autoscaling; import com.google.common.base.Supplier; import com.metamx.emitter.EmittingLogger; import io.druid.indexing.overlord.WorkerTaskRunner; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.PeriodGranularity; -import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Period; @@ -91,7 +91,7 @@ public abstract class AbstractWorkerProvisioningStrategy implements Provisioning provisioningSchedulerConfig.getOriginTime(), null ); - final long startTime = granularity.bucketEnd(new DateTime()).getMillis(); + final long startTime = granularity.bucketEnd(DateTimes.nowUtc()).getMillis(); exec.scheduleAtFixedRate( new Runnable() diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java index 8663e8c7fd9..dcd3a2d978e 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java @@ -21,7 +21,6 @@ package io.druid.indexing.overlord.autoscaling; import com.google.common.base.Function; import com.google.common.base.Joiner; -import com.google.common.base.Optional; import com.google.common.base.Predicate; import com.google.common.base.Supplier; import com.google.common.collect.Collections2; @@ -40,6 +39,7 @@ import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.overlord.setup.WorkerSelectStrategy; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Duration; @@ -109,8 +109,8 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr private final Set currentlyProvisioning = Sets.newHashSet(); private final Set currentlyTerminating = Sets.newHashSet(); - private DateTime lastProvisionTime = new DateTime(); - private DateTime lastTerminateTime = new DateTime(); + private DateTime lastProvisionTime = DateTimes.nowUtc(); + private DateTime lastTerminateTime = lastProvisionTime; private PendingProvisioner(WorkerTaskRunner runner) { @@ -159,14 +159,14 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr break; } else { currentlyProvisioning.addAll(newNodes); - lastProvisionTime = new DateTime(); + lastProvisionTime = DateTimes.nowUtc(); scalingStats.addProvisionEvent(provisioned); want -= provisioned.getNodeIds().size(); didProvision = true; } } } else { - Duration durSinceLastProvision = new Duration(lastProvisionTime, new DateTime()); + Duration durSinceLastProvision = new Duration(lastProvisionTime, DateTimes.nowUtc()); log.info("%s provisioning. 
Current wait time: %s", currentlyProvisioning, durSinceLastProvision); if (durSinceLastProvision.isLongerThan(config.getMaxScalingDuration().toStandardDuration())) { log.makeAlert("Worker node provisioning taking too long!") @@ -250,14 +250,14 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr // Simulate assigning tasks to dummy workers using configured workerSelectStrategy // the number of additional workers needed to assign all the pending tasks is noted for (Task task : pendingTasks) { - Optional selectedWorker = workerSelectStrategy.findWorkerForTask( + final ImmutableWorkerInfo selectedWorker = workerSelectStrategy.findWorkerForTask( workerTaskRunnerConfig, ImmutableMap.copyOf(workersMap), task ); final ImmutableWorkerInfo workerRunningTask; - if (selectedWorker.isPresent()) { - workerRunningTask = selectedWorker.get(); + if (selectedWorker != null) { + workerRunningTask = selectedWorker; } else { // None of the existing worker can run this task, we need to provision one worker for it. // create a dummy worker and try to simulate assigning task to it. @@ -329,13 +329,13 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr .terminate(ImmutableList.copyOf(laziestWorkerIps)); if (terminated != null) { currentlyTerminating.addAll(terminated.getNodeIds()); - lastTerminateTime = new DateTime(); + lastTerminateTime = DateTimes.nowUtc(); scalingStats.addTerminateEvent(terminated); didTerminate = true; } } } else { - Duration durSinceLastTerminate = new Duration(lastTerminateTime, new DateTime()); + Duration durSinceLastTerminate = new Duration(lastTerminateTime, DateTimes.nowUtc()); log.info("%s terminating. Current wait time: %s", currentlyTerminating, durSinceLastTerminate); @@ -407,7 +407,7 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr task.getId() ) ), - DateTime.now() + DateTimes.nowUtc() ); } @@ -418,7 +418,7 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ProvisioningSchedulerConfig.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ProvisioningSchedulerConfig.java index 4c7c9b03692..3bff13e999a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ProvisioningSchedulerConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ProvisioningSchedulerConfig.java @@ -20,6 +20,7 @@ package io.druid.indexing.overlord.autoscaling; import com.fasterxml.jackson.annotation.JsonProperty; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Period; @@ -37,7 +38,7 @@ public class ProvisioningSchedulerConfig private Period terminatePeriod = new Period("PT5M"); @JsonProperty - private DateTime originTime = new DateTime("2012-01-01T00:55:00.000Z"); + private DateTime originTime = DateTimes.of("2012-01-01T00:55:00.000Z"); public boolean isDoAutoscale() { diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java index b68482ccce9..e210c17ac5c 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/ScalingStats.java @@ -24,6 
+24,7 @@ import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.collect.Lists; import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.Ordering; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import java.util.Collections; @@ -68,26 +69,14 @@ public class ScalingStats public void addProvisionEvent(AutoScalingData data) { synchronized (lock) { - recentEvents.add( - new ScalingEvent( - data, - new DateTime(), - EVENT.PROVISION - ) - ); + recentEvents.add(new ScalingEvent(data, DateTimes.nowUtc(), EVENT.PROVISION)); } } public void addTerminateEvent(AutoScalingData data) { synchronized (lock) { - recentEvents.add( - new ScalingEvent( - data, - new DateTime(), - EVENT.TERMINATE - ) - ); + recentEvents.add(new ScalingEvent(data, DateTimes.nowUtc(), EVENT.TERMINATE)); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java index 9c2da6e1b37..d5a79c18dff 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java @@ -36,6 +36,7 @@ import io.druid.indexing.overlord.TaskRunnerWorkItem; import io.druid.indexing.overlord.WorkerTaskRunner; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Duration; @@ -103,8 +104,8 @@ public class SimpleWorkerProvisioningStrategy extends AbstractWorkerProvisioning private final Set currentlyTerminating = Sets.newHashSet(); private int targetWorkerCount = -1; - private DateTime lastProvisionTime = new DateTime(); - private DateTime lastTerminateTime = new DateTime(); + private DateTime lastProvisionTime = DateTimes.nowUtc(); + private DateTime lastTerminateTime = lastProvisionTime; SimpleProvisioner(WorkerTaskRunner runner) { @@ -154,7 +155,7 @@ public class SimpleWorkerProvisioningStrategy extends AbstractWorkerProvisioning break; } else { currentlyProvisioning.addAll(newNodes); - lastProvisionTime = new DateTime(); + lastProvisionTime = DateTimes.nowUtc(); scalingStats.addProvisionEvent(provisioned); want -= provisioned.getNodeIds().size(); didProvision = true; @@ -162,7 +163,7 @@ public class SimpleWorkerProvisioningStrategy extends AbstractWorkerProvisioning } if (!currentlyProvisioning.isEmpty()) { - Duration durSinceLastProvision = new Duration(lastProvisionTime, new DateTime()); + Duration durSinceLastProvision = new Duration(lastProvisionTime, DateTimes.nowUtc()); log.info("%s provisioning. 
Current wait time: %s", currentlyProvisioning, durSinceLastProvision); if (durSinceLastProvision.isLongerThan(config.getMaxScalingDuration().toStandardDuration())) { log.makeAlert("Worker node provisioning taking too long!") @@ -250,14 +251,14 @@ public class SimpleWorkerProvisioningStrategy extends AbstractWorkerProvisioning .terminate(ImmutableList.copyOf(laziestWorkerIps)); if (terminated != null) { currentlyTerminating.addAll(terminated.getNodeIds()); - lastTerminateTime = new DateTime(); + lastTerminateTime = DateTimes.nowUtc(); scalingStats.addTerminateEvent(terminated); didTerminate = true; } } } } else { - Duration durSinceLastTerminate = new Duration(lastTerminateTime, new DateTime()); + Duration durSinceLastTerminate = new Duration(lastTerminateTime, DateTimes.nowUtc()); log.info("%s terminating. Current wait time: %s", currentlyTerminating, durSinceLastTerminate); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java index 4b25c540b63..f98df52c743 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java @@ -52,6 +52,8 @@ import io.druid.indexing.overlord.WorkerTaskRunner; import io.druid.indexing.overlord.autoscaling.ScalingStats; import io.druid.indexing.overlord.http.security.TaskResourceFilter; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; @@ -291,7 +293,7 @@ public class OverlordResource @QueryParam("count") final Integer count ) { - Interval theInterval = interval == null ? null : new Interval(interval); + Interval theInterval = interval == null ? null : Intervals.of(interval); if (theInterval == null && count != null) { try { return Response.ok( @@ -420,8 +422,8 @@ public class OverlordResource new TaskRunnerWorkItem( task.getId(), SettableFuture.create(), - new DateTime(0), - new DateTime(0) + DateTimes.EPOCH, + DateTimes.EPOCH ) { @Override @@ -539,8 +541,8 @@ public class OverlordResource // Would be nice to include the real created date, but the TaskStorage API doesn't yet allow it. 
return new TaskResponseObject( taskStatus.getId(), - new DateTime(0), - new DateTime(0), + DateTimes.EPOCH, + DateTimes.EPOCH, Optional.of(taskStatus), TaskLocation.unknown() ); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/AffinityConfig.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/AffinityConfig.java index 7d06bcd6940..41f21697933 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/AffinityConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/AffinityConfig.java @@ -21,34 +21,55 @@ package io.druid.indexing.overlord.setup; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.collect.Maps; -import java.util.List; +import java.util.Collection; import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; /** */ public class AffinityConfig { // key:Datasource, value:[nodeHostNames] - private Map> affinity = Maps.newHashMap(); + private final Map> affinity; + private final boolean strong; + + // Cache of the names of workers that have affinity for any dataSource. + // Not part of the serialized JSON or equals/hashCode. + private final Set affinityWorkers; @JsonCreator public AffinityConfig( - @JsonProperty("affinity") Map> affinity + @JsonProperty("affinity") Map> affinity, + @JsonProperty("strong") boolean strong ) { this.affinity = affinity; + this.strong = strong; + this.affinityWorkers = affinity.values().stream().flatMap(Collection::stream).collect(Collectors.toSet()); } @JsonProperty - public Map> getAffinity() + public Map> getAffinity() { return affinity; } + @JsonProperty + public boolean isStrong() + { + return strong; + } + + public Set getAffinityWorkers() + { + return affinityWorkers; + } + @Override - public boolean equals(Object o) + public boolean equals(final Object o) { if (this == o) { return true; @@ -56,21 +77,23 @@ public class AffinityConfig if (o == null || getClass() != o.getClass()) { return false; } - - AffinityConfig that = (AffinityConfig) o; - - if (affinity != null - ? !Maps.difference(affinity, that.affinity).entriesDiffering().isEmpty() - : that.affinity != null) { - return false; - } - - return true; + final AffinityConfig that = (AffinityConfig) o; + return strong == that.strong && + Objects.equals(affinity, that.affinity); } @Override public int hashCode() { - return affinity != null ? 
affinity.hashCode() : 0; + return Objects.hash(affinity, strong); + } + + @Override + public String toString() + { + return "AffinityConfig{" + + "affinity=" + affinity + + ", strong=" + strong + + '}'; } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategy.java index 17b7b81228c..22139f80bba 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategy.java @@ -21,104 +21,17 @@ package io.druid.indexing.overlord.setup; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Optional; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; -import io.druid.indexing.common.task.Task; -import io.druid.indexing.overlord.ImmutableWorkerInfo; -import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; - -import java.util.List; -import java.util.Set; /** + * Only exists for backwards compatibility with existing "equalDistributionWithAffinity" worker configs. */ public class EqualDistributionWithAffinityWorkerSelectStrategy extends EqualDistributionWorkerSelectStrategy { - private final AffinityConfig affinityConfig; - private final Set affinityWorkerHosts = Sets.newHashSet(); - @JsonCreator public EqualDistributionWithAffinityWorkerSelectStrategy( - @JsonProperty("affinityConfig") AffinityConfig affinityConfig + @JsonProperty("affinityConfig") AffinityConfig affinityConfig ) { - this.affinityConfig = affinityConfig; - for (List affinityWorkers : affinityConfig.getAffinity().values()) { - for (String affinityWorker : affinityWorkers) { - this.affinityWorkerHosts.add(affinityWorker); - } - } - } - - @JsonProperty - public AffinityConfig getAffinityConfig() - { - return affinityConfig; - } - - @Override - public Optional findWorkerForTask( - final WorkerTaskRunnerConfig config, - final ImmutableMap zkWorkers, - final Task task - ) - { - // don't run other datasources on affinity workers; we only want our configured datasources to run on them - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (String workerHost : zkWorkers.keySet()) { - if (!affinityWorkerHosts.contains(workerHost)) { - builder.put(workerHost, zkWorkers.get(workerHost)); - } - } - ImmutableMap eligibleWorkers = builder.build(); - - List workerHosts = affinityConfig.getAffinity().get(task.getDataSource()); - if (workerHosts == null) { - return super.findWorkerForTask(config, eligibleWorkers, task); - } - - ImmutableMap.Builder affinityBuilder = new ImmutableMap.Builder<>(); - for (String workerHost : workerHosts) { - ImmutableWorkerInfo zkWorker = zkWorkers.get(workerHost); - if (zkWorker != null) { - affinityBuilder.put(workerHost, zkWorker); - } - } - ImmutableMap affinityWorkers = affinityBuilder.build(); - - if (!affinityWorkers.isEmpty()) { - Optional retVal = super.findWorkerForTask(config, affinityWorkers, task); - if (retVal.isPresent()) { - return retVal; - } - } - - return super.findWorkerForTask(config, eligibleWorkers, task); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - 
EqualDistributionWithAffinityWorkerSelectStrategy that = (EqualDistributionWithAffinityWorkerSelectStrategy) o; - - if (affinityConfig != null ? !affinityConfig.equals(that.affinityConfig) : that.affinityConfig != null) { - return false; - } - - return true; - } - - @Override - public int hashCode() - { - return affinityConfig != null ? affinityConfig.hashCode() : 0; + super(affinityConfig); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategy.java index becc8053b54..cebcc231df3 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategy.java @@ -19,41 +19,84 @@ package io.druid.indexing.overlord.setup; -import com.google.common.base.Optional; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; import java.util.Comparator; -import java.util.TreeSet; +import java.util.Map; +import java.util.Objects; /** */ public class EqualDistributionWorkerSelectStrategy implements WorkerSelectStrategy { - @Override - public Optional findWorkerForTask( - WorkerTaskRunnerConfig config, ImmutableMap zkWorkers, Task task + private final AffinityConfig affinityConfig; + + @JsonCreator + public EqualDistributionWorkerSelectStrategy( + @JsonProperty("affinityConfig") AffinityConfig affinityConfig ) { - // the version sorting is needed because if the workers have the same available capacity only one of them is - // returned. Exists the possibility that this worker is disabled and doesn't have valid version so can't - // run new tasks, so in this case the workers are sorted using version to ensure that if exists enable - // workers the comparator return one of them. 
- final TreeSet sortedWorkers = Sets.newTreeSet( - Comparator.comparing(ImmutableWorkerInfo::getAvailableCapacity).reversed() - .thenComparing(zkWorker -> zkWorker.getWorker().getVersion())); - sortedWorkers.addAll(zkWorkers.values()); - final String minWorkerVer = config.getMinWorkerVersion(); + this.affinityConfig = affinityConfig; + } - for (ImmutableWorkerInfo zkWorker : sortedWorkers) { - if (zkWorker.canRunTask(task) && zkWorker.isValidVersion(minWorkerVer)) { - return Optional.of(zkWorker); - } + @JsonProperty + public AffinityConfig getAffinityConfig() + { + return affinityConfig; + } + + @Override + public ImmutableWorkerInfo findWorkerForTask( + final WorkerTaskRunnerConfig config, + final ImmutableMap zkWorkers, + final Task task + ) + { + return WorkerSelectUtils.selectWorker( + task, + zkWorkers, + config, + affinityConfig, + EqualDistributionWorkerSelectStrategy::selectFromEligibleWorkers + ); + } + + private static ImmutableWorkerInfo selectFromEligibleWorkers(final Map eligibleWorkers) + { + return eligibleWorkers.values().stream().max( + Comparator.comparing(ImmutableWorkerInfo::getAvailableCapacity) + ).orElse(null); + } + + @Override + public boolean equals(final Object o) + { + if (this == o) { + return true; } + if (o == null || getClass() != o.getClass()) { + return false; + } + final EqualDistributionWorkerSelectStrategy that = (EqualDistributionWorkerSelectStrategy) o; + return Objects.equals(affinityConfig, that.affinityConfig); + } - return Optional.absent(); + @Override + public int hashCode() + { + return Objects.hash(affinityConfig); + } + + @Override + public String toString() + { + return "EqualDistributionWorkerSelectStrategy{" + + "affinityConfig=" + affinityConfig + + '}'; } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategy.java index 2b01bb70268..6dd76ff0469 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategy.java @@ -21,105 +21,17 @@ package io.druid.indexing.overlord.setup; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Optional; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; -import io.druid.indexing.common.task.Task; -import io.druid.indexing.overlord.ImmutableWorkerInfo; -import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; - -import java.util.List; -import java.util.Set; /** + * Only exists for backwards compatibility with existing "fillCapacityWithAffinity" worker configs. 
*/ public class FillCapacityWithAffinityWorkerSelectStrategy extends FillCapacityWorkerSelectStrategy { - private final AffinityConfig affinityConfig; - private final Set affinityWorkerHosts = Sets.newHashSet(); - @JsonCreator public FillCapacityWithAffinityWorkerSelectStrategy( @JsonProperty("affinityConfig") AffinityConfig affinityConfig ) { - this.affinityConfig = affinityConfig; - for (List affinityWorkers : affinityConfig.getAffinity().values()) { - for (String affinityWorker : affinityWorkers) { - this.affinityWorkerHosts.add(affinityWorker); - } - } - } - - @JsonProperty - public AffinityConfig getAffinityConfig() - { - return affinityConfig; - } - - @Override - public Optional findWorkerForTask( - final WorkerTaskRunnerConfig config, - final ImmutableMap zkWorkers, - final Task task - ) - { - // don't run other datasources on affinity workers; we only want our configured datasources to run on them - ImmutableMap.Builder builder = new ImmutableMap.Builder<>(); - for (String workerHost : zkWorkers.keySet()) { - if (!affinityWorkerHosts.contains(workerHost)) { - builder.put(workerHost, zkWorkers.get(workerHost)); - } - } - ImmutableMap eligibleWorkers = builder.build(); - - List workerHosts = affinityConfig.getAffinity().get(task.getDataSource()); - if (workerHosts == null) { - return super.findWorkerForTask(config, eligibleWorkers, task); - } - - ImmutableMap.Builder affinityBuilder = new ImmutableMap.Builder<>(); - for (String workerHost : workerHosts) { - ImmutableWorkerInfo zkWorker = zkWorkers.get(workerHost); - if (zkWorker != null) { - affinityBuilder.put(workerHost, zkWorker); - } - } - ImmutableMap affinityWorkers = affinityBuilder.build(); - - if (!affinityWorkers.isEmpty()) { - Optional retVal = super.findWorkerForTask(config, affinityWorkers, task); - if (retVal.isPresent()) { - return retVal; - } - } - - return super.findWorkerForTask(config, eligibleWorkers, task); - } - - @Override - public boolean equals(Object o) - { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - FillCapacityWithAffinityWorkerSelectStrategy that = (FillCapacityWithAffinityWorkerSelectStrategy) o; - - if (affinityConfig != null ? !affinityConfig.equals(that.affinityConfig) : that.affinityConfig != null) { - return false; - } - - return true; - } - - @Override - public int hashCode() - { - return affinityConfig != null ? 
affinityConfig.hashCode() : 0; + super(affinityConfig); } } - diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWorkerSelectStrategy.java index b098516080d..f49c4854afb 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/FillCapacityWorkerSelectStrategy.java @@ -19,59 +19,82 @@ package io.druid.indexing.overlord.setup; -import com.google.common.base.Optional; +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; -import com.google.common.primitives.Ints; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; import java.util.Comparator; -import java.util.TreeSet; +import java.util.Map; +import java.util.Objects; -/** - */ public class FillCapacityWorkerSelectStrategy implements WorkerSelectStrategy { + private final AffinityConfig affinityConfig; + + @JsonCreator + public FillCapacityWorkerSelectStrategy( + @JsonProperty("affinityConfig") AffinityConfig affinityConfig + ) + { + this.affinityConfig = affinityConfig; + } + + @JsonProperty + public AffinityConfig getAffinityConfig() + { + return affinityConfig; + } + @Override - public Optional findWorkerForTask( + public ImmutableWorkerInfo findWorkerForTask( final WorkerTaskRunnerConfig config, final ImmutableMap zkWorkers, final Task task ) { - TreeSet sortedWorkers = Sets.newTreeSet( - new Comparator() - { - @Override - public int compare( - ImmutableWorkerInfo zkWorker, ImmutableWorkerInfo zkWorker2 - ) - { - int retVal = Ints.compare(zkWorker2.getCurrCapacityUsed(), zkWorker.getCurrCapacityUsed()); - // the version sorting is needed because if the workers have the same currCapacityUsed only one of them is - // returned. Exists the possibility that this worker is disabled and doesn't have valid version so can't - // run new tasks, so in this case the workers are sorted using version to ensure that if exists enable - // workers the comparator return one of them. 
- - if (retVal == 0) { - retVal = zkWorker.getWorker().getVersion().compareTo(zkWorker2.getWorker().getVersion()); - } - - return retVal; - } - } + return WorkerSelectUtils.selectWorker( + task, + zkWorkers, + config, + affinityConfig, + FillCapacityWorkerSelectStrategy::selectFromEligibleWorkers ); - sortedWorkers.addAll(zkWorkers.values()); - final String minWorkerVer = config.getMinWorkerVersion(); + } - for (ImmutableWorkerInfo zkWorker : sortedWorkers) { - if (zkWorker.canRunTask(task) && zkWorker.isValidVersion(minWorkerVer)) { - return Optional.of(zkWorker); - } + private static ImmutableWorkerInfo selectFromEligibleWorkers(final Map eligibleWorkers) + { + return eligibleWorkers.values().stream().max( + Comparator.comparing(ImmutableWorkerInfo::getCurrCapacityUsed) + ).orElse(null); + } + + @Override + public boolean equals(final Object o) + { + if (this == o) { + return true; } + if (o == null || getClass() != o.getClass()) { + return false; + } + final FillCapacityWorkerSelectStrategy that = (FillCapacityWorkerSelectStrategy) o; + return Objects.equals(affinityConfig, that.affinityConfig); + } - return Optional.absent(); + @Override + public int hashCode() + { + return Objects.hash(affinityConfig); + } + + @Override + public String toString() + { + return "FillCapacityWorkerSelectStrategy{" + + "affinityConfig=" + affinityConfig + + '}'; } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategy.java index 1d8860a6177..cd0f487e11c 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategy.java @@ -22,11 +22,9 @@ package io.druid.indexing.overlord.setup; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Optional; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; - import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; @@ -73,12 +71,12 @@ public class JavaScriptWorkerSelectStrategy implements WorkerSelectStrategy } @Override - public Optional findWorkerForTask( + public ImmutableWorkerInfo findWorkerForTask( WorkerTaskRunnerConfig config, ImmutableMap zkWorkers, Task task ) { String worker = fnSelector.apply(config, zkWorkers, task); - return Optional.fromNullable(worker == null ? null : zkWorkers.get(worker)); + return worker == null ? 
null : zkWorkers.get(worker); } @JsonProperty diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfig.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfig.java index 0e58fc39c5b..bbe163a4635 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfig.java @@ -29,7 +29,7 @@ import io.druid.indexing.overlord.autoscaling.NoopAutoScaler; public class WorkerBehaviorConfig { public static final String CONFIG_KEY = "worker.config"; - public static WorkerSelectStrategy DEFAULT_STRATEGY = new EqualDistributionWorkerSelectStrategy(); + public static WorkerSelectStrategy DEFAULT_STRATEGY = new EqualDistributionWorkerSelectStrategy(null); public static AutoScaler DEFAULT_AUTOSCALER = new NoopAutoScaler(); public static WorkerBehaviorConfig defaultConfig() diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectStrategy.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectStrategy.java index d58de1d6b7b..aae5c1f8350 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectStrategy.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectStrategy.java @@ -21,12 +21,13 @@ package io.druid.indexing.overlord.setup; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.google.common.base.Optional; import com.google.common.collect.ImmutableMap; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; +import javax.annotation.Nullable; + /** * The {@link io.druid.indexing.overlord.RemoteTaskRunner} uses this class to select a worker to assign tasks to. */ @@ -49,7 +50,8 @@ public interface WorkerSelectStrategy * * @return A {@link ImmutableWorkerInfo} to run the task if one is available. */ - Optional findWorkerForTask( + @Nullable + ImmutableWorkerInfo findWorkerForTask( final WorkerTaskRunnerConfig config, final ImmutableMap zkWorkers, final Task task diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectUtils.java b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectUtils.java new file mode 100644 index 00000000000..834076b15f2 --- /dev/null +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/setup/WorkerSelectUtils.java @@ -0,0 +1,118 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.indexing.overlord.setup; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; +import io.druid.indexing.common.task.Task; +import io.druid.indexing.overlord.ImmutableWorkerInfo; +import io.druid.indexing.overlord.config.WorkerTaskRunnerConfig; + +import javax.annotation.Nullable; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class WorkerSelectUtils +{ + private WorkerSelectUtils() + { + // No instantiation. + } + + /** + * Helper for {@link WorkerSelectStrategy} implementations. + * + * @param allWorkers map of all workers, in the style provided to {@link WorkerSelectStrategy} + * @param affinityConfig affinity config, or null + * @param workerSelector function that receives a list of eligible workers: version is high enough, worker can run + * the task, and worker satisfies the affinity config. may return null. + * + * @return selected worker from "allWorkers", or null. + */ + @Nullable + public static ImmutableWorkerInfo selectWorker( + final Task task, + final Map allWorkers, + final WorkerTaskRunnerConfig workerTaskRunnerConfig, + @Nullable final AffinityConfig affinityConfig, + final Function, ImmutableWorkerInfo> workerSelector + ) + { + // Workers that could potentially run this task, ignoring affinityConfig. + final Map runnableWorkers = allWorkers + .values() + .stream() + .filter(worker -> worker.canRunTask(task) + && worker.isValidVersion(workerTaskRunnerConfig.getMinWorkerVersion())) + .collect(Collectors.toMap(w -> w.getWorker().getHost(), Function.identity())); + + if (affinityConfig == null) { + // All runnable workers are valid. + return workerSelector.apply(ImmutableMap.copyOf(runnableWorkers)); + } else { + // Workers assigned to the affinity pool for our task. + final Set dataSourceWorkers = affinityConfig.getAffinity().get(task.getDataSource()); + + if (dataSourceWorkers == null) { + // No affinity config for this dataSource; use non-affinity workers. + return workerSelector.apply(getNonAffinityWorkers(affinityConfig, runnableWorkers)); + } else { + // Get runnable, affinity workers. + final ImmutableMap dataSourceWorkerMap = + ImmutableMap.copyOf(Maps.filterKeys(runnableWorkers, dataSourceWorkers::contains)); + + final ImmutableWorkerInfo selected = workerSelector.apply(dataSourceWorkerMap); + + if (selected != null) { + return selected; + } else if (affinityConfig.isStrong()) { + return null; + } else { + // Weak affinity allows us to use nonAffinityWorkers for this dataSource, if no affinity workers + // are available. + return workerSelector.apply(getNonAffinityWorkers(affinityConfig, runnableWorkers)); + } + } + } + } + + /** + * Return workers not assigned to any affinity pool at all. 
+ * + * @param affinityConfig affinity config + * @param workerMap map of worker hostname to worker info + * + * @return map of worker hostname to worker info + */ + private static ImmutableMap getNonAffinityWorkers( + final AffinityConfig affinityConfig, + final Map workerMap + ) + { + return ImmutableMap.copyOf( + Maps.filterKeys( + workerMap, + workerHost -> !affinityConfig.getAffinityWorkers().contains(workerHost) + ) + ); + } +} diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java index 80956556aee..f2b370f9cb7 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/WorkerCuratorCoordinator.java @@ -25,10 +25,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; - import io.druid.curator.CuratorUtils; import io.druid.curator.announcement.Announcer; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; @@ -37,7 +37,6 @@ import io.druid.server.initialization.IndexerZkConfig; import org.apache.curator.framework.CuratorFramework; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; -import org.joda.time.DateTime; import java.util.Arrays; import java.util.List; @@ -98,7 +97,7 @@ public class WorkerCuratorCoordinator curatorFramework, getTaskPathForWorker(), CreateMode.PERSISTENT, - jsonMapper.writeValueAsBytes(ImmutableMap.of("created", new DateTime().toString())), + jsonMapper.writeValueAsBytes(ImmutableMap.of("created", DateTimes.nowUtc().toString())), config.getMaxZnodeBytes() ); @@ -106,7 +105,7 @@ public class WorkerCuratorCoordinator curatorFramework, getStatusPathForWorker(), CreateMode.PERSISTENT, - jsonMapper.writeValueAsBytes(ImmutableMap.of("created", new DateTime().toString())), + jsonMapper.writeValueAsBytes(ImmutableMap.of("created", DateTimes.nowUtc().toString())), config.getMaxZnodeBytes() ); diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java index 3036f1f1850..6c83eea9bb8 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycle.java @@ -33,12 +33,11 @@ import io.druid.indexing.common.actions.TaskActionClientFactory; import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.TaskRunner; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; -import org.joda.time.DateTime; - import java.io.File; import java.io.IOException; import java.io.InputStream; @@ -118,7 +117,7 @@ public class ExecutorLifecycle log.info("Attempting to lock file[%s].", taskLockFile); final long startLocking = System.currentTimeMillis(); - final long timeout = new 
DateTime(startLocking).plus(taskConfig.getDirectoryLockTimeout()).getMillis(); + final long timeout = DateTimes.utc(startLocking).plus(taskConfig.getDirectoryLockTimeout()).getMillis(); while (taskLockFileLock == null && System.currentTimeMillis() < timeout) { taskLockFileLock = taskLockChannel.tryLock(); if (taskLockFileLock == null) { diff --git a/indexing-service/src/main/java/io/druid/server/initialization/IndexerZkConfig.java b/indexing-service/src/main/java/io/druid/server/initialization/IndexerZkConfig.java index 50c45f443f6..eee927116d0 100644 --- a/indexing-service/src/main/java/io/druid/server/initialization/IndexerZkConfig.java +++ b/indexing-service/src/main/java/io/druid/server/initialization/IndexerZkConfig.java @@ -35,8 +35,7 @@ public class IndexerZkConfig @JsonProperty("base") String base, @JsonProperty("announcementsPath") String announcementsPath, @JsonProperty("tasksPath") String tasksPath, - @JsonProperty("statusPath") String statusPath, - @JsonProperty("leaderLatchPath") String leaderLatchPath + @JsonProperty("statusPath") String statusPath ) { this.zkPathsConfig = zkPathsConfig; @@ -44,7 +43,6 @@ public class IndexerZkConfig this.announcementsPath = announcementsPath; this.tasksPath = tasksPath; this.statusPath = statusPath; - this.leaderLatchPath = leaderLatchPath; } @JacksonInject @@ -62,9 +60,6 @@ public class IndexerZkConfig @JsonProperty private final String statusPath; - @JsonProperty - private final String leaderLatchPath; - private String defaultIndexerPath(final String subPath) { return ZKPaths.makePath(getBase(), subPath); @@ -90,11 +85,6 @@ public class IndexerZkConfig return statusPath == null ? defaultIndexerPath("status") : statusPath; } - public String getLeaderLatchPath() - { - return leaderLatchPath == null ? defaultIndexerPath("leaderLatchPath") : leaderLatchPath; - } - public ZkPathsConfig getZkPathsConfig() { return zkPathsConfig; @@ -120,9 +110,6 @@ public class IndexerZkConfig if (base != null ? !base.equals(that.base) : that.base != null) { return false; } - if (leaderLatchPath != null ? !leaderLatchPath.equals(that.leaderLatchPath) : that.leaderLatchPath != null) { - return false; - } if (statusPath != null ? !statusPath.equals(that.statusPath) : that.statusPath != null) { return false; } @@ -144,7 +131,6 @@ public class IndexerZkConfig result = 31 * result + (announcementsPath != null ? announcementsPath.hashCode() : 0); result = 31 * result + (tasksPath != null ? tasksPath.hashCode() : 0); result = 31 * result + (statusPath != null ? statusPath.hashCode() : 0); - result = 31 * result + (leaderLatchPath != null ? 
leaderLatchPath.hashCode() : 0); return result; } } diff --git a/indexing-service/src/test/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentCheckerTest.java b/indexing-service/src/test/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentCheckerTest.java index e011a22b615..2d7b14541c8 100644 --- a/indexing-service/src/test/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentCheckerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/appenderator/ActionBasedUsedSegmentCheckerTest.java @@ -23,13 +23,13 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.Intervals; import io.druid.segment.realtime.appenderator.SegmentIdentifier; import io.druid.segment.realtime.appenderator.UsedSegmentChecker; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; -import org.junit.Assert; import org.easymock.EasyMock; -import org.joda.time.Interval; +import org.junit.Assert; import org.junit.Test; import java.io.IOException; @@ -43,19 +43,19 @@ public class ActionBasedUsedSegmentCheckerTest final TaskActionClient taskActionClient = EasyMock.createMock(TaskActionClient.class); EasyMock.expect( taskActionClient.submit( - new SegmentListUsedAction("bar", null, ImmutableList.of(new Interval("2002/P1D"))) + new SegmentListUsedAction("bar", null, ImmutableList.of(Intervals.of("2002/P1D"))) ) ).andReturn( ImmutableList.of( DataSegment.builder() .dataSource("bar") - .interval(new Interval("2002/P1D")) + .interval(Intervals.of("2002/P1D")) .shardSpec(new LinearShardSpec(0)) .version("b") .build(), DataSegment.builder() .dataSource("bar") - .interval(new Interval("2002/P1D")) + .interval(Intervals.of("2002/P1D")) .shardSpec(new LinearShardSpec(1)) .version("b") .build() @@ -63,31 +63,31 @@ public class ActionBasedUsedSegmentCheckerTest ); EasyMock.expect( taskActionClient.submit( - new SegmentListUsedAction("foo", null, ImmutableList.of(new Interval("2000/P1D"), new Interval("2001/P1D"))) + new SegmentListUsedAction("foo", null, ImmutableList.of(Intervals.of("2000/P1D"), Intervals.of("2001/P1D"))) ) ).andReturn( ImmutableList.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2000/P1D")) + .interval(Intervals.of("2000/P1D")) .shardSpec(new LinearShardSpec(0)) .version("a") .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2000/P1D")) + .interval(Intervals.of("2000/P1D")) .shardSpec(new LinearShardSpec(1)) .version("a") .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2001/P1D")) + .interval(Intervals.of("2001/P1D")) .shardSpec(new LinearShardSpec(1)) .version("b") .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2002/P1D")) + .interval(Intervals.of("2002/P1D")) .shardSpec(new LinearShardSpec(1)) .version("b") .build() @@ -98,9 +98,9 @@ public class ActionBasedUsedSegmentCheckerTest final UsedSegmentChecker checker = new ActionBasedUsedSegmentChecker(taskActionClient); final Set segments = checker.findUsedSegments( ImmutableSet.of( - new SegmentIdentifier("foo", new Interval("2000/P1D"), "a", new LinearShardSpec(1)), - new SegmentIdentifier("foo", new Interval("2001/P1D"), "b", new LinearShardSpec(0)), - new SegmentIdentifier("bar", new Interval("2002/P1D"), "b", new LinearShardSpec(0)) + new SegmentIdentifier("foo", Intervals.of("2000/P1D"), "a", new 
LinearShardSpec(1)),
+        new SegmentIdentifier("foo", Intervals.of("2001/P1D"), "b", new LinearShardSpec(0)),
+        new SegmentIdentifier("bar", Intervals.of("2002/P1D"), "b", new LinearShardSpec(0))
        )
    );
@@ -108,13 +108,13 @@ public class ActionBasedUsedSegmentCheckerTest
        ImmutableSet.of(
            DataSegment.builder()
                       .dataSource("foo")
-                       .interval(new Interval("2000/P1D"))
+                       .interval(Intervals.of("2000/P1D"))
                       .shardSpec(new LinearShardSpec(1))
                       .version("a")
                       .build(),
            DataSegment.builder()
                       .dataSource("bar")
-                       .interval(new Interval("2002/P1D"))
+                       .interval(Intervals.of("2002/P1D"))
                       .shardSpec(new LinearShardSpec(0))
                       .version("b")
                       .build()
diff --git a/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java b/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java
index da3b66f3280..1542b33aef6 100644
--- a/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java
+++ b/indexing-service/src/test/java/io/druid/indexing/common/TaskToolboxTest.java
@@ -28,6 +28,7 @@ import io.druid.client.cache.CacheConfig;
 import io.druid.indexing.common.actions.TaskActionClientFactory;
 import io.druid.indexing.common.config.TaskConfig;
 import io.druid.indexing.common.task.Task;
+import io.druid.java.util.common.Intervals;
 import io.druid.query.QueryRunnerFactoryConglomerate;
 import io.druid.segment.IndexIO;
 import io.druid.segment.IndexMergerV9;
@@ -42,7 +43,6 @@ import io.druid.server.coordination.DataSegmentAnnouncer;
 import io.druid.server.coordination.DataSegmentServerAnnouncer;
 import io.druid.timeline.DataSegment;
 import org.easymock.EasyMock;
-import org.joda.time.Interval;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
@@ -165,7 +165,7 @@ public class TaskToolboxTest
        .expect(mockSegmentLoaderLocalCacheManager.withConfig(EasyMock.anyObject()))
        .andReturn(mockSegmentLoaderLocalCacheManager).anyTimes();
     EasyMock.replay(mockSegmentLoaderLocalCacheManager);
-    DataSegment dataSegment = DataSegment.builder().dataSource("source").interval(new Interval("2012-01-01/P1D")).version("1").size(1).build();
+    DataSegment dataSegment = DataSegment.builder().dataSource("source").interval(Intervals.of("2012-01-01/P1D")).version("1").size(1).build();
     List segments = ImmutableList.of
         (
             dataSegment
diff --git a/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java b/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java
index 506799085c3..4b107023370 100644
--- a/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java
+++ b/indexing-service/src/test/java/io/druid/indexing/common/TestUtils.java
@@ -23,9 +23,10 @@ import com.fasterxml.jackson.databind.InjectableValues;
 import com.fasterxml.jackson.databind.Module;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Stopwatch;
 import io.druid.guice.ServerModule;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.java.util.common.ISE;
+import io.druid.java.util.common.logger.Logger;
 import io.druid.math.expr.ExprMacroTable;
 import io.druid.query.expression.TestExprMacroTable;
 import io.druid.segment.IndexIO;
@@ -41,6 +42,8 @@ import java.util.concurrent.TimeUnit;
  */
 public class TestUtils
 {
+  private static final Logger log = new Logger(TestUtils.class);
+
   private final ObjectMapper jsonMapper;
   private final IndexMergerV9 indexMergerV9;
   private final IndexIO indexIO;
@@ -103,11 +106,12 @@ public class TestUtils
       while (!condition.isValid()) {
         Thread.sleep(100);
         if
(stopwatch.elapsed(TimeUnit.MILLISECONDS) > timeout) { - throw new ISE("Cannot find running task"); + throw new ISE("Condition[%s] not met", condition); } } } catch (Exception e) { + log.warn(e, "Condition[%s] not met within timeout[%,d]", condition, timeout); return false; } return true; diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/RemoteTaskActionClientTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/RemoteTaskActionClientTest.java index 50d75608631..c07d444dde9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/RemoteTaskActionClientTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/RemoteTaskActionClientTest.java @@ -34,9 +34,9 @@ import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import org.easymock.EasyMock; import org.jboss.netty.handler.codec.http.HttpResponseStatus; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -100,7 +100,7 @@ public class RemoteTaskActionClientTest result = Collections.singletonList(new TaskLock( "groupId", "dataSource", - new Interval(now - 30 * 1000, now), + Intervals.utc(now - 30 * 1000, now), "version" )); } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java index a587b38e1c1..888ae59dc90 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentAllocateActionTest.java @@ -31,6 +31,7 @@ import io.druid.indexing.common.TaskLock; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -56,8 +57,8 @@ public class SegmentAllocateActionTest public TaskActionTestKit taskActionTestKit = new TaskActionTestKit(); private static final String DATA_SOURCE = "none"; - private static final DateTime PARTY_TIME = new DateTime("1999"); - private static final DateTime THE_DISTANT_FUTURE = new DateTime("3000"); + private static final DateTime PARTY_TIME = DateTimes.of("1999"); + private static final DateTime THE_DISTANT_FUTURE = DateTimes.of("3000"); @Before public void setUp() diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java index 5cac6ce6a65..1dd84feb265 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentInsertActionTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.hamcrest.CoreMatchers; @@ -44,7 
+45,7 @@ public class SegmentInsertActionTest public TaskActionTestKit actionTestKit = new TaskActionTestKit(); private static final String DATA_SOURCE = "none"; - private static final Interval INTERVAL = new Interval("2020/2020T01"); + private static final Interval INTERVAL = Intervals.of("2020/2020T01"); private static final String PARTY_YEAR = "1999"; private static final String THE_DISTANT_FUTURE = "3000"; @@ -90,7 +91,7 @@ public class SegmentInsertActionTest final Task task = new NoopTask(null, 0, 0, null, null, null); final SegmentInsertAction action = new SegmentInsertAction(ImmutableSet.of(SEGMENT1, SEGMENT2)); actionTestKit.getTaskLockbox().add(task); - actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL), 5000); + actionTestKit.getTaskLockbox().lock(task, INTERVAL, 5000); action.perform(task, actionTestKit.getTaskActionToolbox()); Assert.assertEquals( @@ -108,7 +109,7 @@ public class SegmentInsertActionTest final Task task = new NoopTask(null, 0, 0, null, null, null); final SegmentInsertAction action = new SegmentInsertAction(ImmutableSet.of(SEGMENT3)); actionTestKit.getTaskLockbox().add(task); - actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL), 5000); + actionTestKit.getTaskLockbox().lock(task, INTERVAL, 5000); thrown.expect(IllegalStateException.class); thrown.expectMessage(CoreMatchers.startsWith("Segments not covered by locks for task")); diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListUsedActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListUsedActionTest.java index e22f600253b..6ae7c181f3b 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListUsedActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentListUsedActionTest.java @@ -22,6 +22,7 @@ package io.druid.indexing.common.actions; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import io.druid.TestUtil; +import io.druid.java.util.common.Intervals; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -37,7 +38,7 @@ public class SegmentListUsedActionTest @Test public void testSingleIntervalSerde() throws Exception { - Interval interval = Interval.parse("2014/2015"); + Interval interval = Intervals.of("2014/2015"); SegmentListUsedAction expected = new SegmentListUsedAction( "dataSource", @@ -53,7 +54,7 @@ public class SegmentListUsedActionTest @Test public void testMultiIntervalSerde() throws Exception { - List intervals = ImmutableList.of(Interval.parse("2014/2015"), Interval.parse("2016/2017")); + List intervals = ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2016/2017")); SegmentListUsedAction expected = new SegmentListUsedAction( "dataSource", null, @@ -71,6 +72,6 @@ public class SegmentListUsedActionTest String jsonStr = "{\"type\": \"segmentListUsed\", \"dataSource\": \"test\", \"interval\": \"2014/2015\"}"; SegmentListUsedAction actual = (SegmentListUsedAction) MAPPER.readValue(jsonStr, TaskAction.class); - Assert.assertEquals(new SegmentListUsedAction("test", Interval.parse("2014/2015"), null), actual); + Assert.assertEquals(new SegmentListUsedAction("test", Intervals.of("2014/2015"), null), actual); } } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java index 
c88d1db02ff..359fd762a57 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/actions/SegmentTransactionalInsertActionTest.java @@ -26,6 +26,7 @@ import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ObjectMetadata; import io.druid.indexing.overlord.SegmentPublishResult; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.hamcrest.CoreMatchers; @@ -44,7 +45,7 @@ public class SegmentTransactionalInsertActionTest public TaskActionTestKit actionTestKit = new TaskActionTestKit(); private static final String DATA_SOURCE = "none"; - private static final Interval INTERVAL = new Interval("2020/2020T01"); + private static final Interval INTERVAL = Intervals.of("2020/2020T01"); private static final String PARTY_YEAR = "1999"; private static final String THE_DISTANT_FUTURE = "3000"; @@ -89,7 +90,7 @@ public class SegmentTransactionalInsertActionTest { final Task task = new NoopTask(null, 0, 0, null, null, null); actionTestKit.getTaskLockbox().add(task); - actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL), 5000); + actionTestKit.getTaskLockbox().lock(task, INTERVAL, 5000); SegmentPublishResult result1 = new SegmentTransactionalInsertAction( ImmutableSet.of(SEGMENT1), @@ -130,7 +131,7 @@ public class SegmentTransactionalInsertActionTest { final Task task = new NoopTask(null, 0, 0, null, null, null); actionTestKit.getTaskLockbox().add(task); - actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL), 5000); + actionTestKit.getTaskLockbox().lock(task, INTERVAL, 5000); SegmentPublishResult result = new SegmentTransactionalInsertAction( ImmutableSet.of(SEGMENT1), @@ -150,7 +151,7 @@ public class SegmentTransactionalInsertActionTest final Task task = new NoopTask(null, 0, 0, null, null, null); final SegmentTransactionalInsertAction action = new SegmentTransactionalInsertAction(ImmutableSet.of(SEGMENT3)); actionTestKit.getTaskLockbox().add(task); - actionTestKit.getTaskLockbox().lock(task, new Interval(INTERVAL), 5000); + actionTestKit.getTaskLockbox().lock(task, INTERVAL, 5000); thrown.expect(IllegalStateException.class); thrown.expectMessage(CoreMatchers.startsWith("Segments not covered by locks for task")); diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java index 74b07627fa2..a512da94117 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/ConvertSegmentTaskTest.java @@ -23,6 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.indexing.common.TestUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.DateTime; @@ -46,7 +48,8 @@ public class ConvertSegmentTaskTest public void testSerializationSimple() throws Exception { final String dataSource = "billy"; - final Interval interval = new Interval(new DateTime().minus(1000), new DateTime()); + DateTime start = DateTimes.nowUtc(); + 
final Interval interval = new Interval(start.minus(1000), start); ConvertSegmentTask task = ConvertSegmentTask.create(dataSource, interval, null, false, true, null); @@ -56,7 +59,7 @@ public class ConvertSegmentTaskTest DataSegment segment = new DataSegment( dataSource, interval, - new DateTime().toString(), + DateTimes.nowUtc().toString(), ImmutableMap.of(), ImmutableList.of(), ImmutableList.of(), @@ -81,7 +84,7 @@ public class ConvertSegmentTaskTest + "}"; ConvertSegmentTask task = (ConvertSegmentTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("billy", task.getDataSource()); - Assert.assertEquals(new Interval("2015-08-27T00:00:00.000Z/2015-08-28T00:00:00.000Z"), task.getInterval()); + Assert.assertEquals(Intervals.of("2015-08-27T00:00:00.000Z/2015-08-28T00:00:00.000Z"), task.getInterval()); } @Test @@ -94,6 +97,6 @@ public class ConvertSegmentTaskTest + "}"; ConvertSegmentTask task = (ConvertSegmentTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("billy", task.getDataSource()); - Assert.assertEquals(new Interval("2015-08-27T00:00:00.000Z/2015-08-28T00:00:00.000Z"), task.getInterval()); + Assert.assertEquals(Intervals.of("2015-08-27T00:00:00.000Z/2015-08-28T00:00:00.000Z"), task.getInterval()); } } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java index c120845eb1b..be8885818c0 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopConverterTaskSerDeTest.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.indexing.common.TestUtils; +import io.druid.java.util.common.Intervals; import io.druid.segment.IndexSpec; import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.CompressionFactory; @@ -44,7 +45,7 @@ public class HadoopConverterTaskSerDeTest private static final String TASK_ID = "task id"; private static final String DATA_SOURCE = "datasource"; - private static final Interval INTERVAL = Interval.parse("2010/2011"); + private static final Interval INTERVAL = Intervals.of("2010/2011"); private static final String SEGMENT_VERSION = "some version"; private static final Map LOAD_SPEC = ImmutableMap.of("someKey", "someVal"); private static final List DIMENSIONS = ImmutableList.of("dim1", "dim2"); diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java index fd2adc740a5..0091c40b15d 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/IndexTaskTest.java @@ -41,6 +41,8 @@ import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.task.IndexTask.IndexIngestionSpec; import io.druid.indexing.common.task.IndexTask.IndexTuningConfig; import io.druid.indexing.overlord.SegmentPublishResult; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.parsers.ParseException; @@ -61,7 +63,6 @@ import 
io.druid.timeline.partition.HashBasedNumberedShardSpec; import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.NumberedShardSpec; import io.druid.timeline.partition.ShardSpec; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; @@ -151,13 +152,13 @@ public class IndexTaskTest Assert.assertEquals(2, segments.size()); Assert.assertEquals("test", segments.get(0).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); Assert.assertEquals(HashBasedNumberedShardSpec.class, segments.get(0).getShardSpec().getClass()); Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum()); Assert.assertEquals(2, ((NumberedShardSpec) segments.get(0).getShardSpec()).getPartitions()); Assert.assertEquals("test", segments.get(1).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(1).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(1).getInterval()); Assert.assertEquals(HashBasedNumberedShardSpec.class, segments.get(1).getShardSpec().getClass()); Assert.assertEquals(1, segments.get(1).getShardSpec().getPartitionNum()); Assert.assertEquals(2, ((NumberedShardSpec) segments.get(1).getShardSpec()).getPartitions()); @@ -196,12 +197,12 @@ public class IndexTaskTest Assert.assertEquals(2, segments.size()); Assert.assertEquals("test", segments.get(0).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); Assert.assertEquals(NumberedShardSpec.class, segments.get(0).getShardSpec().getClass()); Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum()); Assert.assertEquals("test", segments.get(1).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(1).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(1).getInterval()); Assert.assertEquals(NumberedShardSpec.class, segments.get(1).getShardSpec().getClass()); Assert.assertEquals(1, segments.get(1).getShardSpec().getPartitionNum()); } @@ -227,7 +228,7 @@ public class IndexTaskTest null, new ArbitraryGranularitySpec( Granularities.MINUTE, - Collections.singletonList(new Interval("2014/2015")) + Collections.singletonList(Intervals.of("2014/2015")) ), createTuningConfig(10, null, false, true), false @@ -261,7 +262,7 @@ public class IndexTaskTest new UniformGranularitySpec( Granularities.HOUR, Granularities.HOUR, - Collections.singletonList(new Interval("2015-03-01T08:00:00Z/2015-03-01T09:00:00Z")) + Collections.singletonList(Intervals.of("2015-03-01T08:00:00Z/2015-03-01T09:00:00Z")) ), createTuningConfig(50, null, false, true), false @@ -304,7 +305,7 @@ public class IndexTaskTest Assert.assertEquals(1, segments.size()); Assert.assertEquals("test", segments.get(0).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); Assert.assertTrue(segments.get(0).getShardSpec().getClass().equals(NoneShardSpec.class)); Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum()); } @@ -343,12 +344,12 @@ public class IndexTaskTest Assert.assertEquals(2, segments.size()); Assert.assertEquals("test", segments.get(0).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), 
segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); Assert.assertTrue(segments.get(0).getShardSpec().getClass().equals(NumberedShardSpec.class)); Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum()); Assert.assertEquals("test", segments.get(1).getDataSource()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(1).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(1).getInterval()); Assert.assertTrue(segments.get(1).getShardSpec().getClass().equals(NumberedShardSpec.class)); Assert.assertEquals(1, segments.get(1).getShardSpec().getPartitionNum()); } @@ -387,17 +388,17 @@ public class IndexTaskTest Assert.assertEquals(3, segments.size()); Assert.assertEquals("test", segments.get(0).getDataSource()); - Assert.assertEquals(new Interval("2014-01-01T00/PT1H"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014-01-01T00/PT1H"), segments.get(0).getInterval()); Assert.assertTrue(segments.get(0).getShardSpec().getClass().equals(NoneShardSpec.class)); Assert.assertEquals(0, segments.get(0).getShardSpec().getPartitionNum()); Assert.assertEquals("test", segments.get(1).getDataSource()); - Assert.assertEquals(new Interval("2014-01-01T01/PT1H"), segments.get(1).getInterval()); + Assert.assertEquals(Intervals.of("2014-01-01T01/PT1H"), segments.get(1).getInterval()); Assert.assertTrue(segments.get(1).getShardSpec().getClass().equals(NoneShardSpec.class)); Assert.assertEquals(0, segments.get(1).getShardSpec().getPartitionNum()); Assert.assertEquals("test", segments.get(2).getDataSource()); - Assert.assertEquals(new Interval("2014-01-01T02/PT1H"), segments.get(2).getInterval()); + Assert.assertEquals(Intervals.of("2014-01-01T02/PT1H"), segments.get(2).getInterval()); Assert.assertTrue(segments.get(2).getShardSpec().getClass().equals(NoneShardSpec.class)); Assert.assertEquals(0, segments.get(2).getShardSpec().getPartitionNum()); } @@ -448,7 +449,7 @@ public class IndexTaskTest Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions()); Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); } @Test @@ -497,7 +498,7 @@ public class IndexTaskTest Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions()); Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); } @Test @@ -541,7 +542,7 @@ public class IndexTaskTest for (int i = 0; i < 6; i++) { final DataSegment segment = segments.get(i); - final Interval expectedInterval = new Interval(StringUtils.format("2014-01-01T0%d/PT1H", (i / 2))); + final Interval expectedInterval = Intervals.of(StringUtils.format("2014-01-01T0%d/PT1H", (i / 2))); final int expectedPartitionNum = i % 2; Assert.assertEquals("test", segment.getDataSource()); @@ -583,7 +584,7 @@ public class IndexTaskTest for (int i = 0; i < 3; i++) { final DataSegment segment = segments.get(i); - final Interval expectedInterval = new Interval("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z"); + final Interval expectedInterval = Intervals.of("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z"); Assert.assertEquals("test", segment.getDataSource()); Assert.assertEquals(expectedInterval, 
segment.getInterval()); @@ -624,7 +625,7 @@ public class IndexTaskTest for (int i = 0; i < 5; i++) { final DataSegment segment = segments.get(i); - final Interval expectedInterval = new Interval("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z"); + final Interval expectedInterval = Intervals.of("2014-01-01T00:00:00.000Z/2014-01-02T00:00:00.000Z"); Assert.assertEquals("test", segment.getDataSource()); Assert.assertEquals(expectedInterval, segment.getInterval()); @@ -697,7 +698,7 @@ public class IndexTaskTest Assert.assertEquals(Arrays.asList("d"), segments.get(0).getDimensions()); Assert.assertEquals(Arrays.asList("val"), segments.get(0).getMetrics()); - Assert.assertEquals(new Interval("2014/P1D"), segments.get(0).getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segments.get(0).getInterval()); } @Test @@ -826,7 +827,7 @@ public class IndexTaskTest ); Assert.assertEquals(Arrays.asList("val"), segment.getMetrics()); - Assert.assertEquals(new Interval("2014/P1D"), segment.getInterval()); + Assert.assertEquals(Intervals.of("2014/P1D"), segment.getInterval()); } } @@ -892,7 +893,7 @@ public class IndexTaskTest if (taskAction instanceof LockListAction) { return (RetType) Collections.singletonList( new TaskLock( - "", "", null, new DateTime().toString() + "", "", null, DateTimes.nowUtc().toString() ) ); } @@ -902,7 +903,7 @@ public class IndexTaskTest "groupId", "test", ((LockAcquireAction) taskAction).getInterval(), - new DateTime().toString() + DateTimes.nowUtc().toString() ); } @@ -983,7 +984,7 @@ public class IndexTaskTest granularitySpec != null ? granularitySpec : new UniformGranularitySpec( Granularities.DAY, Granularities.MINUTE, - Arrays.asList(new Interval("2014/2015")) + Arrays.asList(Intervals.of("2014/2015")) ), jsonMapper ), diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java index bb3f982d8fd..0a61e0bdbbd 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/MergeTaskBaseTest.java @@ -23,8 +23,8 @@ import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; import com.google.common.hash.Hashing; import io.druid.indexing.common.TaskToolbox; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -39,9 +39,9 @@ public class MergeTaskBaseTest .version("V1"); final List segments = ImmutableList.builder() - .add(segmentBuilder.interval(new Interval("2012-01-04/2012-01-06")).build()) - .add(segmentBuilder.interval(new Interval("2012-01-05/2012-01-07")).build()) - .add(segmentBuilder.interval(new Interval("2012-01-03/2012-01-05")).build()) + .add(segmentBuilder.interval(Intervals.of("2012-01-04/2012-01-06")).build()) + .add(segmentBuilder.interval(Intervals.of("2012-01-05/2012-01-07")).build()) + .add(segmentBuilder.interval(Intervals.of("2012-01-03/2012-01-05")).build()) .build(); final MergeTaskBase testMergeTaskBase = new MergeTaskBase(null, "foo", segments, null) @@ -68,7 +68,7 @@ public class MergeTaskBaseTest @Test public void testInterval() { - Assert.assertEquals(new Interval("2012-01-03/2012-01-07"), testMergeTaskBase.getInterval()); + Assert.assertEquals(Intervals.of("2012-01-03/2012-01-07"), testMergeTaskBase.getInterval()); } @Test diff --git 
a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java index c6d954420fc..c40d8e30ec9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java @@ -69,6 +69,7 @@ import io.druid.indexing.test.TestDataSegmentKiller; import io.druid.indexing.test.TestDataSegmentPusher; import io.druid.indexing.test.TestIndexerMetadataStorageCoordinator; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; @@ -246,7 +247,7 @@ public class RealtimeIndexTaskTest EmittingLogger.registerEmitter(emitter); emitter.start(); taskExec = MoreExecutors.listeningDecorator(Execs.singleThreaded("realtime-index-task-test-%d")); - now = new DateTime(); + now = DateTimes.nowUtc(); } @After @@ -260,7 +261,7 @@ public class RealtimeIndexTaskTest { Assert.assertEquals( "index_realtime_test_0_2015-01-02T00:00:00.000Z_abcdefgh", - RealtimeIndexTask.makeTaskId("test", 0, new DateTime("2015-01-02"), 0x76543210) + RealtimeIndexTask.makeTaskId("test", 0, DateTimes.of("2015-01-02"), 0x76543210) ); } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java index 265e66a177c..6e68e8f9952 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/SameIntervalMergeTaskTest.java @@ -31,6 +31,7 @@ import io.druid.indexing.common.actions.SegmentInsertAction; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.Intervals; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.IndexIO; @@ -45,7 +46,6 @@ import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -85,7 +85,7 @@ public class SameIntervalMergeTaskTest final SameIntervalMergeTask task = new SameIntervalMergeTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), aggregators, true, indexSpec, @@ -107,7 +107,7 @@ public class SameIntervalMergeTaskTest Assert.assertEquals("foo", mergeSegment.getDataSource()); Assert.assertEquals(newVersion, mergeSegment.getVersion()); // the merged segment's interval is within the requested interval - Assert.assertTrue(new Interval("2010-01-01/P1D").contains(mergeSegment.getInterval())); + Assert.assertTrue(Intervals.of("2010-01-01/P1D").contains(mergeSegment.getInterval())); // the merged segment should be NoneShardSpec Assert.assertTrue(mergeSegment.getShardSpec() instanceof NoneShardSpec); } @@ -153,19 +153,19 @@ public class SameIntervalMergeTaskTest List segments = ImmutableList.of( DataSegment.builder() .dataSource(mergeTask.getDataSource()) - .interval(new Interval("2010-01-01/PT1H")) + 
.interval(Intervals.of("2010-01-01/PT1H")) .version("oldVersion") .shardSpec(new LinearShardSpec(0)) .build(), DataSegment.builder() .dataSource(mergeTask.getDataSource()) - .interval(new Interval("2010-01-01/PT1H")) + .interval(Intervals.of("2010-01-01/PT1H")) .version("oldVersion") .shardSpec(new LinearShardSpec(0)) .build(), DataSegment.builder() .dataSource(mergeTask.getDataSource()) - .interval(new Interval("2010-01-01/PT2H")) + .interval(Intervals.of("2010-01-01/PT2H")) .version("oldVersion") .shardSpec(new LinearShardSpec(0)) .build() diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java index b78e89997eb..02dfdbbd050 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/TaskSerdeTest.java @@ -33,6 +33,7 @@ import io.druid.indexing.common.TestUtils; import io.druid.indexing.common.task.IndexTask.IndexIOConfig; import io.druid.indexing.common.task.IndexTask.IndexIngestionSpec; import io.druid.indexing.common.task.IndexTask.IndexTuningConfig; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -53,7 +54,6 @@ import io.druid.segment.realtime.plumber.PlumberSchool; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.hamcrest.CoreMatchers; -import org.joda.time.Interval; import org.joda.time.Period; import org.junit.Assert; import org.junit.Rule; @@ -183,7 +183,7 @@ public class TaskSerdeTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P2D")) + ImmutableList.of(Intervals.of("2010-01-01/P2D")) ), jsonMapper ), @@ -245,7 +245,7 @@ public class TaskSerdeTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P2D")) + ImmutableList.of(Intervals.of("2010-01-01/P2D")) ), jsonMapper ), @@ -283,7 +283,7 @@ public class TaskSerdeTest final List segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2010-01-01/P1D")) + .interval(Intervals.of("2010-01-01/P1D")) .version("1234") .build() ); @@ -305,7 +305,7 @@ public class TaskSerdeTest final MergeTask task2 = (MergeTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -328,7 +328,7 @@ public class TaskSerdeTest ); Assert.assertEquals("foo", task3.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task3.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task3.getInterval()); Assert.assertEquals(segments, task3.getSegments()); Assert.assertEquals(aggregators, task3.getAggregators()); } @@ -340,7 +340,7 @@ public class TaskSerdeTest final SameIntervalMergeTask task = new SameIntervalMergeTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), aggregators, true, indexSpec, @@ -354,7 +354,7 @@ public class TaskSerdeTest final SameIntervalMergeTask task2 = (SameIntervalMergeTask) jsonMapper.readValue(json, 
Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -374,7 +374,7 @@ public class TaskSerdeTest final KillTask task = new KillTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), null ); @@ -384,7 +384,7 @@ public class TaskSerdeTest final KillTask task2 = (KillTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -395,20 +395,20 @@ public class TaskSerdeTest jsonMapper.writeValueAsString( new ClientKillQuery( "foo", - new Interval("2010-01-01/P1D") + Intervals.of("2010-01-01/P1D") ) ), Task.class ); Assert.assertEquals("foo", task3.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task3.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task3.getInterval()); } @Test public void testVersionConverterTaskSerde() throws Exception { final ConvertSegmentTask task = ConvertSegmentTask.create( - DataSegment.builder().dataSource("foo").interval(new Interval("2010-01-01/P1D")).version("1234").build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2010-01-01/P1D")).version("1234").build(), null, false, true, @@ -421,7 +421,7 @@ public class TaskSerdeTest final ConvertSegmentTask task2 = (ConvertSegmentTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -435,7 +435,7 @@ public class TaskSerdeTest { final ConvertSegmentTask.SubTask task = new ConvertSegmentTask.SubTask( "myGroupId", - DataSegment.builder().dataSource("foo").interval(new Interval("2010-01-01/P1D")).version("1234").build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2010-01-01/P1D")).version("1234").build(), indexSpec, false, true, @@ -546,12 +546,12 @@ public class TaskSerdeTest final List segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2010-01-01/P1D")) + .interval(Intervals.of("2010-01-01/P1D")) .version("1234") .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2010-01-02/P1D")) + .interval(Intervals.of("2010-01-02/P1D")) .version("5678") .build() ); @@ -573,7 +573,7 @@ public class TaskSerdeTest final AppendTask task2 = (AppendTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P2D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P2D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -591,7 +591,7 @@ public class TaskSerdeTest ); Assert.assertEquals("foo", task3.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P2D"), task3.getInterval()); + 
Assert.assertEquals(Intervals.of("2010-01-01/P2D"), task3.getInterval()); Assert.assertEquals(task3.getSegments(), segments); Assert.assertEquals(task.getAggregators(), task2.getAggregators()); } @@ -602,7 +602,7 @@ public class TaskSerdeTest final ArchiveTask task = new ArchiveTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), null ); @@ -612,7 +612,7 @@ public class TaskSerdeTest final ArchiveTask task2 = (ArchiveTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -626,7 +626,7 @@ public class TaskSerdeTest final RestoreTask task = new RestoreTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), null ); @@ -636,7 +636,7 @@ public class TaskSerdeTest final RestoreTask task2 = (RestoreTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(task.getId(), task2.getId()); Assert.assertEquals(task.getGroupId(), task2.getGroupId()); @@ -650,7 +650,7 @@ public class TaskSerdeTest final ConvertSegmentTask task = ConvertSegmentTask.create( new DataSegment( "dataSource", - Interval.parse("1990-01-01/1999-12-31"), + Intervals.of("1990-01-01/1999-12-31"), "version", ImmutableMap.of(), ImmutableList.of("dim1", "dim2"), @@ -674,7 +674,7 @@ public class TaskSerdeTest { final DataSegment segment = new DataSegment( "dataSource", - Interval.parse("1990-01-01/1999-12-31"), + Intervals.of("1990-01-01/1999-12-31"), "version", ImmutableMap.of(), ImmutableList.of("dim1", "dim2"), @@ -726,7 +726,7 @@ public class TaskSerdeTest final MoveTask task = new MoveTask( null, "foo", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), ImmutableMap.of("bucket", "hey", "baseKey", "what"), null, null @@ -738,7 +738,7 @@ public class TaskSerdeTest final MoveTask task2 = (MoveTask) jsonMapper.readValue(json, Task.class); Assert.assertEquals("foo", task.getDataSource()); - Assert.assertEquals(new Interval("2010-01-01/P1D"), task.getInterval()); + Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval()); Assert.assertEquals(ImmutableMap.of("bucket", "hey", "baseKey", "what"), task.getTargetLoadSpec()); Assert.assertEquals(task.getId(), task2.getId()); @@ -758,7 +758,7 @@ public class TaskSerdeTest "foo", null, new AggregatorFactory[0], new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P1D")) + ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), jsonMapper ), new HadoopIOConfig(ImmutableMap.of("paths", "bar"), null, null), null diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index 7b8b0c33c65..59f21640036 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -33,7 +33,8 @@ import com.google.inject.Binder; import com.google.inject.Guice; import 
com.google.inject.Module; import com.metamx.emitter.service.ServiceEmitter; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.JodaUtils; import io.druid.data.input.InputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.InputRowParser; @@ -323,7 +324,7 @@ public class IngestSegmentFirehoseFactoryTest new Object[]{ new IngestSegmentFirehoseFactory( DATA_SOURCE_NAME, - FOREVER, + Intervals.ETERNITY, new SelectorDimFilter(DIM_NAME, DIM_VALUE, null), dim_names, metric_names, @@ -399,7 +400,6 @@ public class IngestSegmentFirehoseFactoryTest } private static final Logger log = new Logger(IngestSegmentFirehoseFactoryTest.class); - private static final Interval FOREVER = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); private static final String DATA_SOURCE_NAME = "testDataSource"; private static final String DATA_SOURCE_VERSION = "version"; private static final Integer BINARY_VERSION = -1; @@ -450,7 +450,7 @@ public class IngestSegmentFirehoseFactoryTest Preconditions.checkArgument(shardNumber >= 0); return new DataSegment( DATA_SOURCE_NAME, - FOREVER, + Intervals.ETERNITY, DATA_SOURCE_VERSION, ImmutableMap.of( "type", "local", @@ -506,7 +506,7 @@ public class IngestSegmentFirehoseFactoryTest if (factory.getDimensions() != null) { Assert.assertArrayEquals(new String[]{DIM_NAME}, factory.getDimensions().toArray()); } - Assert.assertEquals(FOREVER, factory.getInterval()); + Assert.assertEquals(Intervals.ETERNITY, factory.getInterval()); if (factory.getMetrics() != null) { Assert.assertEquals( ImmutableSet.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME), diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java index 668564a0579..25c8fe77e4f 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java @@ -31,7 +31,6 @@ import com.google.inject.Binder; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Module; -import io.druid.common.utils.JodaUtils; import io.druid.data.input.Firehose; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; @@ -49,6 +48,9 @@ import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.actions.TaskActionClientFactory; import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.JodaUtils; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.filter.NoopDimFilter; import io.druid.segment.IndexIO; @@ -66,7 +68,6 @@ import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.apache.commons.io.FileUtils; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; @@ -172,7 +173,7 @@ public class IngestSegmentFirehoseFactoryTimelineTest return new TestCase( tmpDir, - new Interval(intervalString), + Intervals.of(intervalString), expectedCount, expectedSum, segments @@ -186,16 +187,16 @@ public class IngestSegmentFirehoseFactoryTimelineTest 
InputRow... rows ) { - return new DataSegmentMaker(new Interval(intervalString), version, partitionNum, Arrays.asList(rows)); + return new DataSegmentMaker(Intervals.of(intervalString), version, partitionNum, Arrays.asList(rows)); } private static InputRow IR(String timeString, long metricValue) { return new MapBasedInputRow( - new DateTime(timeString).getMillis(), + DateTimes.of(timeString).getMillis(), Arrays.asList(DIMENSIONS), ImmutableMap.of( - TIME_COLUMN, new DateTime(timeString).toString(), + TIME_COLUMN, DateTimes.of(timeString).toString(), DIMENSIONS[0], "bar", METRICS[0], metricValue ) diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java index 213fa3f5af3..7a3bb57a5f6 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/ImmutableWorkerInfoTest.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableSet; import io.druid.indexing.worker.Worker; import io.druid.jackson.DefaultObjectMapper; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; @@ -40,7 +40,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ); ObjectMapper mapper = new DefaultObjectMapper(); final ImmutableWorkerInfo serde = mapper.readValue( @@ -61,7 +61,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker", "192.0.0.1", 10, "v1" @@ -69,7 +69,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), true); // different worker same tasks @@ -80,7 +80,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker2", "192.0.0.1", 10, "v1" @@ -88,7 +88,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), false); // same worker different task groups @@ -99,7 +99,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp3", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker", "192.0.0.1", 10, "v1" @@ -107,7 +107,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), false); // same worker different tasks @@ -118,7 +118,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker2", "192.0.0.1", 10, "v1" 
@@ -126,7 +126,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task3"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), false); // same worker different capacity @@ -137,7 +137,7 @@ public class ImmutableWorkerInfoTest 3, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker2", "192.0.0.1", 10, "v1" @@ -145,7 +145,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), false); // same worker different lastCompletedTaskTime @@ -156,7 +156,7 @@ public class ImmutableWorkerInfoTest 3, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z") + DateTimes.of("2015-01-01T01:01:01Z") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker2", "192.0.0.1", 10, "v1" @@ -164,7 +164,7 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:02Z") + DateTimes.of("2015-01-01T01:01:02Z") ), false); // same worker different blacklistedUntil @@ -175,8 +175,8 @@ public class ImmutableWorkerInfoTest 3, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:01Z"), - new DateTime("2017-07-30") + DateTimes.of("2015-01-01T01:01:01Z"), + DateTimes.of("2017-07-30") ), new ImmutableWorkerInfo( new Worker( "http", "testWorker2", "192.0.0.1", 10, "v1" @@ -184,8 +184,8 @@ public class ImmutableWorkerInfoTest 2, ImmutableSet.of("grp1", "grp2"), ImmutableSet.of("task1", "task2"), - new DateTime("2015-01-01T01:01:02Z"), - new DateTime("2017-07-31") + DateTimes.of("2015-01-01T01:01:02Z"), + DateTimes.of("2017-07-31") ), false); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java index a168460769d..cb6d21ff74d 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RealtimeishTask.java @@ -31,6 +31,7 @@ import io.druid.indexing.common.actions.SegmentInsertAction; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.task.AbstractTask; import io.druid.indexing.common.task.TaskResource; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import org.joda.time.Interval; import org.junit.Assert; @@ -66,8 +67,8 @@ public class RealtimeishTask extends AbstractTask @Override public TaskStatus run(TaskToolbox toolbox) throws Exception { - final Interval interval1 = new Interval("2010-01-01T00/PT1H"); - final Interval interval2 = new Interval("2010-01-01T01/PT1H"); + final Interval interval1 = Intervals.of("2010-01-01T00/PT1H"); + final Interval interval2 = Intervals.of("2010-01-01T01/PT1H"); // Sort of similar to what realtime tasks do: diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java index 766163932f2..98eeea9657a 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java +++ 
b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTest.java @@ -39,10 +39,11 @@ import io.druid.indexing.common.task.Task; import io.druid.indexing.common.task.TaskResource; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.StringUtils; import org.apache.curator.framework.CuratorFramework; import org.apache.zookeeper.CreateMode; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Period; import org.junit.After; import org.junit.Assert; @@ -602,11 +603,11 @@ public class RemoteTaskRunnerTest public void testSortByInsertionTime() throws Exception { RemoteTaskRunnerWorkItem item1 = new RemoteTaskRunnerWorkItem("b", null, null) - .withQueueInsertionTime(new DateTime("2015-01-01T00:00:03Z")); + .withQueueInsertionTime(DateTimes.of("2015-01-01T00:00:03Z")); RemoteTaskRunnerWorkItem item2 = new RemoteTaskRunnerWorkItem("a", null, null) - .withQueueInsertionTime(new DateTime("2015-01-01T00:00:02Z")); + .withQueueInsertionTime(DateTimes.of("2015-01-01T00:00:02Z")); RemoteTaskRunnerWorkItem item3 = new RemoteTaskRunnerWorkItem("c", null, null) - .withQueueInsertionTime(new DateTime("2015-01-01T00:00:01Z")); + .withQueueInsertionTime(DateTimes.of("2015-01-01T00:00:01Z")); ArrayList workItems = Lists.newArrayList(item1, item2, item3); RemoteTaskRunner.sortByInsertionTime(workItems); Assert.assertEquals(item3, workItems.get(0)); @@ -626,11 +627,11 @@ public class RemoteTaskRunnerTest makeRemoteTaskRunner(rtrConfig); TestRealtimeTask task1 = new TestRealtimeTask( - "realtime1", - new TaskResource("realtime1", 1), - "foo", - TaskStatus.success("realtime1"), - jsonMapper + "realtime1", + new TaskResource("realtime1", 1), + "foo", + TaskStatus.success("realtime1"), + jsonMapper ); Future taskFuture1 = remoteTaskRunner.run(task1); Assert.assertTrue(taskAnnounced(task1.getId())); @@ -638,15 +639,17 @@ public class RemoteTaskRunnerTest mockWorkerCompleteFailedTask(task1); Assert.assertTrue(taskFuture1.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isFailure()); Assert.assertEquals(0, remoteTaskRunner.getBlackListedWorkers().size()); - Assert.assertEquals(1, - remoteTaskRunner.findWorkerRunningTask(task1.getId()).getContinuouslyFailedTasksCount()); + Assert.assertEquals( + 1, + remoteTaskRunner.findWorkerRunningTask(task1.getId()).getContinuouslyFailedTasksCount() + ); TestRealtimeTask task2 = new TestRealtimeTask( - "realtime2", - new TaskResource("realtime2", 1), - "foo", - TaskStatus.running("realtime2"), - jsonMapper + "realtime2", + new TaskResource("realtime2", 1), + "foo", + TaskStatus.running("realtime2"), + jsonMapper ); Future taskFuture2 = remoteTaskRunner.run(task2); Assert.assertTrue(taskAnnounced(task2.getId())); @@ -671,15 +674,17 @@ public class RemoteTaskRunnerTest // After backOffTime the nodes are removed from blacklist Assert.assertEquals(0, remoteTaskRunner.getBlackListedWorkers().size()); - Assert.assertEquals(0, - remoteTaskRunner.findWorkerRunningTask(task2.getId()).getContinuouslyFailedTasksCount()); + Assert.assertEquals( + 0, + remoteTaskRunner.findWorkerRunningTask(task2.getId()).getContinuouslyFailedTasksCount() + ); TestRealtimeTask task3 = new TestRealtimeTask( - "realtime3", - new TaskResource("realtime3", 1), - "foo", - TaskStatus.running("realtime3"), - jsonMapper + "realtime3", + new TaskResource("realtime3", 1), + "foo", + TaskStatus.running("realtime3"), + jsonMapper ); Future taskFuture3 = 
remoteTaskRunner.run(task3); Assert.assertTrue(taskAnnounced(task3.getId())); @@ -687,8 +692,10 @@ public class RemoteTaskRunnerTest mockWorkerCompleteSuccessfulTask(task3); Assert.assertTrue(taskFuture3.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isSuccess()); Assert.assertEquals(0, remoteTaskRunner.getBlackListedWorkers().size()); - Assert.assertEquals(0, - remoteTaskRunner.findWorkerRunningTask(task3.getId()).getContinuouslyFailedTasksCount()); + Assert.assertEquals( + 0, + remoteTaskRunner.findWorkerRunningTask(task3.getId()).getContinuouslyFailedTasksCount() + ); } /** @@ -708,17 +715,32 @@ public class RemoteTaskRunnerTest makeRemoteTaskRunner(rtrConfig); + String firstWorker = null; + String secondWorker = null; + for (int i = 1; i < 13; i++) { - String taskId = String.format("rt-%d", i); + String taskId = StringUtils.format("rt-%d", i); TestRealtimeTask task = new TestRealtimeTask( taskId, new TaskResource(taskId, 1), "foo", TaskStatus.success(taskId), jsonMapper ); Future taskFuture = remoteTaskRunner.run(task); - rtrTestUtils.taskAnnounced(i % 2 == 0 ? "worker2" : "worker", task.getId()); - rtrTestUtils.mockWorkerRunningTask(i % 2 == 0 ? "worker2" : "worker", task); - rtrTestUtils.mockWorkerCompleteFailedTask(i % 2 == 0 ? "worker2" : "worker", task); + if (i == 1) { + if (rtrTestUtils.taskAnnounced("worker2", task.getId())) { + firstWorker = "worker2"; + secondWorker = "worker"; + } else { + firstWorker = "worker"; + secondWorker = "worker2"; + } + } + + final String expectedWorker = i % 2 == 0 ? secondWorker : firstWorker; + + Assert.assertTrue(rtrTestUtils.taskAnnounced(expectedWorker, task.getId())); + rtrTestUtils.mockWorkerRunningTask(expectedWorker, task); + rtrTestUtils.mockWorkerCompleteFailedTask(expectedWorker, task); Assert.assertTrue(taskFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isFailure()); Assert.assertEquals(0, remoteTaskRunner.getBlackListedWorkers().size()); @@ -746,17 +768,32 @@ public class RemoteTaskRunnerTest makeRemoteTaskRunner(rtrConfig); + String firstWorker = null; + String secondWorker = null; + for (int i = 1; i < 13; i++) { - String taskId = String.format("rt-%d", i); + String taskId = StringUtils.format("rt-%d", i); TestRealtimeTask task = new TestRealtimeTask( taskId, new TaskResource(taskId, 1), "foo", TaskStatus.success(taskId), jsonMapper ); Future taskFuture = remoteTaskRunner.run(task); - rtrTestUtils.taskAnnounced(i % 2 == 0 || i > 4 ? "worker2" : "worker", task.getId()); - rtrTestUtils.mockWorkerRunningTask(i % 2 == 0 || i > 4 ? "worker2" : "worker", task); - rtrTestUtils.mockWorkerCompleteFailedTask(i % 2 == 0 || i > 4 ? "worker2" : "worker", task); + if (i == 1) { + if (rtrTestUtils.taskAnnounced("worker2", task.getId())) { + firstWorker = "worker2"; + secondWorker = "worker"; + } else { + firstWorker = "worker"; + secondWorker = "worker2"; + } + } + + final String expectedWorker = i % 2 == 0 || i > 4 ? secondWorker : firstWorker; + + Assert.assertTrue(rtrTestUtils.taskAnnounced(expectedWorker, task.getId())); + rtrTestUtils.mockWorkerRunningTask(expectedWorker, task); + rtrTestUtils.mockWorkerCompleteFailedTask(expectedWorker, task); Assert.assertTrue(taskFuture.get(TIMEOUT_SECONDS, TimeUnit.SECONDS).isFailure()); Assert.assertEquals(i > 2 ? 
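The blacklisting tests above stop hard-coding which worker ("worker" vs "worker2") receives the first task. Instead they probe the first announcement, remember which worker won, and then alternate deterministically from there. A stripped-down sketch of that pattern with a hypothetical stand-in (`announcedOn(...)` is illustrative, not the `RemoteTaskRunnerTestUtils` API):

```java
import java.util.ArrayList;
import java.util.List;

public class AlternatingWorkerSketch
{
  /** Hypothetical stand-in for "did this worker announce the task first?". */
  static boolean announcedOn(String workerId, int taskNumber)
  {
    return taskNumber % 2 == 1 ? "workerA".equals(workerId) : "workerB".equals(workerId);
  }

  public static void main(String[] args)
  {
    String firstWorker = null;
    String secondWorker = null;
    final List<String> assignments = new ArrayList<>();

    for (int i = 1; i < 13; i++) {
      if (i == 1) {
        // Learn which worker actually took the first task instead of assuming it.
        if (announcedOn("workerB", i)) {
          firstWorker = "workerB";
          secondWorker = "workerA";
        } else {
          firstWorker = "workerA";
          secondWorker = "workerB";
        }
      }
      // Even task numbers go to the "other" worker, odd ones to the first winner.
      assignments.add(i % 2 == 0 ? secondWorker : firstWorker);
    }
    System.out.println(assignments);
  }
}
```

This removes the flakiness that appears when the runner happens to hand the first task to the other worker.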
1 : 0, remoteTaskRunner.getBlackListedWorkers().size()); diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java index aeff81a1a17..b39b4c93f42 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java @@ -125,7 +125,7 @@ public class RemoteTaskRunnerTestUtils { return basePath; } - }, null, null, null, null, null + }, null, null, null, null ), cf, new PathChildrenCacheFactory.Builder(), @@ -212,6 +212,12 @@ public class RemoteTaskRunnerTestUtils throw Throwables.propagate(e); } } + + @Override + public String toString() + { + return StringUtils.format("Path[%s] exists", path); + } } ); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java index 1aa8a3f51ba..8d38b8abc10 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java @@ -74,7 +74,9 @@ import io.druid.indexing.overlord.config.TaskQueueConfig; import io.druid.indexing.overlord.supervisor.SupervisorManager; import io.druid.indexing.test.TestIndexerMetadataStorageCoordinator; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.RE; import io.druid.java.util.common.StringUtils; @@ -182,7 +184,7 @@ public class TaskLifecycleTest return Comparators.intervalsByStartThenEnd().compare(dataSegment.getInterval(), dataSegment2.getInterval()); } }; - private static DateTime now = new DateTime(); + private static DateTime now = DateTimes.nowUtc(); private static final Iterable realtimeIdxTaskInputRows = ImmutableList.of( IR(now.toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 1.0f), @@ -234,7 +236,7 @@ public class TaskLifecycleTest private static InputRow IR(String dt, String dim1, String dim2, float met) { return new MapBasedInputRow( - new DateTime(dt).getMillis(), + DateTimes.of(dt).getMillis(), ImmutableList.of("dim1", "dim2"), ImmutableMap.of( "dim1", dim1, @@ -661,7 +663,7 @@ public class TaskLifecycleTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P2D")) + ImmutableList.of(Intervals.of("2010-01-01/P2D")) ), mapper ), @@ -686,7 +688,7 @@ public class TaskLifecycleTest Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size()); Assert.assertEquals("segment1 datasource", "foo", publishedSegments.get(0).getDataSource()); - Assert.assertEquals("segment1 interval", new Interval("2010-01-01/P1D"), publishedSegments.get(0).getInterval()); + Assert.assertEquals("segment1 interval", Intervals.of("2010-01-01/P1D"), publishedSegments.get(0).getInterval()); Assert.assertEquals( "segment1 dimensions", ImmutableList.of("dim1", "dim2"), @@ -695,7 +697,7 @@ public class TaskLifecycleTest Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics()); Assert.assertEquals("segment2 datasource", "foo", publishedSegments.get(1).getDataSource()); - Assert.assertEquals("segment2 interval", new Interval("2010-01-02/P1D"), 
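The hunks above also replace `String.format` with `StringUtils.format` (and use it in the new `toString()` of the ZK-path condition). The usual motivation for such a helper is formatting with a fixed locale so test output does not change with the JVM's default locale; that motivation is an assumption here, so the sketch below uses plain `java.util.Locale` to show the effect being avoided:

```java
import java.util.Locale;

public class LocaleSafeFormatSketch
{
  public static void main(String[] args)
  {
    Locale.setDefault(Locale.FRANCE);

    // Default-locale formatting: the decimal separator follows the JVM's locale.
    System.out.println(String.format("%.1f tasks/sec", 2.5));              // "2,5 tasks/sec"

    // Fixed-locale formatting, which a helper like StringUtils.format is assumed to do.
    System.out.println(String.format(Locale.ROOT, "%.1f tasks/sec", 2.5)); // "2.5 tasks/sec"
  }
}
```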
publishedSegments.get(1).getInterval()); + Assert.assertEquals("segment2 interval", Intervals.of("2010-01-02/P1D"), publishedSegments.get(1).getInterval()); Assert.assertEquals( "segment2 dimensions", ImmutableList.of("dim1", "dim2"), @@ -718,7 +720,7 @@ public class TaskLifecycleTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P1D")) + ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), mapper ), @@ -750,7 +752,7 @@ public class TaskLifecycleTest @Override public DataSegment apply(String input) { - final Interval interval = new Interval(input); + final Interval interval = Intervals.of(input); try { return DataSegment.builder() .dataSource("test_kill_task") @@ -789,13 +791,13 @@ public class TaskLifecycleTest // manually create local segments files List segmentFiles = Lists.newArrayList(); - for (DataSegment segment : mdc.getUnusedSegmentsForInterval("test_kill_task", new Interval("2011-04-01/P4D"))) { + for (DataSegment segment : mdc.getUnusedSegmentsForInterval("test_kill_task", Intervals.of("2011-04-01/P4D"))) { File file = new File((String) segment.getLoadSpec().get("path")); file.mkdirs(); segmentFiles.add(file); } - final Task killTask = new KillTask(null, "test_kill_task", new Interval("2011-04-01/P4D"), null); + final Task killTask = new KillTask(null, "test_kill_task", Intervals.of("2011-04-01/P4D"), null); final TaskStatus status = runTask(killTask); Assert.assertEquals("merged statusCode", TaskStatus.Status.SUCCESS, status.getStatusCode()); @@ -860,7 +862,7 @@ public class TaskLifecycleTest "id1", new TaskResource("id1", 1), "ds", - new Interval("2012-01-01/P1D"), + Intervals.of("2012-01-01/P1D"), null ) { @@ -880,7 +882,7 @@ public class TaskLifecycleTest final DataSegment segment = DataSegment.builder() .dataSource("ds") - .interval(new Interval("2012-01-01/P1D")) + .interval(Intervals.of("2012-01-01/P1D")) .version(myLock.getVersion()) .build(); @@ -899,7 +901,7 @@ public class TaskLifecycleTest @Test public void testBadInterval() throws Exception { - final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", new Interval("2012-01-01/P1D"), null) + final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", Intervals.of("2012-01-01/P1D"), null) { @Override public String getType() @@ -914,7 +916,7 @@ public class TaskLifecycleTest final DataSegment segment = DataSegment.builder() .dataSource("ds") - .interval(new Interval("2012-01-01/P2D")) + .interval(Intervals.of("2012-01-01/P2D")) .version(myLock.getVersion()) .build(); @@ -933,7 +935,7 @@ public class TaskLifecycleTest @Test public void testBadVersion() throws Exception { - final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", new Interval("2012-01-01/P1D"), null) + final Task task = new AbstractFixedIntervalTask("id1", "id1", "ds", Intervals.of("2012-01-01/P1D"), null) { @Override public String getType() @@ -948,7 +950,7 @@ public class TaskLifecycleTest final DataSegment segment = DataSegment.builder() .dataSource("ds") - .interval(new Interval("2012-01-01/P1D")) + .interval(Intervals.of("2012-01-01/P1D")) .version(myLock.getVersion() + "1!!!1!!") .build(); @@ -1002,7 +1004,7 @@ public class TaskLifecycleTest Assert.assertEquals("test_ds", segment.getDataSource()); Assert.assertEquals(ImmutableList.of("dim1", "dim2"), segment.getDimensions()); Assert.assertEquals( - new Interval(now.toString("YYYY-MM-dd") + "/" + now.plusDays(1).toString("YYYY-MM-dd")), + Intervals.of(now.toString("YYYY-MM-dd") + "/" + 
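The `TaskLifecycleTest` hunks above build their test segments with `Intervals.of(...)` instead of `new Interval(...)`. A minimal sketch of the same builder chain the patched tests use (`DataSegment.builder().dataSource(...).interval(...).version(...)`), assuming the Druid test classpath; the version string here is a placeholder rather than a real lock version:

```java
import io.druid.java.util.common.Intervals;
import io.druid.timeline.DataSegment;

public class SegmentIntervalSketch
{
  public static void main(String[] args)
  {
    // Same builder chain as the patched test: dataSource + interval + version.
    final DataSegment segment = DataSegment.builder()
        .dataSource("ds")
        .interval(Intervals.of("2012-01-01/P1D"))
        .version("v1")
        .build();

    System.out.println(segment.getDataSource() + " " + segment.getInterval());
  }
}
```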
now.plusDays(1).toString("YYYY-MM-dd")), segment.getInterval() ); Assert.assertEquals(ImmutableList.of("count"), segment.getMetrics()); @@ -1082,7 +1084,7 @@ public class TaskLifecycleTest new UniformGranularitySpec( Granularities.DAY, null, - ImmutableList.of(new Interval("2010-01-01/P2D")) + ImmutableList.of(Intervals.of("2010-01-01/P2D")) ), mapper ), @@ -1116,7 +1118,7 @@ public class TaskLifecycleTest Assert.assertEquals("num segments nuked", 0, mdc.getNuked().size()); Assert.assertEquals("segment1 datasource", "foo", publishedSegments.get(0).getDataSource()); - Assert.assertEquals("segment1 interval", new Interval("2010-01-01/P1D"), publishedSegments.get(0).getInterval()); + Assert.assertEquals("segment1 interval", Intervals.of("2010-01-01/P1D"), publishedSegments.get(0).getInterval()); Assert.assertEquals( "segment1 dimensions", ImmutableList.of("dim1", "dim2"), @@ -1125,7 +1127,7 @@ public class TaskLifecycleTest Assert.assertEquals("segment1 metrics", ImmutableList.of("met"), publishedSegments.get(0).getMetrics()); Assert.assertEquals("segment2 datasource", "foo", publishedSegments.get(1).getDataSource()); - Assert.assertEquals("segment2 interval", new Interval("2010-01-02/P1D"), publishedSegments.get(1).getInterval()); + Assert.assertEquals("segment2 interval", Intervals.of("2010-01-02/P1D"), publishedSegments.get(1).getInterval()); Assert.assertEquals( "segment2 dimensions", ImmutableList.of("dim1", "dim2"), diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java index f30b15d6a7f..d7fceb6a1ec 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java @@ -31,12 +31,12 @@ import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.common.task.Task; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.metadata.EntryExistsException; import io.druid.metadata.SQLMetadataStorageActionHandlerFactory; import io.druid.metadata.TestDerbyConnector; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -84,13 +84,13 @@ public class TaskLockboxTest { Task task = NoopTask.create(); lockbox.add(task); - Assert.assertNotNull(lockbox.lock(task, new Interval("2015-01-01/2015-01-02"))); + Assert.assertNotNull(lockbox.lock(task, Intervals.of("2015-01-01/2015-01-02"))); } @Test(expected = IllegalStateException.class) public void testLockForInactiveTask() throws InterruptedException { - lockbox.lock(NoopTask.create(), new Interval("2015-01-01/2015-01-02")); + lockbox.lock(NoopTask.create(), Intervals.of("2015-01-01/2015-01-02")); } @Test @@ -101,7 +101,7 @@ public class TaskLockboxTest exception.expectMessage("Unable to grant lock to inactive Task"); lockbox.add(task); lockbox.remove(task); - lockbox.lock(task, new Interval("2015-01-01/2015-01-02")); + lockbox.lock(task, Intervals.of("2015-01-01/2015-01-02")); } @Test @@ -109,18 +109,18 @@ public class TaskLockboxTest { Task task = NoopTask.create(); lockbox.add(task); - Assert.assertTrue(lockbox.tryLock(task, new Interval("2015-01-01/2015-01-03")).isPresent()); + Assert.assertTrue(lockbox.tryLock(task, Intervals.of("2015-01-01/2015-01-03")).isPresent()); // try to take lock for task 2 for 
overlapping interval Task task2 = NoopTask.create(); lockbox.add(task2); - Assert.assertFalse(lockbox.tryLock(task2, new Interval("2015-01-01/2015-01-02")).isPresent()); + Assert.assertFalse(lockbox.tryLock(task2, Intervals.of("2015-01-01/2015-01-02")).isPresent()); // task 1 unlocks the lock lockbox.remove(task); // Now task2 should be able to get the lock - Assert.assertTrue(lockbox.tryLock(task2, new Interval("2015-01-01/2015-01-02")).isPresent()); + Assert.assertTrue(lockbox.tryLock(task2, Intervals.of("2015-01-01/2015-01-02")).isPresent()); } @Test @@ -128,17 +128,17 @@ public class TaskLockboxTest { Task task = NoopTask.create(); lockbox.add(task); - Optional lock1 = lockbox.tryLock(task, new Interval("2015-01-01/2015-01-03")); + Optional lock1 = lockbox.tryLock(task, Intervals.of("2015-01-01/2015-01-03")); Assert.assertTrue(lock1.isPresent()); - Assert.assertEquals(new Interval("2015-01-01/2015-01-03"), lock1.get().getInterval()); + Assert.assertEquals(Intervals.of("2015-01-01/2015-01-03"), lock1.get().getInterval()); // same task tries to take partially overlapping interval; should fail - Assert.assertFalse(lockbox.tryLock(task, new Interval("2015-01-02/2015-01-04")).isPresent()); + Assert.assertFalse(lockbox.tryLock(task, Intervals.of("2015-01-02/2015-01-04")).isPresent()); // same task tries to take contained interval; should succeed and should match the original lock - Optional lock2 = lockbox.tryLock(task, new Interval("2015-01-01/2015-01-02")); + Optional lock2 = lockbox.tryLock(task, Intervals.of("2015-01-01/2015-01-02")); Assert.assertTrue(lock2.isPresent()); - Assert.assertEquals(new Interval("2015-01-01/2015-01-03"), lock2.get().getInterval()); + Assert.assertEquals(Intervals.of("2015-01-01/2015-01-03"), lock2.get().getInterval()); // only the first lock should actually exist Assert.assertEquals( @@ -151,7 +151,7 @@ public class TaskLockboxTest @Test(expected = IllegalStateException.class) public void testTryLockForInactiveTask() { - Assert.assertFalse(lockbox.tryLock(NoopTask.create(), new Interval("2015-01-01/2015-01-02")).isPresent()); + Assert.assertFalse(lockbox.tryLock(NoopTask.create(), Intervals.of("2015-01-01/2015-01-02")).isPresent()); } @Test @@ -162,7 +162,7 @@ public class TaskLockboxTest exception.expectMessage("Unable to grant lock to inactive Task"); lockbox.add(task); lockbox.remove(task); - Assert.assertFalse(lockbox.tryLock(task, new Interval("2015-01-01/2015-01-02")).isPresent()); + Assert.assertFalse(lockbox.tryLock(task, Intervals.of("2015-01-01/2015-01-02")).isPresent()); } @Test @@ -173,8 +173,8 @@ public class TaskLockboxTest lockbox.add(task1); lockbox.add(task2); - Assert.assertNotNull(lockbox.lock(task1, new Interval("2015-01-01/2015-01-02"), 5000)); - Assert.assertNull(lockbox.lock(task2, new Interval("2015-01-01/2015-01-15"), 1000)); + lockbox.lock(task1, Intervals.of("2015-01-01/2015-01-02"), 5000); + lockbox.lock(task2, Intervals.of("2015-01-01/2015-01-15"), 5000); } @Test @@ -186,7 +186,7 @@ public class TaskLockboxTest taskStorage.insert(task, TaskStatus.running(task.getId())); originalBox.add(task); Assert.assertTrue( - originalBox.tryLock(task, new Interval(StringUtils.format("2017-01-0%d/2017-01-0%d", (i + 1), (i + 2)))) + originalBox.tryLock(task, Intervals.of(StringUtils.format("2017-01-0%d/2017-01-0%d", (i + 1), (i + 2)))) .isPresent() ); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java 
b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java index 3604838ec0d..b2f3a8be177 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java @@ -44,6 +44,7 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.Period; @@ -86,7 +87,7 @@ public class PendingTaskBasedProvisioningStrategyTest workerConfig = new AtomicReference<>( new WorkerBehaviorConfig( - new FillCapacityWorkerSelectStrategy(), + new FillCapacityWorkerSelectStrategy(null), autoScaler ) ); @@ -338,7 +339,7 @@ public class PendingTaskBasedProvisioningStrategyTest testTask.getId(), null, TaskLocation.unknown() - ).withQueueInsertionTime(new DateTime()) + ).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java index 3dfc4201657..5701fa34ad7 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java @@ -42,6 +42,7 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.Period; @@ -123,7 +124,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ); EasyMock.expect(runner.getWorkers()).andReturn( @@ -160,7 +161,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( @@ -218,7 +219,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( @@ -270,7 +271,7 
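As the hunk above shows, the worker select strategies now take an affinity argument in their constructors: the provisioning tests pass `null` for "no affinity", while the affinity-aware tests pass an `AffinityConfig` mapping datasource to preferred hosts plus a strong/weak flag. A short construction sketch, assuming the constructors exactly as they appear in these test hunks:

```java
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.druid.indexing.overlord.setup.AffinityConfig;
import io.druid.indexing.overlord.setup.EqualDistributionWorkerSelectStrategy;
import io.druid.indexing.overlord.setup.FillCapacityWorkerSelectStrategy;

public class StrategyConstructionSketch
{
  public static void main(String[] args)
  {
    // No affinity at all: pass null, as the provisioning tests above now do.
    final FillCapacityWorkerSelectStrategy plain = new FillCapacityWorkerSelectStrategy(null);

    // Weak affinity (strong = false): "foo" prefers localhost1 but may spill elsewhere.
    final EqualDistributionWorkerSelectStrategy weak = new EqualDistributionWorkerSelectStrategy(
        new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost1")), false)
    );

    System.out.println(plain + " / " + weak);
  }
}
```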
@@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( @@ -314,7 +315,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( @@ -365,7 +366,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( @@ -472,7 +473,7 @@ public class SimpleProvisioningStrategyTest RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); EasyMock.expect(runner.getPendingTasks()).andReturn( Collections.singletonList( - new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(new DateTime()) + new RemoteTaskRunnerWorkItem(testTask.getId(), null, null).withQueueInsertionTime(DateTimes.nowUtc()) ) ).times(2); EasyMock.expect(runner.getWorkers()).andReturn( diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java index 2f2a79b0990..94b2fb1f283 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/http/OverlordTest.java @@ -31,6 +31,7 @@ import com.metamx.emitter.service.ServiceEmitter; import io.druid.concurrent.Execs; import io.druid.curator.PotentiallyGzippedCompressionProvider; import io.druid.curator.discovery.NoopServiceAnnouncer; +import io.druid.discovery.DruidLeaderSelector; import io.druid.indexing.common.TaskLocation; import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.actions.TaskActionClientFactory; @@ -154,11 +155,10 @@ public class OverlordTest taskCompletionCountDownLatches[0] = new CountDownLatch(1); taskCompletionCountDownLatches[1] = new CountDownLatch(1); announcementLatch = new CountDownLatch(1); - IndexerZkConfig indexerZkConfig = new IndexerZkConfig(new ZkPathsConfig(), null, null, null, null, null); + IndexerZkConfig indexerZkConfig = new IndexerZkConfig(new ZkPathsConfig(), null, null, null, null); setupServerAndCurator(); curator.start(); curator.blockUntilConnected(); - curator.create().creatingParentsIfNeeded().forPath(indexerZkConfig.getLeaderLatchPath()); druidNode = new DruidNode("hey", "what", 1234, null, new ServerConfig()); ServiceEmitter serviceEmitter = new NoopServiceEmitter(); taskMaster = new TaskMaster( @@ -167,7 +167,6 @@ public class OverlordTest taskStorage, taskActionClientFactory, 
druidNode, - indexerZkConfig, new TaskRunnerFactory() { @Override @@ -176,7 +175,6 @@ public class OverlordTest return new MockTaskRunner(runTaskCountDownLatches, taskCompletionCountDownLatches); } }, - curator, new NoopServiceAnnouncer() { @Override @@ -188,7 +186,8 @@ public class OverlordTest new CoordinatorOverlordServiceConfig(null, null), serviceEmitter, supervisorManager, - EasyMock.createNiceMock(OverlordHelperManager.class) + EasyMock.createNiceMock(OverlordHelperManager.class), + new TestDruidLeaderSelector() ); EmittingLogger.registerEmitter(serviceEmitter); } @@ -426,4 +425,44 @@ public class OverlordTest //Do nothing } } + + private static class TestDruidLeaderSelector implements DruidLeaderSelector + { + private volatile Listener listener; + private volatile String leader; + + @Override + public String getCurrentLeader() + { + return leader; + } + + @Override + public boolean isLeader() + { + return leader != null; + } + + @Override + public int localTerm() + { + return 0; + } + + @Override + public void registerListener(Listener listener) + { + this.listener = listener; + + leader = "what:1234"; + listener.becomeLeader(); + } + + @Override + public void unregisterListener() + { + leader = null; + listener.stopBeingLeader(); + } + } } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java index 722e5637a79..a98deebf75b 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java @@ -19,58 +19,58 @@ package io.druid.indexing.overlord.setup; -import com.google.common.base.Optional; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.worker.Worker; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; +import io.druid.segment.TestHelper; import org.junit.Assert; import org.junit.Test; -import java.util.Arrays; - public class EqualDistributionWithAffinityWorkerSelectStrategyTest { @Test public void testFindWorkerForTask() throws Exception { - EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWithAffinityWorkerSelectStrategy( - new AffinityConfig(ImmutableMap.of("foo", Arrays.asList("localhost1", "localhost2", "localhost3"))) + EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWithAffinityWorkerSelectStrategy( + new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost1", "localhost2", "localhost3")), false) ); - Optional optional = strategy.findWorkerForTask( + ImmutableWorkerInfo worker = strategy.findWorkerForTask( new RemoteTaskRunnerConfig(), ImmutableMap.of( "localhost0", new ImmutableWorkerInfo( - new Worker("http", "localhost0", "localhost0", 2, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), - DateTime.now() + new Worker("http", "localhost0", "localhost0", 2, "v1"), 0, + Sets.newHashSet(), + Sets.newHashSet(), + DateTimes.nowUtc() ), "localhost1", new ImmutableWorkerInfo( 
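`OverlordTest` above now injects a `DruidLeaderSelector` stub instead of creating a Curator leader latch; the stub flips leadership as soon as a listener registers. The following self-contained sketch mirrors that seam with simplified stand-ins (these interfaces are illustrative only, not the `io.druid.discovery.DruidLeaderSelector` API):

```java
public class LeaderSelectorStubSketch
{
  interface Listener
  {
    void becomeLeader();

    void stopBeingLeader();
  }

  static class TestLeaderSelector
  {
    private volatile Listener listener;
    private volatile String leader;

    boolean isLeader()
    {
      return leader != null;
    }

    void registerListener(Listener listener)
    {
      // Registering immediately makes this node leader, like the test stub above.
      this.listener = listener;
      leader = "what:1234";
      listener.becomeLeader();
    }

    void unregisterListener()
    {
      leader = null;
      listener.stopBeingLeader();
    }
  }

  public static void main(String[] args)
  {
    final TestLeaderSelector selector = new TestLeaderSelector();
    selector.registerListener(new Listener()
    {
      @Override
      public void becomeLeader()
      {
        System.out.println("became leader");
      }

      @Override
      public void stopBeingLeader()
      {
        System.out.println("stopped being leader");
      }
    });
    System.out.println("isLeader = " + selector.isLeader());
    selector.unregisterListener();
  }
}
```

Hiding leader election behind this seam lets the test become leader synchronously, without ZooKeeper or Curator plumbing.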
new Worker("http", "localhost1", "localhost1", 2, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost2", new ImmutableWorkerInfo( new Worker("http", "localhost2", "localhost2", 2, "v1"), 1, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost3", new ImmutableWorkerInfo( new Worker("http", "localhost3", "localhost3", 2, "v1"), 1, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -82,7 +82,6 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest } } ); - ImmutableWorkerInfo worker = optional.get(); Assert.assertEquals("localhost1", worker.getWorker().getHost()); } @@ -90,10 +89,10 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest public void testFindWorkerForTaskWithNulls() throws Exception { EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWithAffinityWorkerSelectStrategy( - new AffinityConfig(ImmutableMap.of("foo", Arrays.asList("localhost"))) + new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) ); - Optional optional = strategy.findWorkerForTask( + ImmutableWorkerInfo worker = strategy.findWorkerForTask( new RemoteTaskRunnerConfig(), ImmutableMap.of( "lhost", @@ -101,19 +100,18 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) ); - ImmutableWorkerInfo worker = optional.get(); Assert.assertEquals("lhost", worker.getWorker().getHost()); } @@ -121,10 +119,10 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest public void testIsolation() throws Exception { EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWithAffinityWorkerSelectStrategy( - new AffinityConfig(ImmutableMap.of("foo", Arrays.asList("localhost"))) + new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) ); - Optional optional = strategy.findWorkerForTask( + ImmutableWorkerInfo worker = strategy.findWorkerForTask( new RemoteTaskRunnerConfig(), ImmutableMap.of( "localhost", @@ -132,11 +130,25 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) ); - Assert.assertFalse(optional.isPresent()); + Assert.assertNull(worker); + } + + @Test + public void testSerde() throws Exception + { + final ObjectMapper objectMapper = TestHelper.getJsonMapper(); + final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWithAffinityWorkerSelectStrategy( + new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) + ); + final WorkerSelectStrategy strategy2 = objectMapper.readValue( + objectMapper.writeValueAsBytes(strategy), + WorkerSelectStrategy.class + ); + Assert.assertEquals(strategy, strategy2); } } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java index 
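The new `testSerde` above round-trips the strategy through Jackson and compares with `equals()`. A tiny self-contained sketch of the same pattern using plain Jackson and an illustrative bean (the real test round-trips a `WorkerSelectStrategy`):

```java
import com.fasterxml.jackson.databind.ObjectMapper;

public class SerdeRoundTripSketch
{
  // Illustrative config bean standing in for the strategy being round-tripped.
  public static class Config
  {
    public String datasource = "foo";
    public boolean strong = false;

    @Override
    public boolean equals(Object o)
    {
      return o instanceof Config
             && ((Config) o).datasource.equals(datasource)
             && ((Config) o).strong == strong;
    }

    @Override
    public int hashCode()
    {
      return datasource.hashCode() * 31 + (strong ? 1 : 0);
    }
  }

  public static void main(String[] args) throws Exception
  {
    final ObjectMapper mapper = new ObjectMapper();
    final Config original = new Config();

    // Same pattern as the new testSerde: serialize, deserialize, compare with equals().
    final Config copy = mapper.readValue(mapper.writeValueAsBytes(original), Config.class);
    System.out.println(original.equals(copy));
  }
}
```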
fad4d5ec990..286aa511ba9 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java @@ -19,26 +19,57 @@ package io.druid.indexing.overlord.setup; -import com.google.common.base.Optional; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.worker.Worker; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; public class EqualDistributionWorkerSelectStrategyTest { + private static final ImmutableMap WORKERS_FOR_AFFINITY_TESTS = + ImmutableMap.of( + "localhost0", + new ImmutableWorkerInfo( + new Worker("http", "localhost0", "localhost0", 2, "v1"), 0, + Sets.newHashSet(), + Sets.newHashSet(), + DateTimes.nowUtc() + ), + "localhost1", + new ImmutableWorkerInfo( + new Worker("http", "localhost1", "localhost1", 2, "v1"), 0, + Sets.newHashSet(), + Sets.newHashSet(), + DateTimes.nowUtc() + ), + "localhost2", + new ImmutableWorkerInfo( + new Worker("http", "localhost2", "localhost2", 2, "v1"), 1, + Sets.newHashSet(), + Sets.newHashSet(), + DateTimes.nowUtc() + ), + "localhost3", + new ImmutableWorkerInfo( + new Worker("http", "localhost3", "localhost3", 2, "v1"), 1, + Sets.newHashSet(), + Sets.newHashSet(), + DateTimes.nowUtc() + ) + ); @Test public void testFindWorkerForTask() throws Exception { - final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(); + final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(null); - Optional optional = strategy.findWorkerForTask( + ImmutableWorkerInfo worker = strategy.findWorkerForTask( new RemoteTaskRunnerConfig(), ImmutableMap.of( "lhost", @@ -46,14 +77,14 @@ public class EqualDistributionWorkerSelectStrategyTest new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 1, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -65,16 +96,15 @@ public class EqualDistributionWorkerSelectStrategyTest } } ); - ImmutableWorkerInfo worker = optional.get(); Assert.assertEquals("lhost", worker.getWorker().getHost()); } @Test public void testFindWorkerForTaskWhenSameCurrCapacityUsed() throws Exception { - final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(); + final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(null); - Optional optional = strategy.findWorkerForTask( + ImmutableWorkerInfo worker = strategy.findWorkerForTask( new RemoteTaskRunnerConfig(), ImmutableMap.of( "lhost", @@ -82,14 +112,14 @@ public class EqualDistributionWorkerSelectStrategyTest new Worker("http", "lhost", "lhost", 5, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 10, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + 
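Throughout these strategy tests, `findWorkerForTask` now returns the worker directly, or `null`, instead of a Guava `Optional`, so assertions switch from `isPresent()`/`get()` to `assertNull`/`assertEquals`. A tiny self-contained sketch of the caller-side difference (`findWorkerOptional` and `findWorkerNullable` are illustrative stand-ins):

```java
import com.google.common.base.Optional;

public class NullableReturnSketch
{
  // Before: callers unwrapped a Guava Optional.
  static Optional<String> findWorkerOptional(boolean available)
  {
    return available ? Optional.of("localhost1") : Optional.<String>absent();
  }

  // After: callers get the value or null and assert accordingly.
  static String findWorkerNullable(boolean available)
  {
    return available ? "localhost1" : null;
  }

  public static void main(String[] args)
  {
    final Optional<String> before = findWorkerOptional(false);
    System.out.println("before: present=" + before.isPresent());

    final String after = findWorkerNullable(false);
    System.out.println("after: " + (after == null ? "no worker" : after));
  }
}
```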
DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -101,7 +131,6 @@ public class EqualDistributionWorkerSelectStrategyTest } } ); - ImmutableWorkerInfo worker = optional.get(); Assert.assertEquals("localhost", worker.getWorker().getHost()); } @@ -109,9 +138,9 @@ public class EqualDistributionWorkerSelectStrategyTest public void testOneDisableWorkerDifferentUsedCapacity() throws Exception { String DISABLED_VERSION = ""; - final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(); + final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(null); - Optional optional = strategy.findWorkerForTask( + ImmutableWorkerInfo worker = strategy.findWorkerForTask( new RemoteTaskRunnerConfig(), ImmutableMap.of( "lhost", @@ -119,14 +148,14 @@ public class EqualDistributionWorkerSelectStrategyTest new Worker("http", "disableHost", "disableHost", 10, DISABLED_VERSION), 2, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "enableHost", "enableHost", 10, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -138,7 +167,6 @@ public class EqualDistributionWorkerSelectStrategyTest } } ); - ImmutableWorkerInfo worker = optional.get(); Assert.assertEquals("enableHost", worker.getWorker().getHost()); } @@ -146,9 +174,9 @@ public class EqualDistributionWorkerSelectStrategyTest public void testOneDisableWorkerSameUsedCapacity() throws Exception { String DISABLED_VERSION = ""; - final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(); + final EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy(null); - Optional optional = strategy.findWorkerForTask( + ImmutableWorkerInfo worker = strategy.findWorkerForTask( new RemoteTaskRunnerConfig(), ImmutableMap.of( "lhost", @@ -156,14 +184,14 @@ public class EqualDistributionWorkerSelectStrategyTest new Worker("http", "disableHost", "disableHost", 10, DISABLED_VERSION), 5, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "enableHost", "enableHost", 10, "v1"), 5, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -175,7 +203,90 @@ public class EqualDistributionWorkerSelectStrategyTest } } ); - ImmutableWorkerInfo worker = optional.get(); Assert.assertEquals("enableHost", worker.getWorker().getHost()); } + + @Test + public void testWeakAffinity() + { + EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy( + new AffinityConfig( + ImmutableMap.of( + "foo", ImmutableSet.of("localhost1", "localhost2", "localhost3"), + "bar", ImmutableSet.of("nonexistent-worker") + ), + false + ) + ); + + ImmutableWorkerInfo workerFoo = strategy.findWorkerForTask( + new RemoteTaskRunnerConfig(), + WORKERS_FOR_AFFINITY_TESTS, + createDummyTask("foo") + ); + Assert.assertEquals("localhost1", workerFoo.getWorker().getHost()); + + // With weak affinity, bar (which has no affinity workers available) can use a non-affinity worker. 
+ ImmutableWorkerInfo workerBar = strategy.findWorkerForTask( + new RemoteTaskRunnerConfig(), + WORKERS_FOR_AFFINITY_TESTS, + createDummyTask("bar") + ); + Assert.assertEquals("localhost0", workerBar.getWorker().getHost()); + + ImmutableWorkerInfo workerBaz = strategy.findWorkerForTask( + new RemoteTaskRunnerConfig(), + WORKERS_FOR_AFFINITY_TESTS, + createDummyTask("baz") + ); + Assert.assertEquals("localhost0", workerBaz.getWorker().getHost()); + } + + @Test + public void testStrongAffinity() + { + EqualDistributionWorkerSelectStrategy strategy = new EqualDistributionWorkerSelectStrategy( + new AffinityConfig( + ImmutableMap.of( + "foo", ImmutableSet.of("localhost1", "localhost2", "localhost3"), + "bar", ImmutableSet.of("nonexistent-worker") + ), + true + ) + ); + + ImmutableWorkerInfo workerFoo = strategy.findWorkerForTask( + new RemoteTaskRunnerConfig(), + WORKERS_FOR_AFFINITY_TESTS, + createDummyTask("foo") + ); + Assert.assertEquals("localhost1", workerFoo.getWorker().getHost()); + + // With strong affinity, no workers can be found for bar. + ImmutableWorkerInfo workerBar = strategy.findWorkerForTask( + new RemoteTaskRunnerConfig(), + WORKERS_FOR_AFFINITY_TESTS, + createDummyTask("bar") + ); + Assert.assertNull(workerBar); + + ImmutableWorkerInfo workerBaz = strategy.findWorkerForTask( + new RemoteTaskRunnerConfig(), + WORKERS_FOR_AFFINITY_TESTS, + createDummyTask("baz") + ); + Assert.assertEquals("localhost0", workerBaz.getWorker().getHost()); + } + + private static NoopTask createDummyTask(final String dataSource) + { + return new NoopTask(null, 1, 0, null, null, null) + { + @Override + public String getDataSource() + { + return dataSource; + } + }; + } } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java index 43bcf7d6b5e..00e3067c550 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java @@ -19,29 +19,27 @@ package io.druid.indexing.overlord.setup; -import com.google.common.base.Optional; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import io.druid.indexing.common.task.NoopTask; import io.druid.indexing.overlord.ImmutableWorkerInfo; import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.worker.Worker; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; -import java.util.Arrays; - public class FillCapacityWithAffinityWorkerSelectStrategyTest { @Test public void testFindWorkerForTask() throws Exception { FillCapacityWorkerSelectStrategy strategy = new FillCapacityWithAffinityWorkerSelectStrategy( - new AffinityConfig(ImmutableMap.of("foo", Arrays.asList("localhost"))) + new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) ); - Optional optional = strategy.findWorkerForTask( + ImmutableWorkerInfo worker = strategy.findWorkerForTask( new RemoteTaskRunnerConfig(), ImmutableMap.of( "lhost", @@ -49,14 +47,14 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() 
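The weak/strong tests above exercise the behavior the new boolean on `AffinityConfig` is meant to control: with weak affinity a datasource whose preferred workers are unavailable falls back to any worker, while with strong affinity it gets no worker at all. The sketch below is an illustrative selection rule only, not the Druid implementation, written to make that branch explicit:

```java
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;

import java.util.List;
import java.util.Map;
import java.util.Set;

public class AffinitySelectionSketch
{
  /** Illustrative rule: prefer an affinity worker; fall back only when affinity is weak. */
  static String pickWorker(
      String dataSource,
      Map<String, Set<String>> affinity,
      boolean strong,
      List<String> availableWorkers
  )
  {
    final Set<String> preferred = affinity.get(dataSource);
    if (preferred != null) {
      for (String worker : availableWorkers) {
        if (preferred.contains(worker)) {
          return worker;             // an affinity worker is available
        }
      }
      if (strong) {
        return null;                 // strong affinity: never spill onto other workers
      }
    }
    // No affinity entry for this datasource, or weak affinity with no preferred worker up.
    return availableWorkers.isEmpty() ? null : availableWorkers.get(0);
  }

  public static void main(String[] args)
  {
    final Map<String, Set<String>> affinity =
        ImmutableMap.<String, Set<String>>of("bar", ImmutableSet.of("nonexistent-worker"));
    final List<String> workers = ImmutableList.of("localhost0", "localhost1");

    System.out.println(pickWorker("bar", affinity, false, workers));  // weak: localhost0
    System.out.println(pickWorker("bar", affinity, true, workers));   // strong: null
    System.out.println(pickWorker("baz", affinity, true, workers));   // no affinity entry: localhost0
  }
}
```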
+ DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) @@ -68,7 +66,6 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest } } ); - ImmutableWorkerInfo worker = optional.get(); Assert.assertEquals("localhost", worker.getWorker().getHost()); } @@ -76,10 +73,10 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest public void testFindWorkerForTaskWithNulls() throws Exception { FillCapacityWorkerSelectStrategy strategy = new FillCapacityWithAffinityWorkerSelectStrategy( - new AffinityConfig(ImmutableMap.of("foo", Arrays.asList("localhost"))) + new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) ); - Optional optional = strategy.findWorkerForTask( + ImmutableWorkerInfo worker = strategy.findWorkerForTask( new RemoteTaskRunnerConfig(), ImmutableMap.of( "lhost", @@ -87,19 +84,18 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest new Worker("http", "lhost", "lhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) ); - ImmutableWorkerInfo worker = optional.get(); Assert.assertEquals("lhost", worker.getWorker().getHost()); } @@ -107,10 +103,10 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest public void testIsolation() throws Exception { FillCapacityWorkerSelectStrategy strategy = new FillCapacityWithAffinityWorkerSelectStrategy( - new AffinityConfig(ImmutableMap.of("foo", Arrays.asList("localhost"))) + new AffinityConfig(ImmutableMap.of("foo", ImmutableSet.of("localhost")), false) ); - Optional optional = strategy.findWorkerForTask( + ImmutableWorkerInfo worker = strategy.findWorkerForTask( new RemoteTaskRunnerConfig(), ImmutableMap.of( "localhost", @@ -118,11 +114,11 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest new Worker("http", "localhost", "localhost", 1, "v1"), 0, Sets.newHashSet(), Sets.newHashSet(), - DateTime.now() + DateTimes.nowUtc() ) ), new NoopTask(null, 1, 0, null, null, null) ); - Assert.assertFalse(optional.isPresent()); + Assert.assertNull(worker); } } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategyTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategyTest.java index 35d1ab1bfdd..1eb7028a5da 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/JavaScriptWorkerSelectStrategyTest.java @@ -22,7 +22,6 @@ package io.druid.indexing.overlord.setup; import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Optional; import com.google.common.collect.ImmutableMap; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.ImmutableWorkerInfo; @@ -126,7 +125,7 @@ public class JavaScriptWorkerSelectStrategyTest new TestRemoteTaskRunnerConfig(new Period("PT1S")), workerMap, createMockTask("index_hadoop") - ).get(); + ); // batch tasks should be sent to 
worker1 Assert.assertEquals(worker1, workerForBatchTask); @@ -134,7 +133,7 @@ public class JavaScriptWorkerSelectStrategyTest new TestRemoteTaskRunnerConfig(new Period("PT1S")), workerMap, createMockTask("other_type") - ).get(); + ); // all other tasks should be sent to worker2 Assert.assertEquals(worker2, workerForOtherTask); } @@ -146,12 +145,12 @@ public class JavaScriptWorkerSelectStrategyTest "10.0.0.1", createMockWorker(1, true, true), "10.0.0.2", createMockWorker(1, true, true) ); - Optional workerForOtherTask = STRATEGY.findWorkerForTask( + ImmutableWorkerInfo workerForOtherTask = STRATEGY.findWorkerForTask( new TestRemoteTaskRunnerConfig(new Period("PT1S")), workerMap, createMockTask("other_type") ); - Assert.assertFalse(workerForOtherTask.isPresent()); + Assert.assertNull(workerForOtherTask); } @Test @@ -161,20 +160,20 @@ public class JavaScriptWorkerSelectStrategyTest "10.0.0.1", createMockWorker(1, true, false), "10.0.0.4", createMockWorker(1, true, false) ); - Optional workerForBatchTask = STRATEGY.findWorkerForTask( + ImmutableWorkerInfo workerForBatchTask = STRATEGY.findWorkerForTask( new TestRemoteTaskRunnerConfig(new Period("PT1S")), workerMap, createMockTask("index_hadoop") ); - Assert.assertFalse(workerForBatchTask.isPresent()); + Assert.assertNull(workerForBatchTask); - Optional workerForOtherTask = STRATEGY.findWorkerForTask( + ImmutableWorkerInfo workerForOtherTask = STRATEGY.findWorkerForTask( new TestRemoteTaskRunnerConfig(new Period("PT1S")), workerMap, createMockTask("otherTask") ); // all other tasks should be sent to worker2 - Assert.assertFalse(workerForOtherTask.isPresent()); + Assert.assertNull(workerForOtherTask); } @Test @@ -184,20 +183,20 @@ public class JavaScriptWorkerSelectStrategyTest "10.0.0.1", createMockWorker(1, false, true), "10.0.0.4", createMockWorker(1, false, true) ); - Optional workerForBatchTask = STRATEGY.findWorkerForTask( + ImmutableWorkerInfo workerForBatchTask = STRATEGY.findWorkerForTask( new TestRemoteTaskRunnerConfig(new Period("PT1S")), workerMap, createMockTask("index_hadoop") ); - Assert.assertFalse(workerForBatchTask.isPresent()); + Assert.assertNull(workerForBatchTask); - Optional workerForOtherTask = STRATEGY.findWorkerForTask( + ImmutableWorkerInfo workerForOtherTask = STRATEGY.findWorkerForTask( new TestRemoteTaskRunnerConfig(new Period("PT1S")), workerMap, createMockTask("otherTask") ); // all other tasks should be sent to worker2 - Assert.assertFalse(workerForOtherTask.isPresent()); + Assert.assertNull(workerForOtherTask); } @Test @@ -208,12 +207,12 @@ public class JavaScriptWorkerSelectStrategyTest "10.0.0.1", createMockWorker(1, true, true), "10.0.0.2", createMockWorker(5, true, true) ); - Optional workerForBatchTask = STRATEGY.findWorkerForTask( + ImmutableWorkerInfo workerForBatchTask = STRATEGY.findWorkerForTask( new TestRemoteTaskRunnerConfig(new Period("PT1S")), workerMap, createMockTask("index_hadoop") ); - Assert.assertEquals(workerMap.get("10.0.0.2"), workerForBatchTask.get()); + Assert.assertEquals(workerMap.get("10.0.0.2"), workerForBatchTask); } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfigTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfigTest.java index d92fcfb66ec..0067c10e9cc 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfigTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/setup/WorkerBehaviorConfigTest.java @@ -24,6 +24,7 @@ import 
com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import io.druid.indexing.overlord.autoscaling.ec2.EC2AutoScaler; import io.druid.indexing.overlord.autoscaling.ec2.EC2EnvironmentConfig; import io.druid.indexing.overlord.autoscaling.ec2.EC2NodeData; @@ -42,7 +43,8 @@ public class WorkerBehaviorConfigTest WorkerBehaviorConfig config = new WorkerBehaviorConfig( new FillCapacityWithAffinityWorkerSelectStrategy( new AffinityConfig( - ImmutableMap.of("foo", Arrays.asList("localhost")) + ImmutableMap.of("foo", ImmutableSet.of("localhost")), + false ) ), new EC2AutoScaler( diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java index 6778deb7130..c22dde9eb1f 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorManagerTest.java @@ -22,12 +22,12 @@ package io.druid.indexing.overlord.supervisor; import com.google.common.base.Optional; import com.google.common.collect.ImmutableMap; import io.druid.indexing.overlord.DataSourceMetadata; +import io.druid.java.util.common.DateTimes; import io.druid.metadata.MetadataSupervisorManager; import org.easymock.EasyMock; import org.easymock.EasyMockRunner; import org.easymock.EasyMockSupport; import org.easymock.Mock; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -194,7 +194,7 @@ public class SupervisorManagerTest extends EasyMockSupport @Test public void testGetSupervisorStatus() throws Exception { - SupervisorReport report = new SupervisorReport("id1", DateTime.now()) + SupervisorReport report = new SupervisorReport("id1", DateTimes.nowUtc()) { @Override public Object getPayload() diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java index c593f6986c0..f4b96b3416c 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/supervisor/SupervisorResourceTest.java @@ -26,13 +26,13 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import io.druid.indexing.overlord.DataSourceMetadata; import io.druid.indexing.overlord.TaskMaster; -import org.easymock.Capture; +import io.druid.java.util.common.DateTimes; import io.druid.server.security.AuthConfig; +import org.easymock.Capture; import org.easymock.EasyMock; import org.easymock.EasyMockRunner; import org.easymock.EasyMockSupport; import org.easymock.Mock; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -148,7 +148,7 @@ public class SupervisorResourceTest extends EasyMockSupport @Test public void testSpecGetStatus() throws Exception { - SupervisorReport report = new SupervisorReport("id", DateTime.now()) + SupervisorReport report = new SupervisorReport("id", DateTimes.nowUtc()) { @Override public Object getPayload() diff --git 
a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java index 82cacf88dcd..2e8cee71a46 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java @@ -126,7 +126,7 @@ public class WorkerTaskMonitorTest { return basePath; } - }, null, null, null, null, null + }, null, null, null, null ), new TestRemoteTaskRunnerConfig(new Period("PT1S")), cf, diff --git a/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java index 44e83fc8d52..8e8b672636d 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java @@ -87,7 +87,7 @@ public class WorkerResourceTest { return basePath; } - }, null, null, null, null, null), + }, null, null, null, null), new RemoteTaskRunnerConfig(), cf, worker diff --git a/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java b/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java index df45c1b99e0..118d2e8912c 100644 --- a/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java +++ b/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java @@ -20,7 +20,6 @@ package io.druid.server.initialization; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.collect.ImmutableList; import com.google.inject.Binder; import com.google.inject.Injector; @@ -32,6 +31,7 @@ import io.druid.guice.JsonConfigurator; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.jackson.JacksonUtils; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; @@ -156,7 +156,7 @@ public class IndexerZkConfigTest ); indexerZkConfig.inject(propertyValues, configurator); - Assert.assertEquals("/druid/indexer/leaderLatchPath", indexerZkConfig.get().get().getLeaderLatchPath()); + Assert.assertEquals("/druid/indexer/tasks", indexerZkConfig.get().get().getTasksPath()); } @Test @@ -245,7 +245,7 @@ public class IndexerZkConfigTest ZkPathsConfig zkPathsConfig1 = zkPathsConfig.get().get(); - IndexerZkConfig indexerZkConfig = new IndexerZkConfig(zkPathsConfig1, null, null, null, null, null); + IndexerZkConfig indexerZkConfig = new IndexerZkConfig(zkPathsConfig1, null, null, null, null); Assert.assertEquals("/druid/metrics/indexer", indexerZkConfig.getBase()); Assert.assertEquals("/druid/metrics/indexer/announcements", indexerZkConfig.getAnnouncementsPath()); @@ -262,22 +262,18 @@ public class IndexerZkConfigTest "/druid/prod", "/druid/prod/a", "/druid/prod/t", - "/druid/prod/s", - "/druid/prod/l" + "/druid/prod/s" ); Map value = mapper.readValue( - mapper.writeValueAsString(indexerZkConfig), new TypeReference>() - { - } + mapper.writeValueAsString(indexerZkConfig), JacksonUtils.TYPE_REFERENCE_MAP_STRING_STRING ); IndexerZkConfig newConfig = new IndexerZkConfig( zkPathsConfig, value.get("base"), value.get("announcementsPath"), value.get("tasksPath"), - value.get("statusPath"), - value.get("leaderLatchPath") 
+ value.get("statusPath") ); Assert.assertEquals(indexerZkConfig, newConfig); diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml index 18b39e2f221..1864d651e51 100644 --- a/integration-tests/pom.xml +++ b/integration-tests/pom.xml @@ -211,6 +211,16 @@ + + de.thetaphi + forbiddenapis + + + + ../codestyle/joda-time-forbidden-apis.txt + + + @@ -244,6 +254,16 @@ + + de.thetaphi + forbiddenapis + + + + ../codestyle/joda-time-forbidden-apis.txt + + + diff --git a/integration-tests/run_cluster.sh b/integration-tests/run_cluster.sh index 152f418c609..28819eb6ef8 100755 --- a/integration-tests/run_cluster.sh +++ b/integration-tests/run_cluster.sh @@ -13,7 +13,7 @@ SUPERVISORDIR=/usr/lib/druid/conf RESOURCEDIR=$DIR/src/test/resources # so docker IP addr will be known during docker build -echo $DOCKER_IP > $DOCKERDIR/docker_ip +echo ${DOCKER_IP:=127.0.0.1} > $DOCKERDIR/docker_ip # Make directories if they dont exist mkdir -p $SHARED_DIR/logs diff --git a/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java b/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java index f3ae8658778..afa8a4da558 100644 --- a/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java +++ b/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java @@ -21,9 +21,9 @@ package io.druid.testing; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.logger.Logger; import java.io.File; @@ -56,9 +56,7 @@ public class ConfigFileConfigProvider implements IntegrationTestingConfigProvide ObjectMapper jsonMapper = new ObjectMapper(); try { props = jsonMapper.readValue( - new File(configFile), new TypeReference>() - { - } + new File(configFile), JacksonUtils.TYPE_REFERENCE_MAP_STRING_STRING ); } catch (IOException ex) { diff --git a/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java index eb7593c48d9..d41729d592e 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java @@ -28,6 +28,7 @@ import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.testing.guice.TestClient; @@ -144,10 +145,8 @@ public class EventReceiverFirehoseTestClient int expectedEventsPosted = 0; while ((s = reader.readLine()) != null) { events.add( - (Map) this.jsonMapper.readValue( - s, new TypeReference>() - { - } + this.jsonMapper.readValue( + s, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ) ); ObjectMapper mapper = (totalEventsPosted % 2 == 0) ? 
jsonMapper : smileMapper; diff --git a/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java index 5996b88b956..2d4e695f024 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java @@ -33,6 +33,7 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.task.Task; import io.druid.java.util.common.ISE; import io.druid.java.util.common.RetryUtils; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; @@ -111,9 +112,7 @@ public class OverlordResourceTestClient ); } Map responseData = jsonMapper.readValue( - response.getContent(), new TypeReference>() - { - } + response.getContent(), JacksonUtils.TYPE_REFERENCE_MAP_STRING_STRING ); String taskID = responseData.get("task"); LOG.info("Submitted task with TaskID[%s]", taskID); @@ -143,9 +142,7 @@ public class OverlordResourceTestClient LOG.info("Index status response" + response.getContent()); Map responseData = jsonMapper.readValue( - response.getContent(), new TypeReference>() - { - } + response.getContent(), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); //TODO: figure out a better way to parse the response... String status = (String) ((Map) responseData.get("status")).get("status"); @@ -202,9 +199,7 @@ public class OverlordResourceTestClient ); LOG.info("Shutdown Task %s response %s", taskID, response.getContent()); return jsonMapper.readValue( - response.getContent(), new TypeReference>() - { - } + response.getContent(), JacksonUtils.TYPE_REFERENCE_MAP_STRING_STRING ); } catch (Exception e) { @@ -258,9 +253,7 @@ public class OverlordResourceTestClient ); } Map responseData = jsonMapper.readValue( - response.getContent(), new TypeReference>() - { - } + response.getContent(), JacksonUtils.TYPE_REFERENCE_MAP_STRING_STRING ); String id = responseData.get("id"); LOG.info("Submitted supervisor with id[%s]", id); diff --git a/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java b/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java index 05836d66b71..14b48da5f7d 100644 --- a/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java +++ b/integration-tests/src/main/java/io/druid/testing/guice/DruidTestModule.java @@ -35,6 +35,9 @@ import io.druid.curator.CuratorConfig; import io.druid.guice.JsonConfigProvider; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Client; +import io.druid.guice.annotations.Self; +import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.IntegrationTestingConfigProvider; import io.druid.testing.IntegrationTestingCuratorConfig; @@ -52,6 +55,11 @@ public class DruidTestModule implements Module JsonConfigProvider.bind(binder, "druid.test.config", IntegrationTestingConfigProvider.class); binder.bind(CuratorConfig.class).to(IntegrationTestingCuratorConfig.class); + + // Bind DruidNode instance to make Guice happy. This instance is currently unused. 
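Several of the integration-test clients above (ConfigFileConfigProvider, EventReceiverFirehoseTestClient, OverlordResourceTestClient) replace per-call anonymous TypeReference instances with the shared constants added by this patch in JacksonUtils. A small sketch of the two equivalent styles; the class and method names here are hypothetical, only the JacksonUtils constant comes from the patch:

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import io.druid.java.util.common.jackson.JacksonUtils;
    import java.io.IOException;
    import java.util.Map;

    class JacksonUtilsUsageSketch
    {
      static Map<String, String> parse(ObjectMapper jsonMapper, String json) throws IOException
      {
        // Old style: a fresh anonymous TypeReference subclass at every call site.
        Map<String, String> viaAnonymous =
            jsonMapper.readValue(json, new TypeReference<Map<String, String>>() {});
        // New style: reuse the shared, stateless constant and skip the per-call allocation.
        Map<String, String> viaConstant =
            jsonMapper.readValue(json, JacksonUtils.TYPE_REFERENCE_MAP_STRING_STRING);
        // Both deserialize to the same value; only the TypeReference source differs.
        return viaConstant != null ? viaConstant : viaAnonymous;
      }
    }
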
+ binder.bind(DruidNode.class).annotatedWith(Self.class).toInstance( + new DruidNode("integration-tests", "localhost", 9191, null, null, new ServerConfig()) + ); } @Provides diff --git a/integration-tests/src/main/java/org/testng/TestNG.java b/integration-tests/src/main/java/org/testng/TestNG.java index f375cea6298..0586a81519b 100644 --- a/integration-tests/src/main/java/org/testng/TestNG.java +++ b/integration-tests/src/main/java/org/testng/TestNG.java @@ -329,17 +329,8 @@ public class TestNG s.getChildSuites().add(cSuite); } } - catch (FileNotFoundException e) { - e.printStackTrace(System.out); - } - catch (ParserConfigurationException e) { - e.printStackTrace(System.out); - } - catch (SAXException e) { - e.printStackTrace(System.out); - } - catch (IOException e) { - e.printStackTrace(System.out); + catch (ParserConfigurationException | SAXException | IOException e) { + LOGGER.error("", e); } } @@ -366,17 +357,8 @@ public class TestNG } } } - catch (FileNotFoundException e) { - e.printStackTrace(System.out); - } - catch (IOException e) { - e.printStackTrace(System.out); - } - catch (ParserConfigurationException e) { - e.printStackTrace(System.out); - } - catch (SAXException e) { - e.printStackTrace(System.out); + catch (IOException | SAXException | ParserConfigurationException e) { + LOGGER.error("", e); } catch (Exception ex) { // Probably a Yaml exception, unnest it @@ -453,14 +435,8 @@ public class TestNG m_suites.add(xmlSuite); } } - catch (ParserConfigurationException ex) { - ex.printStackTrace(); - } - catch (SAXException ex) { - ex.printStackTrace(); - } - catch (IOException ex) { - ex.printStackTrace(); + catch (ParserConfigurationException | SAXException | IOException ex) { + LOGGER.error("", ex); } } @@ -1181,17 +1157,12 @@ public class TestNG if (!m_hasTests) { setStatus(HAS_NO_TEST); if (TestRunner.getVerbose() > 1) { - System.err.println("[TestNG] No tests found. Nothing was run"); + LOGGER.error("[TestNG] No tests found. 
Nothing was run"); usage(); } } } - private void p(String string) - { - System.out.println("[TestNG] " + string); - } - private void runExecutionListeners(boolean start) { for (List listeners @@ -1231,8 +1202,7 @@ public class TestNG ); } catch (Exception ex) { - System.err.println("[TestNG] Reporter " + reporter + " failed"); - ex.printStackTrace(System.err); + LOGGER.error("[TestNG] Reporter " + reporter + " failed", ex); } } } @@ -1505,7 +1475,7 @@ public class TestNG } catch (TestNGException ex) { if (TestRunner.getVerbose() > 1) { - ex.printStackTrace(System.out); + LOGGER.error("", ex); } else { error(ex.getMessage()); } @@ -1927,7 +1897,7 @@ public class TestNG static void exitWithError(String msg) { - System.err.println(msg); + LOGGER.error(msg); usage(); System.exit(1); } diff --git a/integration-tests/src/main/java/org/testng/remote/RemoteTestNG.java b/integration-tests/src/main/java/org/testng/remote/RemoteTestNG.java index fbf7faa7cd5..78807e69587 100644 --- a/integration-tests/src/main/java/org/testng/remote/RemoteTestNG.java +++ b/integration-tests/src/main/java/org/testng/remote/RemoteTestNG.java @@ -31,6 +31,7 @@ import org.testng.TestNGException; import org.testng.TestRunner; import org.testng.collections.Lists; import org.testng.internal.ClassHelper; +import org.testng.log4testng.Logger; import org.testng.remote.strprotocol.GenericMessage; import org.testng.remote.strprotocol.IMessageSender; import org.testng.remote.strprotocol.MessageHelper; @@ -59,6 +60,8 @@ import static org.testng.internal.Utils.defaultIfStringEmpty; */ public class RemoteTestNG extends TestNG { + private static final Logger LOGGER = Logger.getLogger(TestNG.class); + // The following constants are referenced by the Eclipse plug-in, make sure you // modify the plug-in as well if you change any of them. public static final String DEBUG_PORT = "12345"; @@ -134,7 +137,7 @@ public class RemoteTestNG extends TestNG private static void p(String s) { if (isVerbose()) { - System.out.println("[RemoteTestNG] " + s); + LOGGER.info("[RemoteTestNG] " + s); } } @@ -191,11 +194,11 @@ public class RemoteTestNG extends TestNG super.run(); } else { - System.err.println("No test suite found. Nothing to run"); + LOGGER.error("No test suite found. 
Nothing to run"); } } catch (Throwable cause) { - cause.printStackTrace(System.err); + LOGGER.error("", cause); } finally { msh.shutDown(); diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java index cb6a2f435db..82b83fb983c 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Inject; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.Intervals; import io.druid.testing.clients.CoordinatorResourceTestClient; import io.druid.testing.clients.OverlordResourceTestClient; import io.druid.testing.utils.RetryUtil; @@ -32,9 +33,9 @@ import org.joda.time.Interval; import java.io.IOException; import java.io.InputStream; -import java.util.concurrent.Callable; import java.util.ArrayList; import java.util.Collections; +import java.util.concurrent.Callable; public abstract class AbstractIndexerTest { @@ -72,7 +73,7 @@ public abstract class AbstractIndexerTest // Wait for any existing index tasks to complete before disabling the datasource otherwise // realtime tasks can get stuck waiting for handoff. https://github.com/druid-io/druid/issues/1729 waitForAllTasksToComplete(); - Interval interval = new Interval(start + "/" + end); + Interval interval = Intervals.of(start + "/" + end); coordinator.unloadSegmentsForDataSource(dataSource, interval); RetryUtil.retryUntilFalse( new Callable() diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java index 38313d15fa3..530250c14a6 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java @@ -19,14 +19,15 @@ package io.druid.tests.indexer; -import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Throwables; import com.google.inject.Inject; import com.metamx.http.client.HttpClient; import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.clients.EventReceiverFirehoseTestClient; @@ -36,7 +37,6 @@ import io.druid.testing.utils.RetryUtil; import io.druid.testing.utils.ServerDiscoveryUtil; import org.apache.commons.io.IOUtils; import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.testng.annotations.Guice; @@ -102,7 +102,7 @@ public class ITRealtimeIndexTaskTest extends AbstractIndexerTest // the task will run for 3 minutes and then shutdown itself String task = setShutOffTime( getTaskAsString(REALTIME_TASK_RESOURCE), - new DateTime(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(3)) + DateTimes.utc(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(3)) ); LOG.info("indexerSpec: [%s]\n", task); taskID = 
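The TestNG and RemoteTestNG edits above collapse several near-identical catch blocks into Java 7 multi-catch and route stack traces through org.testng.log4testng.Logger instead of System.out/System.err. A minimal sketch of that pattern under those assumptions; loadSuite and parseSuite are hypothetical stand-ins for TestNG's XML suite parsing:

    import java.io.IOException;
    import javax.xml.parsers.ParserConfigurationException;
    import org.testng.log4testng.Logger;
    import org.xml.sax.SAXException;

    class MultiCatchSketch
    {
      private static final Logger LOGGER = Logger.getLogger(MultiCatchSketch.class);

      void loadSuite(String path)
      {
        try {
          parseSuite(path);
        }
        catch (ParserConfigurationException | SAXException | IOException e) {
          // One handler replaces four separate blocks; output goes through the logger.
          LOGGER.error("", e);
        }
      }

      void parseSuite(String path) throws ParserConfigurationException, SAXException, IOException
      {
        // hypothetical: the real parsing lives in TestNG's suite parser
      }
    }
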
indexer.submitTask(task); @@ -183,7 +183,6 @@ public class ITRealtimeIndexTaskTest extends AbstractIndexerTest public void postEvents() throws Exception { - DateTimeZone zone = DateTimeZone.forID("UTC"); final ServerDiscoverySelector eventReceiverSelector = factory.createSelector(EVENT_RECEIVER_SERVICE_NAME); eventReceiverSelector.start(); BufferedReader reader = null; @@ -212,7 +211,7 @@ public class ITRealtimeIndexTaskTest extends AbstractIndexerTest ); // there are 22 lines in the file int i = 1; - DateTime dt = new DateTime(zone); // timestamp used for sending each event + DateTime dt = DateTimes.nowUtc(); // timestamp used for sending each event dtFirst = dt; // timestamp of 1st event dtLast = dt; // timestamp of last event String line; @@ -228,10 +227,8 @@ public class ITRealtimeIndexTaskTest extends AbstractIndexerTest LOG.info("sending event: [%s]\n", event); Collection> events = new ArrayList>(); events.add( - (Map) this.jsonMapper.readValue( - event, new TypeReference>() - { - } + this.jsonMapper.readValue( + event, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ) ); int eventsPosted = client.postEvents(events, this.jsonMapper, MediaType.APPLICATION_JSON); @@ -244,7 +241,7 @@ public class ITRealtimeIndexTaskTest extends AbstractIndexerTest } catch (InterruptedException ex) { /* nothing */ } dtLast = dt; - dt = new DateTime(zone); + dt = DateTimes.nowUtc(); i++; } } diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java index f83b2f44179..645f66dddae 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITUnionQueryTest.java @@ -25,6 +25,7 @@ import com.google.inject.Inject; import com.metamx.http.client.HttpClient; import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.clients.EventReceiverFirehoseTestClient; @@ -69,7 +70,7 @@ public class ITUnionQueryTest extends AbstractIndexerTest // Load 4 datasources with same dimensions String task = setShutOffTime( getTaskAsString(UNION_TASK_RESOURCE), - new DateTime(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(3)) + DateTimes.utc(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(3)) ); List taskIDs = Lists.newArrayList(); for (int i = 0; i < numTasks; i++) { diff --git a/java-util/src/main/java/io/druid/java/util/common/DateTimes.java b/java-util/src/main/java/io/druid/java/util/common/DateTimes.java new file mode 100644 index 00000000000..149e34ee397 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/DateTimes.java @@ -0,0 +1,59 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.java.util.common; + +import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; + +public final class DateTimes +{ + public static final DateTime EPOCH = utc(0); + public static final DateTime MAX = utc(JodaUtils.MAX_INSTANT); + public static final DateTime MIN = utc(JodaUtils.MIN_INSTANT); + + public static DateTime utc(long instant) + { + return new DateTime(instant, ISOChronology.getInstanceUTC()); + } + + public static DateTime of(String instant) + { + return new DateTime(instant, ISOChronology.getInstanceUTC()); + } + + public static DateTime nowUtc() + { + return DateTime.now(ISOChronology.getInstanceUTC()); + } + + public static DateTime max(DateTime dt1, DateTime dt2) + { + return dt1.compareTo(dt2) >= 0 ? dt1 : dt2; + } + + public static DateTime min(DateTime dt1, DateTime dt2) + { + return dt1.compareTo(dt2) < 0 ? dt1 : dt2; + } + + private DateTimes() + { + } +} diff --git a/java-util/src/main/java/io/druid/java/util/common/Intervals.java b/java-util/src/main/java/io/druid/java/util/common/Intervals.java new file mode 100644 index 00000000000..6a1e8b9c8b5 --- /dev/null +++ b/java-util/src/main/java/io/druid/java/util/common/Intervals.java @@ -0,0 +1,44 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.java.util.common; + +import com.google.common.collect.ImmutableList; +import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; + +public final class Intervals +{ + public static final Interval ETERNITY = utc(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); + public static final ImmutableList ONLY_ETERNITY = ImmutableList.of(ETERNITY); + + public static Interval utc(long startInstant, long endInstant) + { + return new Interval(startInstant, endInstant, ISOChronology.getInstanceUTC()); + } + + public static Interval of(String interval) + { + return new Interval(interval, ISOChronology.getInstanceUTC()); + } + + private Intervals() + { + } +} diff --git a/common/src/main/java/io/druid/common/utils/JodaUtils.java b/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java similarity index 97% rename from common/src/main/java/io/druid/common/utils/JodaUtils.java rename to java-util/src/main/java/io/druid/java/util/common/JodaUtils.java index bc46320774d..bda0f98ba05 100644 --- a/common/src/main/java/io/druid/common/utils/JodaUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/JodaUtils.java @@ -17,7 +17,7 @@ * under the License. */ -package io.druid.common.utils; +package io.druid.java.util.common; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; @@ -38,7 +38,6 @@ public class JodaUtils // limit intervals such that duration millis fits in a long public static final long MAX_INSTANT = Long.MAX_VALUE / 2; public static final long MIN_INSTANT = Long.MIN_VALUE / 2; - public static final Interval ETERNITY = new Interval(MIN_INSTANT, MAX_INSTANT); public static ArrayList condenseIntervals(Iterable intervals) { diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java index 59a84ae6ed1..946bb5a2a50 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/AllGranularity.java @@ -20,6 +20,7 @@ package io.druid.java.util.common.granularity; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.format.DateTimeFormatter; @@ -29,15 +30,6 @@ import org.joda.time.format.DateTimeFormatter; */ public class AllGranularity extends Granularity { - // These constants are from JodaUtils in druid-common. - // Creates circular dependency. 
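The new DateTimes and Intervals utilities introduced above exist so that Joda objects are always built against ISOChronology.getInstanceUTC() rather than the JVM's default time zone, which is what the many new DateTime(...)/new Interval(...) replacements elsewhere in this patch rely on. A short usage sketch restricted to the factory methods and constants these two classes actually define:

    import io.druid.java.util.common.DateTimes;
    import io.druid.java.util.common.Intervals;
    import org.joda.time.DateTime;
    import org.joda.time.Interval;

    class TimeUtilsSketch
    {
      void example()
      {
        // Every value below is pinned to UTC, independent of the default JVM zone.
        DateTime fromMillis = DateTimes.utc(3_600_000L);               // 1970-01-01T01:00:00Z
        DateTime parsed = DateTimes.of("2011-02-03T04:05:06.100");
        DateTime now = DateTimes.nowUtc();
        DateTime later = DateTimes.max(parsed, now);                   // instant comparison helper

        Interval day = Intervals.of("2011-02-03/2011-02-04");
        Interval firstUtcDay = Intervals.utc(0L, 86_400_000L);
        Interval eternity = Intervals.ETERNITY;                        // MIN_INSTANT..MAX_INSTANT
      }
    }
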
- // Will be nice to move JodaUtils here sometime - public static final long MAX_INSTANT = Long.MAX_VALUE / 2; - public static final long MIN_INSTANT = Long.MIN_VALUE / 2; - - private final DateTime maxDateTime = new DateTime(MAX_INSTANT); - private final DateTime minDateTime = new DateTime(MIN_INSTANT); - /** * This constructor is public b/c it is serialized and deserialized * based on type in GranularityModule @@ -53,7 +45,7 @@ public class AllGranularity extends Granularity @Override public DateTime increment(DateTime time) { - return maxDateTime; + return DateTimes.MAX; } @Override @@ -65,7 +57,7 @@ public class AllGranularity extends Granularity @Override public DateTime bucketStart(DateTime time) { - return minDateTime; + return DateTimes.MIN; } @Override diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java index f8f556c2db6..419280be151 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/DurationGranularity.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.primitives.Longs; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormatter; @@ -61,7 +62,7 @@ public class DurationGranularity extends Granularity @JsonProperty("origin") public DateTime getOrigin() { - return new DateTime(origin); + return DateTimes.utc(origin); } public long getOriginMillis() @@ -78,13 +79,13 @@ public class DurationGranularity extends Granularity @Override public DateTime increment(DateTime time) { - return new DateTime(time.getMillis() + getDurationMillis()); + return time.plus(getDuration()); } @Override public DateTime decrement(DateTime time) { - return new DateTime(time.getMillis() - getDurationMillis()); + return time.minus(getDuration()); } @Override @@ -96,7 +97,7 @@ public class DurationGranularity extends Granularity if (offset < 0) { offset += duration; } - return new DateTime(t - offset); + return new DateTime(t - offset, time.getChronology()); } @Override diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java index 1ec439dff89..9a76af789c1 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.google.common.collect.Lists; import com.google.common.primitives.Longs; import io.druid.java.util.common.Cacheable; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; @@ -30,8 +31,6 @@ import org.joda.time.DateTimeZone; import org.joda.time.Interval; import org.joda.time.format.DateTimeFormatter; -import java.util.Collections; -import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; @@ -79,7 +78,6 @@ public abstract class Granularity implements Cacheable public static List granularitiesFinerThan(final Granularity gran0) { - final DateTime epoch = new DateTime(0); 
final List retVal = Lists.newArrayList(); final DateTime origin = (gran0 instanceof PeriodGranularity) ? ((PeriodGranularity) gran0).getOrigin() : null; final DateTimeZone tz = (gran0 instanceof PeriodGranularity) ? ((PeriodGranularity) gran0).getTimeZone() : null; @@ -93,21 +91,17 @@ public abstract class Granularity implements Cacheable continue; } final Granularity segmentGranularity = gran.create(origin, tz); - if (segmentGranularity.bucket(epoch).toDurationMillis() <= gran0.bucket(epoch).toDurationMillis()) { + final long segmentGranularityDurationMillis = segmentGranularity.bucket(DateTimes.EPOCH).toDurationMillis(); + final long gran0DurationMillis = gran0.bucket(DateTimes.EPOCH).toDurationMillis(); + if (segmentGranularityDurationMillis <= gran0DurationMillis) { retVal.add(segmentGranularity); } } - Collections.sort( - retVal, - new Comparator() - { - @Override - public int compare(Granularity g1, Granularity g2) - { - return Longs.compare(g2.bucket(epoch).toDurationMillis(), g1.bucket(epoch).toDurationMillis()); - } - } - ); + retVal.sort((g1, g2) -> { + long duration1 = g2.bucket(DateTimes.EPOCH).toDurationMillis(); + long duration2 = g1.bucket(DateTimes.EPOCH).toDurationMillis(); + return Longs.compare(duration1, duration2); + }); return retVal; } @@ -128,7 +122,7 @@ public abstract class Granularity implements Cacheable public DateTime toDateTime(long offset) { - return new DateTime(offset, DateTimeZone.UTC); + return DateTimes.utc(offset); } public DateTime toDate(String filePath) diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java b/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java index a74efb7ec3d..dceaa4a1998 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java @@ -23,6 +23,7 @@ import io.druid.java.util.common.IAE; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Period; +import org.joda.time.chrono.ISOChronology; /** * Only to create a mapping of the granularity and all the supported file patterns @@ -159,7 +160,8 @@ public enum GranularityType dateValuePositions >= 4 ? vals[4] : 0, dateValuePositions >= 5 ? vals[5] : 0, dateValuePositions >= 6 ? 
vals[6] : 0, - 0 + 0, + ISOChronology.getInstanceUTC() ); } diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java index 1fff1a42bab..ba4b24d4254 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/NoneGranularity.java @@ -42,23 +42,19 @@ public class NoneGranularity extends Granularity @Override public DateTime increment(DateTime time) { - return new DateTime(time.getMillis() + 1); + return time.plus(1); } @Override public DateTime decrement(DateTime time) { - return new DateTime(time.getMillis() - 1); + return time.minus(1); } @Override public DateTime bucketStart(DateTime time) { - if (time == null) { - return null; - } - - return new DateTime(time.getMillis()); + return time; } @Override diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java index 4708e855b6a..2df9d6c98a7 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/PeriodGranularity.java @@ -27,6 +27,7 @@ import com.fasterxml.jackson.databind.JsonSerializable; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.jsontype.TypeSerializer; import com.google.common.base.Preconditions; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; import org.joda.time.Chronology; @@ -37,6 +38,7 @@ import org.joda.time.chrono.ISOChronology; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; +import javax.annotation.Nullable; import java.io.IOException; /** @@ -84,9 +86,10 @@ public class PeriodGranularity extends Granularity implements JsonSerializable } @JsonProperty("origin") + @Nullable public DateTime getOrigin() { - return hasOrigin ? new DateTime(origin) : null; + return hasOrigin ? DateTimes.utc(origin) : null; } // Used only for Segments. Not for Queries diff --git a/server/src/main/java/io/druid/client/selector/HostSelector.java b/java-util/src/main/java/io/druid/java/util/common/jackson/JacksonUtils.java similarity index 65% rename from server/src/main/java/io/druid/client/selector/HostSelector.java rename to java-util/src/main/java/io/druid/java/util/common/jackson/JacksonUtils.java index 3afafac4aa8..4f56c06aa23 100644 --- a/server/src/main/java/io/druid/client/selector/HostSelector.java +++ b/java-util/src/main/java/io/druid/java/util/common/jackson/JacksonUtils.java @@ -17,17 +17,18 @@ * under the License. 
*/ -package io.druid.client.selector; +package io.druid.java.util.common.jackson; -import io.druid.curator.discovery.ServerDiscoverySelector; -import io.druid.java.util.common.Pair; -import io.druid.query.Query; +import com.fasterxml.jackson.core.type.TypeReference; -/** - */ -public interface HostSelector +import java.util.Map; + +public class JacksonUtils { - public String getDefaultServiceName(); - - public Pair select(Query query); + public static final TypeReference> TYPE_REFERENCE_MAP_STRING_OBJECT = new TypeReference>() + { + }; + public static final TypeReference> TYPE_REFERENCE_MAP_STRING_STRING = new TypeReference>() + { + }; } diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java index fdff821e288..427f7ba3694 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/TimestampParser.java @@ -21,6 +21,7 @@ package io.druid.java.util.common.parsers; import com.google.common.base.Function; import com.google.common.base.Preconditions; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; @@ -29,6 +30,8 @@ import org.joda.time.format.DateTimeFormatterBuilder; import org.joda.time.format.DateTimeParser; import org.joda.time.format.ISODateTimeFormat; +import java.util.concurrent.TimeUnit; + public class TimestampParser { public static Function createTimestampParser( @@ -50,7 +53,7 @@ public class TimestampParser } } - return new DateTime(Long.parseLong(input)); + return DateTimes.utc(Long.parseLong(input)); } }; } else if (format.equalsIgnoreCase("iso")) { @@ -60,7 +63,7 @@ public class TimestampParser public DateTime apply(String input) { Preconditions.checkArgument(input != null && !input.isEmpty(), "null timestamp"); - return new DateTime(ParserUtils.stripQuotes(input)); + return DateTimes.of(ParserUtils.stripQuotes(input)); } }; } else if (format.equalsIgnoreCase("posix") @@ -118,7 +121,7 @@ public class TimestampParser @Override public DateTime apply(Number input) { - return new DateTime(input.longValue() * 1000); + return DateTimes.utc(TimeUnit.SECONDS.toMillis(input.longValue())); } }; } else if (format.equalsIgnoreCase("nano")) { @@ -127,7 +130,7 @@ public class TimestampParser @Override public DateTime apply(Number input) { - return new DateTime(input.longValue() / 1000000L); + return DateTimes.utc(TimeUnit.NANOSECONDS.toMillis(input.longValue())); } }; } else { @@ -136,7 +139,7 @@ public class TimestampParser @Override public DateTime apply(Number input) { - return new DateTime(input.longValue()); + return DateTimes.utc(input.longValue()); } }; } diff --git a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java index f801215106f..1a1be9f6566 100644 --- a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java @@ -28,6 +28,7 @@ import org.joda.time.DateTimeZone; import org.joda.time.IllegalFieldValueException; import org.joda.time.Interval; import org.joda.time.Period; +import org.joda.time.chrono.ISOChronology; import org.junit.Assert; import org.junit.Test; @@ -51,9 +52,9 @@ public class GranularityTest public void testHiveFormat() { PathDate[] secondChecks = { - new PathDate(new 
DateTime(2011, 3, 15, 20, 50, 43, 0), null, "dt=2011-03-15-20-50-43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "/dt=2011-03-15-20-50-43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "valid/dt=2011-03-15-20-50-43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "dt=2011-03-15-20-50-43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "/dt=2011-03-15-20-50-43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "valid/dt=2011-03-15-20-50-43/Test1"), new PathDate(null, null, "valid/dt=2011-03-15-20-50/Test2"), new PathDate(null, null, "valid/dt=2011-03-15-20/Test3"), new PathDate(null, null, "valid/dt=2011-03-15/Test4"), @@ -75,9 +76,9 @@ public class GranularityTest public void testSecondToDate() { PathDate[] secondChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/Test3"), new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), @@ -102,10 +103,10 @@ public class GranularityTest { PathDate[] minuteChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/Test3"), new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), new PathDate(null, null, "valid/y=2011/m=03/Test5"), @@ -114,7 +115,7 @@ public class GranularityTest new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. 
- new PathDate(new DateTime(2011, 10, 20, 20, 42, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 20, 42, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), @@ -129,18 +130,18 @@ public class GranularityTest { PathDate[] minuteChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 3, 15, 20, 00, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=00/Test2a"), - new PathDate(new DateTime(2011, 3, 15, 20, 00, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=14/Test2b"), - new PathDate(new DateTime(2011, 3, 15, 20, 15, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=15/Test2c"), - new PathDate(new DateTime(2011, 3, 15, 20, 15, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=29/Test2d"), - new PathDate(new DateTime(2011, 3, 15, 20, 30, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=30/Test2e"), - new PathDate(new DateTime(2011, 3, 15, 20, 30, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=44/Test2f"), - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=45/Test2g"), - new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=59/Test2h"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 20, 00, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=00/Test2a"), + new PathDate(new DateTime(2011, 3, 15, 20, 00, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=14/Test2b"), + new PathDate(new DateTime(2011, 3, 15, 20, 15, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=15/Test2c"), + new PathDate(new DateTime(2011, 3, 15, 20, 15, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=29/Test2d"), + new PathDate(new DateTime(2011, 3, 15, 20, 30, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=30/Test2e"), + new PathDate(new DateTime(2011, 3, 15, 20, 30, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=44/Test2f"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=45/Test2g"), + new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0, ISOChronology.getInstanceUTC()), null, 
"valid/y=2011/m=03/d=15/H=20/M=59/Test2h"), new PathDate(null, null, "valid/y=2011/m=03/d=15/H=20/Test3"), new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), new PathDate(null, null, "valid/y=2011/m=03/Test5"), @@ -149,7 +150,7 @@ public class GranularityTest new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. - new PathDate(new DateTime(2011, 10, 20, 20, 30, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 20, 30, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), @@ -163,11 +164,11 @@ public class GranularityTest public void testHourToDate() { PathDate[] hourChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/Test3"), new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), new PathDate(null, null, "valid/y=2011/m=03/Test5"), new PathDate(null, null, "valid/y=2011/Test6"), @@ -175,8 +176,8 @@ public class GranularityTest new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. 
- new PathDate(new DateTime(2011, 10, 20, 20, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), - new PathDate(new DateTime(2011, 10, 20, 20, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 20, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") @@ -189,11 +190,11 @@ public class GranularityTest public void testSixHourToDate() { PathDate[] hourChecks = { - new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/Test3"), new PathDate(null, null, "valid/y=2011/m=03/d=15/Test4"), new PathDate(null, null, "valid/y=2011/m=03/Test5"), new PathDate(null, null, "valid/y=2011/Test6"), @@ -201,14 +202,14 @@ public class GranularityTest new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. 
- new PathDate(new DateTime(2011, 10, 20, 18, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), - new PathDate(new DateTime(2011, 10, 20, 18, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=00/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=02/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 6, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=06/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 6, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=11/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 12, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=12/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 12, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=13/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 18, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=00/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=02/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 6, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=06/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 6, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=11/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 12, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=12/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 12, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=13/M=90/S=24/Test12"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") @@ -221,21 +222,21 @@ public class GranularityTest public void testDayToDate() { PathDate[] dayChecks = { - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), - new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, 
"valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 3, 15, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/Test4"), new PathDate(null, null, "valid/y=2011/m=03/Test5"), new PathDate(null, null, "valid/y=2011/Test6"), new PathDate(null, null, "null/y=/m=/d=/Test7"), new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. - new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), - new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 20, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") }; @@ -247,22 +248,22 @@ public class GranularityTest public void testMonthToDate() { PathDate[] monthChecks = { - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/Test4"), - new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/Test5"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(new DateTime(2011, 3, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/Test5"), new PathDate(null, null, "valid/y=2011/Test6"), new PathDate(null, null, "null/y=/m=/d=/Test7"), new PathDate(null, null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. 
- new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), - new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), - new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(new DateTime(2011, 10, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), new PathDate(null, IllegalFieldValueException.class, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") }; @@ -273,23 +274,23 @@ public class GranularityTest public void testYearToDate() { PathDate[] yearChecks = { - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/H=20/Test3"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/d=15/Test4"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/m=03/Test5"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "valid/y=2011/Test6"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/Test2"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/Test3"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/Test4"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/Test5"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/Test6"), new PathDate(null, null, "null/y=/m=/d=/Test7"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "null/m=10/y=2011/d=23/Test8"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "null/m=10/y=2011/d=23/Test8"), new PathDate(null, null, "null/Test9"), new PathDate(null, null, ""), //Test10 Intentionally empty. 
- new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), - new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0), null, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=42/S=72/Test11"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=20/M=90/S=24/Test12"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=20/H=42/M=42/S=24/Test13"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=10/d=33/H=20/M=42/S=24/Test14"), + new PathDate(new DateTime(2011, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()), null, "error/y=2011/m=13/d=20/H=20/M=42/S=24/Test15") }; checkToDate(YEAR, Granularity.Formatter.DEFAULT, yearChecks); } @@ -366,54 +367,54 @@ public class GranularityTest @Test public void testBucket() { - DateTime dt = new DateTime("2011-02-03T04:05:06.100"); + DateTime dt = DateTimes.of("2011-02-03T04:05:06.100"); - Assert.assertEquals(new Interval("2011-01-01/2012-01-01"), YEAR.bucket(dt)); - Assert.assertEquals(new Interval("2011-02-01/2011-03-01"), MONTH.bucket(dt)); - Assert.assertEquals(new Interval("2011-01-31/2011-02-07"), WEEK.bucket(dt)); - Assert.assertEquals(new Interval("2011-02-03/2011-02-04"), DAY.bucket(dt)); - Assert.assertEquals(new Interval("2011-02-03T04/2011-02-03T05"), HOUR.bucket(dt)); - Assert.assertEquals(new Interval("2011-02-03T04:05:00/2011-02-03T04:06:00"), MINUTE.bucket(dt)); - Assert.assertEquals(new Interval("2011-02-03T04:05:06/2011-02-03T04:05:07"), SECOND.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-01-01/2012-01-01"), YEAR.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-02-01/2011-03-01"), MONTH.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-01-31/2011-02-07"), WEEK.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-02-03/2011-02-04"), DAY.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-02-03T04/2011-02-03T05"), HOUR.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-02-03T04:05:00/2011-02-03T04:06:00"), MINUTE.bucket(dt)); + Assert.assertEquals(Intervals.of("2011-02-03T04:05:06/2011-02-03T04:05:07"), SECOND.bucket(dt)); // Test with aligned DateTime - Assert.assertEquals(new Interval("2011-01-01/2011-01-02"), DAY.bucket(new DateTime("2011-01-01"))); + Assert.assertEquals(Intervals.of("2011-01-01/2011-01-02"), DAY.bucket(DateTimes.of("2011-01-01"))); } @Test public void testTruncate() throws Exception { - DateTime date = new DateTime("2011-03-15T22:42:23.898"); - Assert.assertEquals(new DateTime("2011-01-01T00:00:00.000"), YEAR.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-01T00:00:00.000"), MONTH.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-14T00:00:00.000"), WEEK.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-15T00:00:00.000"), DAY.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-15T22:00:00.000"), HOUR.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-15T22:42:00.000"), 
MINUTE.bucketStart(date)); - Assert.assertEquals(new DateTime("2011-03-15T22:42:23.000"), SECOND.bucketStart(date)); + DateTime date = DateTimes.of("2011-03-15T22:42:23.898"); + Assert.assertEquals(DateTimes.of("2011-01-01T00:00:00.000"), YEAR.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-01T00:00:00.000"), MONTH.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-14T00:00:00.000"), WEEK.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-15T00:00:00.000"), DAY.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-15T22:00:00.000"), HOUR.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-15T22:42:00.000"), MINUTE.bucketStart(date)); + Assert.assertEquals(DateTimes.of("2011-03-15T22:42:23.000"), SECOND.bucketStart(date)); } @Test public void testGetIterable() throws Exception { - DateTime start = new DateTime("2011-01-01T00:00:00"); - DateTime end = new DateTime("2011-01-14T00:00:00"); + DateTime start = DateTimes.of("2011-01-01T00:00:00"); + DateTime end = DateTimes.of("2011-01-14T00:00:00"); Iterator intervals = DAY.getIterable(new Interval(start, end)).iterator(); - Assert.assertEquals(new Interval("2011-01-01/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-02/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-03/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-04/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-05/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-06/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-07/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-08/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-09/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-10/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-11/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-12/P1d"), intervals.next()); - Assert.assertEquals(new Interval("2011-01-13/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-01/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-02/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-03/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-04/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-05/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-06/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-07/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-08/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-09/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-10/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-11/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-12/P1d"), intervals.next()); + Assert.assertEquals(Intervals.of("2011-01-13/P1d"), intervals.next()); try { intervals.next(); @@ -427,9 +428,9 @@ public class GranularityTest public void testCustomPeriodToDate() { PathDate[] customChecks = { - new PathDate(new DateTime(2011, 3, 15, 20, 50, 42, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 42, 0), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), - new PathDate(new DateTime(2011, 3, 15, 20, 50, 42, 0), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1") + new PathDate(new DateTime(2011, 3, 15, 20, 
50, 42, 0, ISOChronology.getInstanceUTC()), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 42, 0, ISOChronology.getInstanceUTC()), null, "/y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), + new PathDate(new DateTime(2011, 3, 15, 20, 50, 42, 0, ISOChronology.getInstanceUTC()), null, "valid/y=2011/m=03/d=15/H=20/M=50/S=43/Test1") }; checkToDate(new PeriodGranularity(new Period("PT2S"), null, DateTimeZone.UTC), Granularity.Formatter.DEFAULT, customChecks); } diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java index e2714136ff9..c2346763ea8 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/ComparatorsTest.java @@ -19,6 +19,7 @@ package io.druid.java.util.common.guava; +import io.druid.java.util.common.Intervals; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -63,32 +64,32 @@ public class ComparatorsTest { Comparator comp = Comparators.intervalsByStartThenEnd(); - Assert.assertEquals(0, comp.compare(new Interval("P1d/2011-04-02"), new Interval("2011-04-01/2011-04-02"))); - Assert.assertEquals(-1, comp.compare(new Interval("2011-03-31/2011-04-02"), new Interval("2011-04-01/2011-04-02"))); - Assert.assertEquals(1, comp.compare(new Interval("2011-04-01/2011-04-02"), new Interval("2011-03-31/2011-04-02"))); - Assert.assertEquals(1, comp.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-01/2011-04-02"))); - Assert.assertEquals(-1, comp.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-01/2011-04-04"))); + Assert.assertEquals(0, comp.compare(Intervals.of("P1d/2011-04-02"), Intervals.of("2011-04-01/2011-04-02"))); + Assert.assertEquals(-1, comp.compare(Intervals.of("2011-03-31/2011-04-02"), Intervals.of("2011-04-01/2011-04-02"))); + Assert.assertEquals(1, comp.compare(Intervals.of("2011-04-01/2011-04-02"), Intervals.of("2011-03-31/2011-04-02"))); + Assert.assertEquals(1, comp.compare(Intervals.of("2011-04-01/2011-04-03"), Intervals.of("2011-04-01/2011-04-02"))); + Assert.assertEquals(-1, comp.compare(Intervals.of("2011-04-01/2011-04-03"), Intervals.of("2011-04-01/2011-04-04"))); Interval[] intervals = new Interval[]{ - new Interval("2011-04-01T18/2011-04-02T13"), - new Interval("2011-04-01/2011-04-03"), - new Interval("2011-04-01/2011-04-04"), - new Interval("2011-04-02/2011-04-04"), - new Interval("2011-04-01/2011-04-02"), - new Interval("2011-04-02/2011-04-03"), - new Interval("2011-04-02/2011-04-03T06") + Intervals.of("2011-04-01T18/2011-04-02T13"), + Intervals.of("2011-04-01/2011-04-03"), + Intervals.of("2011-04-01/2011-04-04"), + Intervals.of("2011-04-02/2011-04-04"), + Intervals.of("2011-04-01/2011-04-02"), + Intervals.of("2011-04-02/2011-04-03"), + Intervals.of("2011-04-02/2011-04-03T06") }; Arrays.sort(intervals, comp); Assert.assertArrayEquals( new Interval[]{ - new Interval("2011-04-01/2011-04-02"), - new Interval("2011-04-01/2011-04-03"), - new Interval("2011-04-01/2011-04-04"), - new Interval("2011-04-01T18/2011-04-02T13"), - new Interval("2011-04-02/2011-04-03"), - new Interval("2011-04-02/2011-04-03T06"), - new Interval("2011-04-02/2011-04-04"), + Intervals.of("2011-04-01/2011-04-02"), + Intervals.of("2011-04-01/2011-04-03"), + Intervals.of("2011-04-01/2011-04-04"), + Intervals.of("2011-04-01T18/2011-04-02T13"), + Intervals.of("2011-04-02/2011-04-03"), + 
Intervals.of("2011-04-02/2011-04-03T06"), + Intervals.of("2011-04-02/2011-04-04"), }, intervals ); @@ -99,32 +100,32 @@ public class ComparatorsTest { Comparator comp = Comparators.intervalsByEndThenStart(); - Assert.assertEquals(0, comp.compare(new Interval("P1d/2011-04-02"), new Interval("2011-04-01/2011-04-02"))); - Assert.assertEquals(-1, comp.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-01/2011-04-04"))); - Assert.assertEquals(1, comp.compare(new Interval("2011-04-01/2011-04-02"), new Interval("2011-04-01/2011-04-01"))); - Assert.assertEquals(-1, comp.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-04-02/2011-04-03"))); - Assert.assertEquals(1, comp.compare(new Interval("2011-04-01/2011-04-03"), new Interval("2011-03-31/2011-04-03"))); + Assert.assertEquals(0, comp.compare(Intervals.of("P1d/2011-04-02"), Intervals.of("2011-04-01/2011-04-02"))); + Assert.assertEquals(-1, comp.compare(Intervals.of("2011-04-01/2011-04-03"), Intervals.of("2011-04-01/2011-04-04"))); + Assert.assertEquals(1, comp.compare(Intervals.of("2011-04-01/2011-04-02"), Intervals.of("2011-04-01/2011-04-01"))); + Assert.assertEquals(-1, comp.compare(Intervals.of("2011-04-01/2011-04-03"), Intervals.of("2011-04-02/2011-04-03"))); + Assert.assertEquals(1, comp.compare(Intervals.of("2011-04-01/2011-04-03"), Intervals.of("2011-03-31/2011-04-03"))); Interval[] intervals = new Interval[]{ - new Interval("2011-04-01T18/2011-04-02T13"), - new Interval("2011-04-01/2011-04-03"), - new Interval("2011-04-01/2011-04-04"), - new Interval("2011-04-02/2011-04-04"), - new Interval("2011-04-01/2011-04-02"), - new Interval("2011-04-02/2011-04-03"), - new Interval("2011-04-02/2011-04-03T06") + Intervals.of("2011-04-01T18/2011-04-02T13"), + Intervals.of("2011-04-01/2011-04-03"), + Intervals.of("2011-04-01/2011-04-04"), + Intervals.of("2011-04-02/2011-04-04"), + Intervals.of("2011-04-01/2011-04-02"), + Intervals.of("2011-04-02/2011-04-03"), + Intervals.of("2011-04-02/2011-04-03T06") }; Arrays.sort(intervals, comp); Assert.assertArrayEquals( new Interval[]{ - new Interval("2011-04-01/2011-04-02"), - new Interval("2011-04-01T18/2011-04-02T13"), - new Interval("2011-04-01/2011-04-03"), - new Interval("2011-04-02/2011-04-03"), - new Interval("2011-04-02/2011-04-03T06"), - new Interval("2011-04-01/2011-04-04"), - new Interval("2011-04-02/2011-04-04") + Intervals.of("2011-04-01/2011-04-02"), + Intervals.of("2011-04-01T18/2011-04-02T13"), + Intervals.of("2011-04-01/2011-04-03"), + Intervals.of("2011-04-02/2011-04-03"), + Intervals.of("2011-04-02/2011-04-03T06"), + Intervals.of("2011-04-01/2011-04-04"), + Intervals.of("2011-04-02/2011-04-04") }, intervals ); diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java index 35ce86e1c9d..2c0ae1eea24 100644 --- a/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java @@ -20,7 +20,9 @@ package io.druid.java.util.common.parsers; import com.google.common.base.Function; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -42,15 +44,15 @@ public class TimestampParserTest public void testAuto() throws Exception { final Function parser = TimestampParser.createObjectTimestampParser("auto"); - 
Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("1234567890000")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("2009-02-13T23:31:30Z")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30-08:00"), parser.apply("2009-02-13T23:31:30-08:00")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("2009-02-13 23:31:30Z")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30-08:00"), parser.apply("2009-02-13 23:31:30-08:00")); - Assert.assertEquals(new DateTime("2009-02-13T00:00:00Z"), parser.apply("2009-02-13")); - Assert.assertEquals(new DateTime("2009-02-13T00:00:00Z"), parser.apply("\"2009-02-13\"")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("2009-02-13 23:31:30")); - Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply(1234567890000L)); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply("1234567890000")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply("2009-02-13T23:31:30Z")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30-08:00"), parser.apply("2009-02-13T23:31:30-08:00")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply("2009-02-13 23:31:30Z")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30-08:00"), parser.apply("2009-02-13 23:31:30-08:00")); + Assert.assertEquals(DateTimes.of("2009-02-13T00:00:00Z"), parser.apply("2009-02-13")); + Assert.assertEquals(DateTimes.of("2009-02-13T00:00:00Z"), parser.apply("\"2009-02-13\"")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply("2009-02-13 23:31:30")); + Assert.assertEquals(DateTimes.of("2009-02-13T23:31:30Z"), parser.apply(1234567890000L)); } @Test @@ -75,116 +77,88 @@ public class TimestampParserTest public void testRuby() throws Exception { final Function parser = TimestampParser.createObjectTimestampParser("ruby"); - Assert.assertEquals(new DateTime("2013-01-16T15:41:47+01:00"), parser.apply("1358347307.435447")); - Assert.assertEquals(new DateTime("2013-01-16T15:41:47+01:00"), parser.apply(1358347307.435447D)); + Assert.assertEquals(DateTimes.of("2013-01-16T15:41:47+01:00"), parser.apply("1358347307.435447")); + Assert.assertEquals(DateTimes.of("2013-01-16T15:41:47+01:00"), parser.apply(1358347307.435447D)); } @Test public void testNano() throws Exception { String timeNsStr = "1427504794977098494"; - DateTime expectedDt = new DateTime("2015-3-28T01:06:34.977Z"); + DateTime expectedDt = DateTimes.of("2015-3-28T01:06:34.977Z"); final Function parser = TimestampParser.createObjectTimestampParser("nano"); Assert.assertEquals("Incorrect truncation of nanoseconds -> milliseconds", expectedDt, parser.apply(timeNsStr)); // Confirm sub-millisecond timestamps are handled correctly - expectedDt = new DateTime("1970-1-1T00:00:00.000Z"); + expectedDt = DateTimes.of("1970-1-1T00:00:00.000Z"); Assert.assertEquals(expectedDt, parser.apply("999999")); Assert.assertEquals(expectedDt, parser.apply("0")); Assert.assertEquals(expectedDt, parser.apply("0000")); Assert.assertEquals(expectedDt, parser.apply(999999L)); } - /*Commenting out until Joda 2.1 supported @Test - public void testTimeStampParserWithQuotes() throws Exception { + public void testTimeStampParserWithQuotes() throws Exception + { DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); - Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy"); + Function parser = TimestampParser.createTimestampParser("EEE 
MMM dd HH:mm:ss z yyyy"); Assert.assertEquals(d.getMillis(), parser.apply(" \" Wed Nov 9 04:00:00 PST 1994 \" ").getMillis()); } @Test - public void testTimeStampParserWithShortTimeZone() throws Exception { + public void testTimeStampParserWithShortTimeZone() throws Exception + { DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); - Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy"); + Function parser = TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy"); Assert.assertEquals(d.getMillis(), parser.apply("Wed Nov 9 04:00:00 PST 1994").getMillis()); } @Test - public void testTimeStampParserWithLongTimeZone() throws Exception { + public void testTimeStampParserWithLongTimeZone() throws Exception + { long millis1 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)).getMillis(); long millis2 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-6)).getMillis(); - Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ z yyyy"); + Function parser = TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss zZ z yyyy"); Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 PST 1994").getMillis()); Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 CST 1994").getMillis()); Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 UTC-0800 PST 1994").getMillis()); Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 UTC-0600 CST 1994").getMillis()); - parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ yyyy"); + parser = TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss zZ yyyy"); Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 1994").getMillis()); Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 1994").getMillis()); Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 UTC-0800 1994").getMillis()); Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 UTC-0600 1994").getMillis()); - - parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ Q yyyy"); - Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 (PST) 1994").getMillis()); - Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 (CST) 1994").getMillis()); - Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 UTC-0800 (PST) 1994").getMillis()); - Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 UTC-0600 (CST) 1994").getMillis()); - } @Test - public void testTimeZoneAtExtremeLocations() throws Exception { - Function parser = ParserUtils.createTimestampParser("EEE MMM dd yy HH:mm:ss zZ z"); + public void testTimeZoneAtExtremeLocations() throws Exception + { + Function parser = TimestampParser.createTimestampParser("EEE MMM dd yy HH:mm:ss zZ z"); Assert.assertEquals(new DateTime(2005, 1, 22, 13, 0, DateTimeZone.forOffsetHours(-6)).getMillis(), parser.apply("Sat Jan 22 05 13:00:00 GMT-0600 CST").getMillis()); - parser = ParserUtils.createTimestampParser("zZ z EEE MMM dd yy HH:mm:ss"); + parser = TimestampParser.createTimestampParser("zZ z EEE MMM dd yy HH:mm:ss"); Assert.assertEquals(new DateTime(2005, 1, 22, 13, 0, DateTimeZone.forOffsetHours(-6)).getMillis(), parser.apply("GMT-0600 CST Sat Jan 22 05 13:00:00").getMillis()); } - */ - - /** - * This test case checks a potentially fragile behavior - * Some timestamps will come to us in the form of GMT-OFFSET (Time Zone Abbreviation) - * The number of time zone abbreviations is long 
and what they mean can change - * If the offset is explicitly provided via GMT-OFFSET, we want Joda to use this instead - * of the time zone abbreviation - * @throws Exception - */ - /*@Test - public void testOffsetPriority() throws Exception { - long millis1 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)).getMillis(); - long millis2 = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-6)).getMillis(); - - Function parser = ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss zZ Q yyyy"); - - //Test timestamps that have an incorrect time zone abbreviation for the GMT offset. - //Joda should use the offset and not use the time zone abbreviation - Assert.assertEquals(millis1, parser.apply("Wed Nov 9 04:00:00 GMT-0800 (ADT) 1994").getMillis()); - Assert.assertEquals(millis2, parser.apply("Wed Nov 9 04:00:00 GMT-0600 (MDT) 1994").getMillis()); - } @Test - public void testJodaSymbolInsideLiteral() throws Exception { + public void testJodaSymbolInsideLiteral() throws Exception + { DateTime d = new DateTime(1994, 11, 9, 4, 0, DateTimeZone.forOffsetHours(-8)); Assert.assertEquals(d.getMillis(), - ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy 'helloz'") + TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss z yyyy 'helloz'") .apply("Wed Nov 9 04:00:00 PST 1994 helloz") .getMillis() ); Assert.assertEquals(d.getMillis(), - ParserUtils.createTimestampParser("EEE MMM dd HH:mm:ss 'helloz' z yyyy 'hello'") + TimestampParser.createTimestampParser("EEE MMM dd HH:mm:ss 'helloz' z yyyy 'hello'") .apply("Wed Nov 9 04:00:00 helloz PST 1994 hello") .getMillis() ); - }*/ - - - + } } diff --git a/pom.xml b/pom.xml index ea830e23aa0..e04d669e421 100644 --- a/pom.xml +++ b/pom.xml @@ -58,6 +58,8 @@ + 1.8 + 2.11.0 1.9.0 @@ -908,6 +910,46 @@ + + de.thetaphi + forbiddenapis + 2.3 + + + + jdk-unsafe + + + ${session.executionRootDirectory}/codestyle/joda-time-forbidden-apis.txt + + + + + validate + validate + + check + + + + + jdk-unsafe + jdk-system-out + + + + + testValidate + validate + + testCheck + + + + org.codehaus.mojo animal-sniffer-maven-plugin @@ -1094,8 +1136,8 @@ org.apache.maven.plugins maven-compiler-plugin - 1.8 - 1.8 + ${maven.compiler.target} + ${maven.compiler.target} @@ -1116,8 +1158,8 @@ true 1024m 3000m - 1.8 - 1.8 + ${maven.compiler.target} + ${maven.compiler.target} false -XepDisableWarningsInGeneratedCode @@ -1196,7 +1238,7 @@ false - -Xmx1024m -Duser.language=en -Duser.country=US -Dfile.encoding=UTF-8 + -Xmx768m -Duser.language=en -Duser.country=US -Dfile.encoding=UTF-8 -Duser.timezone=UTC -Djava.util.logging.manager=org.apache.logging.log4j.jul.LogManager diff --git a/processing/src/main/java/io/druid/jackson/JodaStuff.java b/processing/src/main/java/io/druid/jackson/JodaStuff.java index 380e9ef1a82..43e569ab9e7 100644 --- a/processing/src/main/java/io/druid/jackson/JodaStuff.java +++ b/processing/src/main/java/io/druid/jackson/JodaStuff.java @@ -30,6 +30,8 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; import com.fasterxml.jackson.datatype.joda.deser.DurationDeserializer; import com.fasterxml.jackson.datatype.joda.deser.PeriodDeserializer; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Interval; @@ -72,7 +74,7 @@ class JodaStuff public Interval deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws 
IOException, JsonProcessingException { - return new Interval(jsonParser.getText()); + return Intervals.of(jsonParser.getText()); } } @@ -81,7 +83,7 @@ class JodaStuff @Override public Object deserializeKey(String key, DeserializationContext ctxt) throws IOException, JsonProcessingException { - return new DateTime(key); + return DateTimes.of(key); } } @@ -98,7 +100,7 @@ class JodaStuff { JsonToken t = jp.getCurrentToken(); if (t == JsonToken.VALUE_NUMBER_INT) { - return new DateTime(jp.getLongValue()); + return DateTimes.utc(jp.getLongValue()); } if (t == JsonToken.VALUE_STRING) { String str = jp.getText().trim(); diff --git a/processing/src/main/java/io/druid/query/BaseQuery.java b/processing/src/main/java/io/druid/query/BaseQuery.java index 8869508807f..a7cd66d1155 100644 --- a/processing/src/main/java/io/druid/query/BaseQuery.java +++ b/processing/src/main/java/io/druid/query/BaseQuery.java @@ -24,6 +24,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Ordering; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.query.spec.QuerySegmentSpec; import org.joda.time.Duration; import org.joda.time.Interval; @@ -33,6 +34,7 @@ import java.util.Map; /** */ +@ExtensionPoint public abstract class BaseQuery> implements Query { public static void checkInterrupted() diff --git a/processing/src/main/java/io/druid/query/BySegmentResultValue.java b/processing/src/main/java/io/druid/query/BySegmentResultValue.java index a8758d44339..6259a3401f3 100644 --- a/processing/src/main/java/io/druid/query/BySegmentResultValue.java +++ b/processing/src/main/java/io/druid/query/BySegmentResultValue.java @@ -19,12 +19,14 @@ package io.druid.query; +import io.druid.guice.annotations.PublicApi; import org.joda.time.Interval; import java.util.List; /** */ +@PublicApi public interface BySegmentResultValue { public List getResults(); diff --git a/processing/src/main/java/io/druid/query/CacheStrategy.java b/processing/src/main/java/io/druid/query/CacheStrategy.java index 8e14947b406..95681dd1b06 100644 --- a/processing/src/main/java/io/druid/query/CacheStrategy.java +++ b/processing/src/main/java/io/druid/query/CacheStrategy.java @@ -21,11 +21,13 @@ package io.druid.query; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Function; +import io.druid.guice.annotations.ExtensionPoint; import java.util.concurrent.ExecutorService; /** */ +@ExtensionPoint public interface CacheStrategy> { /** diff --git a/processing/src/main/java/io/druid/query/Druids.java b/processing/src/main/java/io/druid/query/Druids.java index 0c942a7a3cc..1d074d4efb9 100644 --- a/processing/src/main/java/io/druid/query/Druids.java +++ b/processing/src/main/java/io/druid/query/Druids.java @@ -24,6 +24,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.aggregation.AggregatorFactory; @@ -894,7 +895,7 @@ public class Druids public ResultBuilder() { - timestamp = new DateTime(0); + timestamp = DateTimes.EPOCH; value = null; } diff --git a/processing/src/main/java/io/druid/query/GenericQueryMetricsFactory.java b/processing/src/main/java/io/druid/query/GenericQueryMetricsFactory.java 
index 5882d9c7922..6fd1e79174f 100644 --- a/processing/src/main/java/io/druid/query/GenericQueryMetricsFactory.java +++ b/processing/src/main/java/io/druid/query/GenericQueryMetricsFactory.java @@ -19,6 +19,8 @@ package io.druid.query; +import io.druid.guice.annotations.PublicApi; + /** * This factory is used for DI of custom {@link QueryMetrics} implementations for all query types, which don't (yet) * need to emit custom dimensions and/or metrics, i. e. they are good with the generic {@link QueryMetrics} interface. @@ -32,7 +34,11 @@ package io.druid.query; * * And then setting property: * druid.query.generic.queryMetricsFactory=myCustomGenericQueryMetricsFactory + * + * Unlike {@link QueryMetrics} itself, this interface is considered stable and is expected to be injected into custom + * Query extensions that do not want to worry about the potential instability of {@link QueryMetrics}. */ +@PublicApi public interface GenericQueryMetricsFactory { /** diff --git a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java index f4d3467fb31..26422510540 100644 --- a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java +++ b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunner.java @@ -22,6 +22,7 @@ package io.druid.query; import com.google.common.base.Function; import com.google.common.collect.Lists; import com.metamx.emitter.service.ServiceEmitter; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.guava.Sequence; @@ -41,8 +42,6 @@ import java.util.concurrent.ExecutorService; */ public class IntervalChunkingQueryRunner implements QueryRunner { - private static final DateTime EPOCH = new DateTime(0L); - private final QueryRunner baseRunner; private final QueryToolChest> toolChest; @@ -68,7 +67,7 @@ public class IntervalChunkingQueryRunner implements QueryRunner final Period chunkPeriod = getChunkPeriod(queryPlus.getQuery()); // Check for non-empty chunkPeriod, avoiding toStandardDuration since that cannot handle periods like P1M. 
- if (EPOCH.plus(chunkPeriod).getMillis() == EPOCH.getMillis()) { + if (DateTimes.EPOCH.plus(chunkPeriod).getMillis() == DateTimes.EPOCH.getMillis()) { return baseRunner.run(queryPlus, responseContext); } @@ -124,7 +123,7 @@ public class IntervalChunkingQueryRunner implements QueryRunner ); } - private Iterable splitInterval(Interval interval, Period period) + private static Iterable splitInterval(Interval interval, Period period) { if (interval.getEndMillis() == interval.getStartMillis()) { return Lists.newArrayList(interval); @@ -133,15 +132,15 @@ public class IntervalChunkingQueryRunner implements QueryRunner List intervals = Lists.newArrayList(); Iterator timestamps = new PeriodGranularity(period, null, null).getIterable(interval).iterator(); - long start = Math.max(timestamps.next().getStartMillis(), interval.getStartMillis()); + DateTime start = DateTimes.max(timestamps.next().getStart(), interval.getStart()); while (timestamps.hasNext()) { - long end = timestamps.next().getStartMillis(); + DateTime end = timestamps.next().getStart(); intervals.add(new Interval(start, end)); start = end; } - if (start < interval.getEndMillis()) { - intervals.add(new Interval(start, interval.getEndMillis())); + if (start.compareTo(interval.getEnd()) < 0) { + intervals.add(new Interval(start, interval.getEnd())); } return intervals; diff --git a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunnerDecorator.java b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunnerDecorator.java index 9a2df39a365..5f7e10c5bcc 100644 --- a/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunnerDecorator.java +++ b/processing/src/main/java/io/druid/query/IntervalChunkingQueryRunnerDecorator.java @@ -22,6 +22,7 @@ package io.druid.query; import com.google.inject.Inject; import com.metamx.emitter.service.ServiceEmitter; import io.druid.guice.annotations.Processing; +import io.druid.guice.annotations.PublicApi; import java.util.concurrent.ExecutorService; @@ -40,6 +41,7 @@ public class IntervalChunkingQueryRunnerDecorator this.emitter = emitter; } + @PublicApi public QueryRunner decorate(QueryRunner delegate, QueryToolChest> toolChest) { return new IntervalChunkingQueryRunner(delegate, (QueryToolChest>) toolChest, diff --git a/processing/src/main/java/io/druid/query/Queries.java b/processing/src/main/java/io/druid/query/Queries.java index 519ef2acc65..b372407d50d 100644 --- a/processing/src/main/java/io/druid/query/Queries.java +++ b/processing/src/main/java/io/druid/query/Queries.java @@ -22,6 +22,7 @@ package io.druid.query; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import io.druid.guice.annotations.PublicApi; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; @@ -34,6 +35,7 @@ import java.util.Set; /** */ +@PublicApi public class Queries { public static List decoratePostAggregators( diff --git a/processing/src/main/java/io/druid/query/Query.java b/processing/src/main/java/io/druid/query/Query.java index e8c62962f53..9e0d8a0f658 100644 --- a/processing/src/main/java/io/druid/query/Query.java +++ b/processing/src/main/java/io/druid/query/Query.java @@ -22,6 +22,7 @@ package io.druid.query; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.google.common.collect.Ordering; +import io.druid.guice.annotations.ExtensionPoint; import 
io.druid.query.datasourcemetadata.DataSourceMetadataQuery; import io.druid.query.filter.DimFilter; import io.druid.query.groupby.GroupByQuery; @@ -38,6 +39,7 @@ import org.joda.time.Interval; import java.util.List; import java.util.Map; +@ExtensionPoint @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "queryType") @JsonSubTypes(value = { @JsonSubTypes.Type(name = Query.TIMESERIES, value = TimeseriesQuery.class), diff --git a/processing/src/main/java/io/druid/query/QueryContexts.java b/processing/src/main/java/io/druid/query/QueryContexts.java index 2e9dfdd9b68..b56812d8b0e 100644 --- a/processing/src/main/java/io/druid/query/QueryContexts.java +++ b/processing/src/main/java/io/druid/query/QueryContexts.java @@ -21,9 +21,11 @@ package io.druid.query; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +@PublicApi public class QueryContexts { public static final String PRIORITY_KEY = "priority"; diff --git a/processing/src/main/java/io/druid/query/QueryMetrics.java b/processing/src/main/java/io/druid/query/QueryMetrics.java index 86c29fe8b72..7eab88c5af1 100644 --- a/processing/src/main/java/io/druid/query/QueryMetrics.java +++ b/processing/src/main/java/io/druid/query/QueryMetrics.java @@ -78,6 +78,9 @@ import java.util.List; * dimension or metric is useful and not very expensive to process and store then emit, skip (see above Goals, 1.) * otherwise. * + *

This interface can be extended, but is not marked as an {@code ExtensionPoint}, because it may change in breaking + * ways even in minor releases. + * *

If implementors of custom QueryMetrics don't want to fix builds on every Druid release (e. g. if they want to add * a single dimension to emitted events and don't want to alter other dimensions and emitted metrics), they could * inherit their custom QueryMetrics from {@link DefaultQueryMetrics} or query-specific default implementation class, diff --git a/processing/src/main/java/io/druid/query/QueryPlus.java b/processing/src/main/java/io/druid/query/QueryPlus.java index c6cfe6f89f8..63d2e8abe87 100644 --- a/processing/src/main/java/io/druid/query/QueryPlus.java +++ b/processing/src/main/java/io/druid/query/QueryPlus.java @@ -20,6 +20,7 @@ package io.druid.query; import com.google.common.base.Preconditions; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.guava.Sequence; import io.druid.query.spec.QuerySegmentSpec; @@ -30,6 +31,7 @@ import java.util.Map; * An immutable composite object of {@link Query} + extra stuff needed in {@link QueryRunner}s. This "extra stuff" * is only {@link QueryMetrics} yet. */ +@PublicApi public final class QueryPlus { /** diff --git a/processing/src/main/java/io/druid/query/QueryRunner.java b/processing/src/main/java/io/druid/query/QueryRunner.java index cf9366451e0..b9cbf5244f6 100644 --- a/processing/src/main/java/io/druid/query/QueryRunner.java +++ b/processing/src/main/java/io/druid/query/QueryRunner.java @@ -19,10 +19,12 @@ package io.druid.query; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.java.util.common.guava.Sequence; import java.util.Map; +@ExtensionPoint public interface QueryRunner { /** diff --git a/processing/src/main/java/io/druid/query/QueryRunnerFactory.java b/processing/src/main/java/io/druid/query/QueryRunnerFactory.java index 3eb2eae4e0a..cc1419be978 100644 --- a/processing/src/main/java/io/druid/query/QueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/QueryRunnerFactory.java @@ -19,6 +19,7 @@ package io.druid.query; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.segment.Segment; import java.util.concurrent.ExecutorService; @@ -26,6 +27,7 @@ import java.util.concurrent.ExecutorService; /** * An interface that defines the nitty gritty implementation detauls of a Query on a Segment */ +@ExtensionPoint public interface QueryRunnerFactory> { /** diff --git a/processing/src/main/java/io/druid/query/QueryToolChest.java b/processing/src/main/java/io/druid/query/QueryToolChest.java index c41bb86c438..9cb78c5d98c 100644 --- a/processing/src/main/java/io/druid/query/QueryToolChest.java +++ b/processing/src/main/java/io/druid/query/QueryToolChest.java @@ -21,6 +21,7 @@ package io.druid.query; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Function; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.query.aggregation.MetricManipulationFn; import io.druid.timeline.LogicalSegment; @@ -28,10 +29,9 @@ import javax.annotation.Nullable; import java.util.List; /** - * The broker-side (also used by server in some cases) API for a specific Query type. This API is still undergoing - * evolution and is only semi-stable, so proprietary Query implementations should be ready for the potential - * maintenance burden when upgrading versions. + * The broker-side (also used by server in some cases) API for a specific Query type. 
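To make the QueryMetrics guidance above concrete: an extension that only needs one extra dimension would normally subclass DefaultQueryMetrics instead of implementing the whole interface, and expose it through a GenericQueryMetricsFactory bound via the druid.query.generic.queryMetricsFactory property mentioned earlier. The sketch below is a shape, not a drop-in class: the DefaultQueryMetrics constructor argument and the protected setDimension helper it relies on are assumptions about the existing base class, not anything introduced by this patch.

import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.query.DefaultQueryMetrics;
import io.druid.query.Query;

// Hypothetical subclass adding a single "datacenter" dimension on top of the defaults.
// Assumes DefaultQueryMetrics(ObjectMapper) and a protected setDimension(String, String)
// helper exist on the base class; verify both against the actual source before copying.
public class DatacenterQueryMetrics<QueryType extends Query<?>> extends DefaultQueryMetrics<QueryType>
{
  public DatacenterQueryMetrics(ObjectMapper jsonMapper)
  {
    super(jsonMapper);
  }

  public void datacenter(String dataCenterName)
  {
    setDimension("datacenter", dataCenterName);   // emitted alongside the default dimensions
  }
}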
*/ +@ExtensionPoint public abstract class QueryToolChest> { /** diff --git a/processing/src/main/java/io/druid/query/Result.java b/processing/src/main/java/io/druid/query/Result.java index 0a23ad625f4..10afea9c23b 100644 --- a/processing/src/main/java/io/druid/query/Result.java +++ b/processing/src/main/java/io/druid/query/Result.java @@ -21,12 +21,14 @@ package io.druid.query; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import io.druid.guice.annotations.PublicApi; import org.joda.time.DateTime; import java.util.function.Function; /** */ +@PublicApi public class Result implements Comparable> { public static String MISSING_SEGMENTS_KEY = "missingSegments"; diff --git a/processing/src/main/java/io/druid/query/ResultGranularTimestampComparator.java b/processing/src/main/java/io/druid/query/ResultGranularTimestampComparator.java index 98953a5687c..8bf6c9e8fed 100644 --- a/processing/src/main/java/io/druid/query/ResultGranularTimestampComparator.java +++ b/processing/src/main/java/io/druid/query/ResultGranularTimestampComparator.java @@ -21,12 +21,14 @@ package io.druid.query; import com.google.common.collect.Ordering; import com.google.common.primitives.Longs; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.granularity.Granularity; import java.util.Comparator; /** */ +@PublicApi public class ResultGranularTimestampComparator implements Comparator> { private final Granularity gran; diff --git a/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java b/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java index 04c6e550dd7..7840a7de5d5 100644 --- a/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ResultMergeQueryRunner.java @@ -21,6 +21,7 @@ package io.druid.query; import com.google.common.collect.Ordering; import io.druid.common.guava.CombiningSequence; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.nary.BinaryFn; @@ -28,6 +29,7 @@ import java.util.Map; /** */ +@PublicApi public abstract class ResultMergeQueryRunner extends BySegmentSkippingQueryRunner { public ResultMergeQueryRunner( diff --git a/processing/src/main/java/io/druid/query/TimewarpOperator.java b/processing/src/main/java/io/druid/query/TimewarpOperator.java index 5f45a76192b..64ed0cdd9af 100644 --- a/processing/src/main/java/io/druid/query/TimewarpOperator.java +++ b/processing/src/main/java/io/druid/query/TimewarpOperator.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Function; import io.druid.data.input.MapBasedRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.spec.MultipleIntervalSegmentSpec; @@ -69,7 +70,7 @@ public class TimewarpOperator implements PostProcessingOperator @Override public QueryRunner postProcess(QueryRunner baseQueryRunner) { - return postProcess(baseQueryRunner, DateTime.now().getMillis()); + return postProcess(baseQueryRunner, DateTimes.nowUtc().getMillis()); } public QueryRunner postProcess(final QueryRunner baseRunner, final long now) @@ -84,7 +85,8 @@ public class TimewarpOperator implements PostProcessingOperator final Interval interval = queryPlus.getQuery().getIntervals().get(0); final Interval 
modifiedInterval = new Interval( Math.min(interval.getStartMillis() + offset, now + offset), - Math.min(interval.getEndMillis() + offset, now + offset) + Math.min(interval.getEndMillis() + offset, now + offset), + interval.getChronology() ); return Sequences.map( baseRunner.run( @@ -113,9 +115,9 @@ public class TimewarpOperator implements PostProcessingOperator final DateTime maxTime = boundary.getMaxTime(); return (T) ((TimeBoundaryQuery) queryPlus.getQuery()).buildResult( - new DateTime(Math.min(res.getTimestamp().getMillis() - offset, now)), + DateTimes.utc(Math.min(res.getTimestamp().getMillis() - offset, now)), minTime != null ? minTime.minus(offset) : null, - maxTime != null ? new DateTime(Math.min(maxTime.getMillis() - offset, now)) : null + maxTime != null ? DateTimes.utc(Math.min(maxTime.getMillis() - offset, now)) : null ).iterator().next(); } return (T) new Result(res.getTimestamp().minus(offset), value); diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java b/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java new file mode 100644 index 00000000000..e873c82ed94 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/AggregateCombiner.java @@ -0,0 +1,69 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation; + +import io.druid.segment.ColumnValueSelector; + +/** + * AggregateCombiner is used to fold rollup aggregation results from several "rows" of different indexes during index + * merging (see {@link io.druid.segment.IndexMerger}). + * + * The state of the implementations of this interface is an aggregation value (either a primitive or an object), that + * could be queried via {@link ColumnValueSelector}'s methods. Before {@link #reset} is ever called on an + * AggregateCombiner, its state is undefined and {@link ColumnValueSelector}'s methods could return something random, + * or null, or throw an exception. + * + * This interface would probably better be called "AggregateFolder", but somebody may confuse it with "folder" as + * "directory" synonym. + * + * @see AggregatorFactory#makeAggregateCombiner() + * @see LongAggregateCombiner + * @see DoubleAggregateCombiner + * @see ObjectAggregateCombiner + */ +public interface AggregateCombiner extends ColumnValueSelector +{ + /** + * Resets this AggregateCombiner's state value to the value of the given selector, e. g. after calling this method + * combiner.get*() should return the same value as selector.get*(). 
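As a rough illustration of the reset/fold contract spelled out in this javadoc, the merge-time calling pattern amounts to the loop below. This is a sketch, not code from this patch: it leans on the DoubleAggregateCombiner specialization added further down in the diff, and the selector list merely stands in for whatever per-index selectors IndexMerger actually supplies.

import io.druid.query.aggregation.DoubleAggregateCombiner;
import io.druid.segment.ColumnValueSelector;
import java.util.List;

// Illustrative only: fold one value per "row" of different indexes into a single state.
final class AggregateCombinerSketch
{
  static double foldDoubles(DoubleAggregateCombiner combiner, List<ColumnValueSelector> selectors)
  {
    combiner.reset(selectors.get(0));        // seed the state from the first selector
    for (int i = 1; i < selectors.size(); i++) {
      combiner.fold(selectors.get(i));       // same result as combine(currentState, selectorValue)
    }
    return combiner.getDouble();             // folded state is read back through selector methods
  }
}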
+ * + * If the selector is an {@link io.druid.segment.ObjectColumnSelector}, the object returned from {@link + * io.druid.segment.ObjectColumnSelector#get()} must not be modified, and must not become a subject for modification + * during subsequent {@link #fold} calls. + */ + void reset(ColumnValueSelector selector); + + /** + * Folds this AggregateCombiner's state value with the value of the given selector and saves it in this + * AggregateCombiner's state, e. g. after calling combiner.fold(selector), combiner.get*() should return the value + * that would be the result of {@link AggregatorFactory#combine + * aggregatorFactory.combine(combiner.get*(), selector.get*())} call. + * + * Unlike {@link AggregatorFactory#combine}, if the selector is an {@link io.druid.segment.ObjectColumnSelector}, the + * object returned from {@link io.druid.segment.ObjectColumnSelector#get()} must not be modified, and must not become + * a subject for modification during subsequent fold() calls. + * + * Since the state of AggregateCombiner is undefined before {@link #reset} is ever called on it, the effects of + * calling fold() are also undefined in this case. + * + * @see AggregatorFactory#combine + */ + void fold(ColumnValueSelector selector); +} diff --git a/processing/src/main/java/io/druid/query/aggregation/Aggregator.java b/processing/src/main/java/io/druid/query/aggregation/Aggregator.java index fb4689fe009..d7fba1e3c5c 100644 --- a/processing/src/main/java/io/druid/query/aggregation/Aggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/Aggregator.java @@ -19,6 +19,8 @@ package io.druid.query.aggregation; +import io.druid.guice.annotations.ExtensionPoint; + import java.io.Closeable; /** @@ -27,13 +29,10 @@ import java.io.Closeable; * it can use to get at the next bit of data. * * Thus, an Aggregator can be thought of as a closure over some other thing that is stateful and changes between calls - * to aggregate(). This is currently (as of this documentation) implemented through the use of Offset and - * FloatColumnSelector objects. The Aggregator has a handle on a FloatColumnSelector object which has a handle on an Offset. - * QueryableIndex has both the Aggregators and the Offset object and iterates through the Offset calling the aggregate() - * method on the Aggregators for each applicable row. - * - * This interface is old and going away. It is being replaced by BufferAggregator + * to aggregate(). This is currently (as of this documentation) implemented through the use of {@link + * io.druid.segment.ColumnValueSelector} objects. 
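The closure-over-a-selector idea in this javadoc reduces to the calling pattern sketched below. This is illustrative, not code from this patch: AggregatorFactory#factorize (referenced just below) builds the Aggregator over a ColumnSelectorFactory, while row advancement and close() are deliberately elided because they belong to the cursor that owns the selectors.

import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.segment.ColumnSelectorFactory;

// Sketch of the intended calling pattern, not a real scan implementation.
final class AggregatorSketch
{
  static Object scan(AggregatorFactory factory, ColumnSelectorFactory columnSelectorFactory, int numRows)
  {
    Aggregator agg = factory.factorize(columnSelectorFactory);
    for (int i = 0; i < numRows; i++) {
      agg.aggregate();   // reads the current row through the selectors the aggregator was built over
      // advancing to the next row is the cursor's job and is omitted here
    }
    return agg.get();    // intermediate result, later merged via AggregatorFactory#combine
  }
}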
*/ +@ExtensionPoint public interface Aggregator extends Closeable { void aggregate(); diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java index 388ad911427..f4e67c34e26 100644 --- a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java @@ -19,26 +19,24 @@ package io.druid.query.aggregation; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.java.util.common.Cacheable; import io.druid.java.util.common.UOE; import io.druid.java.util.common.logger.Logger; import io.druid.segment.ColumnSelectorFactory; +import javax.annotation.Nullable; import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** - * Processing related interface - * - * An AggregatorFactory is an object that knows how to generate an Aggregator using a ColumnSelectorFactory. - * - * This is useful as an abstraction to allow Aggregator classes to be written in terms of MetricSelector objects - * without making any assumptions about how they are pulling values out of the base data. That is, the data is - * provided to the Aggregator through the MetricSelector object, so whatever creates that object gets to choose how - * the data is actually stored and accessed. + * AggregatorFactory is a strategy (in terms of Design Patterns) that represents column aggregation, e. g. min, + * max, sum of metric columns, or cardinality of dimension columns (see {@link + * io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory}). */ +@ExtensionPoint public abstract class AggregatorFactory implements Cacheable { private static final Logger log = new Logger(AggregatorFactory.class); @@ -50,10 +48,10 @@ public abstract class AggregatorFactory implements Cacheable public abstract Comparator getComparator(); /** - * A method that knows how to combine the outputs of the getIntermediate() method from the Aggregators - * produced via factorize(). Note, even though this is called combine, this method's contract *does* - * allow for mutation of the input objects. Thus, any use of lhs or rhs after calling this method is - * highly discouraged. + * A method that knows how to combine the outputs of {@link Aggregator#get} produced via {@link #factorize} or {@link + * BufferAggregator#get} produced via {@link #factorizeBuffered}. Note, even though this method is called "combine", + * this method's contract *does* allow for mutation of the input objects. Thus, any use of lhs or rhs after calling + * this method is highly discouraged. * * @param lhs The left hand side of the combine * @param rhs The right hand side of the combine @@ -62,6 +60,20 @@ */ public abstract Object combine(Object lhs, Object rhs); + /** + * Creates an AggregateCombiner to fold rollup aggregation results from several "rows" of different indexes during + * index merging. AggregateCombiner implements the same logic as {@link #combine}, with the difference that it uses + * {@link io.druid.segment.ColumnValueSelector} and its subinterfaces to get inputs and implements {@code + * ColumnValueSelector} to provide output. 
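For a concrete sense of what this selector-based counterpart of combine() looks like, a primitive sum combiner in the same style as the DoubleMaxAggregateCombiner added later in this diff would be roughly the following. Illustration only; an equivalent class may already exist elsewhere in the patch.

import io.druid.query.aggregation.DoubleAggregateCombiner;
import io.druid.segment.ColumnValueSelector;

// Same logic as combine(lhs, rhs) for a double sum, expressed against selectors
// instead of boxed Objects. Modelled on DoubleMaxAggregateCombiner below.
final class ExampleDoubleSumAggregateCombiner extends DoubleAggregateCombiner
{
  private double sum;

  @Override
  public void reset(ColumnValueSelector selector)
  {
    sum = selector.getDouble();
  }

  @Override
  public void fold(ColumnValueSelector selector)
  {
    sum += selector.getDouble();
  }

  @Override
  public double getDouble()
  {
    return sum;
  }
}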
+ * + * @see AggregateCombiner + * @see io.druid.segment.IndexMerger + */ + public AggregateCombiner makeAggregateCombiner() + { + throw new UOE("[%s] does not implement makeAggregateCombiner()", this.getClass().getName()); + } + /** * Returns an AggregatorFactory that can be used to combine the output of aggregators from this factory. This * generally amounts to simply creating a new factory that is the same as the current except with its input @@ -134,6 +146,7 @@ public abstract class AggregatorFactory implements Cacheable * * @return merged AggregatorFactory[] or Null if merging is not possible. */ + @Nullable public static AggregatorFactory[] mergeAggregators(List aggregatorsList) { if (aggregatorsList == null || aggregatorsList.isEmpty()) { diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java index 399880188ee..d8adbe3182a 100644 --- a/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java +++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorUtil.java @@ -20,6 +20,7 @@ package io.druid.query.aggregation; import com.google.common.collect.Lists; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.Pair; import io.druid.math.expr.ExprMacroTable; import io.druid.math.expr.Parser; @@ -34,6 +35,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Set; +@PublicApi public class AggregatorUtil { public static final byte STRING_SEPARATOR = (byte) 0xFF; diff --git a/processing/src/main/java/io/druid/query/aggregation/BufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/BufferAggregator.java index 9dc9acb3267..cdddd7629f2 100644 --- a/processing/src/main/java/io/druid/query/aggregation/BufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/BufferAggregator.java @@ -19,6 +19,7 @@ package io.druid.query.aggregation; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.query.monomorphicprocessing.CalledFromHotLoop; import io.druid.query.monomorphicprocessing.HotLoopCallee; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; @@ -33,6 +34,7 @@ import java.nio.ByteBuffer; * Thus, an Aggregator can be thought of as a closure over some other thing that is stateful and changes between calls * to aggregate(...). 
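BufferAggregator follows the same closure idea but keeps its state in a caller-owned ByteBuffer slot addressed by an explicit position. The sketch below shows that calling convention; the positional init/aggregate/get signatures and getMaxIntermediateSize() are assumptions about the existing interfaces (they are not shown in this diff), and real cursor handling is omitted as in the Aggregator sketch above.

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.ColumnSelectorFactory;
import java.nio.ByteBuffer;

// Illustrative calling convention: state lives at (buf, position), not in the aggregator object.
final class BufferAggregatorSketch
{
  static Object scan(AggregatorFactory factory, ColumnSelectorFactory selectors, int numRows)
  {
    BufferAggregator agg = factory.factorizeBuffered(selectors);
    ByteBuffer buf = ByteBuffer.allocate(factory.getMaxIntermediateSize());
    int position = 0;
    agg.init(buf, position);          // write the initial state into the slot
    for (int i = 0; i < numRows; i++) {
      agg.aggregate(buf, position);   // fold the current row into the buffered state
    }
    return agg.get(buf, position);    // read the intermediate result back out
  }
}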
*/ +@ExtensionPoint public interface BufferAggregator extends HotLoopCallee { /** diff --git a/processing/src/main/java/io/druid/query/aggregation/CountAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/CountAggregatorFactory.java index ba7677cbcd8..1e1be24b2e0 100644 --- a/processing/src/main/java/io/druid/query/aggregation/CountAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/CountAggregatorFactory.java @@ -70,6 +70,12 @@ public class CountAggregatorFactory extends AggregatorFactory return CountAggregator.combineValues(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new LongSumAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/segment/data/ArrayBasedOffset.java b/processing/src/main/java/io/druid/query/aggregation/DoubleAggregateCombiner.java similarity index 56% rename from processing/src/main/java/io/druid/segment/data/ArrayBasedOffset.java rename to processing/src/main/java/io/druid/query/aggregation/DoubleAggregateCombiner.java index 90ce1da5dbe..821a070e1b2 100644 --- a/processing/src/main/java/io/druid/segment/data/ArrayBasedOffset.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleAggregateCombiner.java @@ -17,62 +17,19 @@ * under the License. */ -package io.druid.segment.data; +package io.druid.query.aggregation; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; +import io.druid.segment.DoubleColumnSelector; /** + * Specialization of {@link AggregateCombiner} for primitive double aggregations. */ -public class ArrayBasedOffset extends Offset +public abstract class DoubleAggregateCombiner implements AggregateCombiner, DoubleColumnSelector { - private final int[] ints; - private int currIndex; - - public ArrayBasedOffset( - int[] ints - ) - { - this(ints, 0); - } - - public ArrayBasedOffset( - int[] ints, - int startIndex - ) - { - this.ints = ints; - this.currIndex = startIndex; - } - - @Override - public int getOffset() - { - return ints[currIndex]; - } - - @Override - public void increment() - { - ++currIndex; - } - - @Override - public boolean withinBounds() - { - return currIndex < ints.length; - } - - @Override - public Offset clone() - { - final ArrayBasedOffset retVal = new ArrayBasedOffset(ints); - retVal.currIndex = currIndex; - return retVal; - } - @Override public void inspectRuntimeShape(RuntimeShapeInspector inspector) { - // nothing to inspect + // Usually AggregateCombiner has nothing to inspect } } diff --git a/processing/src/main/java/io/druid/segment/historical/OffsetHolder.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregateCombiner.java similarity index 60% rename from processing/src/main/java/io/druid/segment/historical/OffsetHolder.java rename to processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregateCombiner.java index 9d773dc5370..4aa4845e1d5 100644 --- a/processing/src/main/java/io/druid/segment/historical/OffsetHolder.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregateCombiner.java @@ -17,19 +17,29 @@ * under the License. 
*/ -package io.druid.segment.historical; +package io.druid.query.aggregation; -import io.druid.segment.data.Offset; -import io.druid.segment.data.ReadableOffset; +import io.druid.segment.ColumnValueSelector; -public interface OffsetHolder +final class DoubleMaxAggregateCombiner extends DoubleAggregateCombiner { - Offset getOffset(); + private double max; - /** - * Should return the same, or a "view" of the same offset as {@link #getOffset()}. The difference is that smaller - * interface allows to return unwrapped underlying offset sometimes, e. g. {@link - * io.druid.segment.FilteredOffset#baseOffset}, instead of the wrapper {@link io.druid.segment.FilteredOffset}. - */ - ReadableOffset getReadableOffset(); + @Override + public void reset(ColumnValueSelector selector) + { + max = selector.getDouble(); + } + + @Override + public void fold(ColumnValueSelector selector) + { + max = Math.max(max, selector.getDouble()); + } + + @Override + public double getDouble() + { + return max; + } } diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregator.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregator.java index 6b4f384d51c..dcf4eadc61d 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregator.java @@ -21,14 +21,10 @@ package io.druid.query.aggregation; import io.druid.segment.DoubleColumnSelector; -import java.util.Comparator; - /** */ public class DoubleMaxAggregator implements Aggregator { - static final Comparator COMPARATOR = DoubleSumAggregator.COMPARATOR; - static double combineValues(Object lhs, Object rhs) { return Math.max(((Number) lhs).doubleValue(), ((Number) rhs).doubleValue()); diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java index 85267eb622a..ff73f2f8c0e 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMaxAggregatorFactory.java @@ -70,6 +70,12 @@ public class DoubleMaxAggregatorFactory extends SimpleDoubleAggregatorFactory return DoubleMaxAggregator.combineValues(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new DoubleMaxAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregateCombiner.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregateCombiner.java new file mode 100644 index 00000000000..e0ea8bb33c3 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregateCombiner.java @@ -0,0 +1,45 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation; + +import io.druid.segment.ColumnValueSelector; + +final class DoubleMinAggregateCombiner extends DoubleAggregateCombiner +{ + private double min; + + @Override + public void reset(ColumnValueSelector selector) + { + min = selector.getDouble(); + } + + @Override + public void fold(ColumnValueSelector selector) + { + min = Math.min(min, selector.getDouble()); + } + + @Override + public double getDouble() + { + return min; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregator.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregator.java index 33e0477cf29..f7592f14bbc 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregator.java @@ -21,14 +21,10 @@ package io.druid.query.aggregation; import io.druid.segment.DoubleColumnSelector; -import java.util.Comparator; - /** */ public class DoubleMinAggregator implements Aggregator { - static final Comparator COMPARATOR = DoubleSumAggregator.COMPARATOR; - static double combineValues(Object lhs, Object rhs) { return Math.min(((Number) lhs).doubleValue(), ((Number) rhs).doubleValue()); diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java index ab5e4aaf2a2..9577e89846c 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleMinAggregatorFactory.java @@ -28,7 +28,6 @@ import io.druid.segment.ColumnSelectorFactory; import java.nio.ByteBuffer; import java.util.Arrays; -import java.util.Comparator; import java.util.List; import java.util.Objects; @@ -64,18 +63,18 @@ public class DoubleMinAggregatorFactory extends SimpleDoubleAggregatorFactory return new DoubleMinBufferAggregator(getDoubleColumnSelector(metricFactory, Double.POSITIVE_INFINITY)); } - @Override - public Comparator getComparator() - { - return DoubleMinAggregator.COMPARATOR; - } - @Override public Object combine(Object lhs, Object rhs) { return DoubleMinAggregator.combineValues(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new DoubleMinAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregateCombiner.java b/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregateCombiner.java new file mode 100644 index 00000000000..60b34a5bee9 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregateCombiner.java @@ -0,0 +1,45 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation; + +import io.druid.segment.ColumnValueSelector; + +final class DoubleSumAggregateCombiner extends DoubleAggregateCombiner +{ + private double sum; + + @Override + public void reset(ColumnValueSelector selector) + { + sum = selector.getDouble(); + } + + @Override + public void fold(ColumnValueSelector selector) + { + sum += selector.getDouble(); + } + + @Override + public double getDouble() + { + return sum; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java index ac66d1bbdfb..50ec11bed55 100644 --- a/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/DoubleSumAggregatorFactory.java @@ -70,6 +70,12 @@ public class DoubleSumAggregatorFactory extends SimpleDoubleAggregatorFactory return DoubleSumAggregator.combineValues(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new DoubleSumAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/FilteredAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FilteredAggregatorFactory.java index 166421fc05e..bb4ca9bfb08 100644 --- a/processing/src/main/java/io/druid/query/aggregation/FilteredAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/FilteredAggregatorFactory.java @@ -79,6 +79,12 @@ public class FilteredAggregatorFactory extends AggregatorFactory return delegate.combine(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return delegate.makeAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregator.java b/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregator.java index 0efbecdb1fd..d344729d055 100644 --- a/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregator.java @@ -21,14 +21,10 @@ package io.druid.query.aggregation; import io.druid.segment.FloatColumnSelector; -import java.util.Comparator; - /** */ public class FloatMaxAggregator implements Aggregator { - static final Comparator COMPARATOR = FloatSumAggregator.COMPARATOR; - static double combineValues(Object lhs, Object rhs) { return Math.max(((Number) lhs).floatValue(), ((Number) rhs).floatValue()); diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java index 31a4c49d3e2..63a83da06d2 100644 --- a/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java +++ 
b/processing/src/main/java/io/druid/query/aggregation/FloatMaxAggregatorFactory.java @@ -68,6 +68,12 @@ public class FloatMaxAggregatorFactory extends SimpleFloatAggregatorFactory return FloatMaxAggregator.combineValues(finalizeComputation(lhs), finalizeComputation(rhs)); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new DoubleMaxAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregator.java b/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregator.java index 67cd93f9e9a..cbecd26b223 100644 --- a/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregator.java @@ -21,14 +21,10 @@ package io.druid.query.aggregation; import io.druid.segment.FloatColumnSelector; -import java.util.Comparator; - /** */ public class FloatMinAggregator implements Aggregator { - static final Comparator COMPARATOR = FloatSumAggregator.COMPARATOR; - static double combineValues(Object lhs, Object rhs) { return Math.min(((Number) lhs).floatValue(), ((Number) rhs).floatValue()); diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java index c186ed8ea23..1129e15bc76 100644 --- a/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/FloatMinAggregatorFactory.java @@ -68,6 +68,12 @@ public class FloatMinAggregatorFactory extends SimpleFloatAggregatorFactory return FloatMinAggregator.combineValues(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new DoubleMinAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java index a15c7004b53..4986a1ff6a3 100644 --- a/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/FloatSumAggregatorFactory.java @@ -68,6 +68,12 @@ public class FloatSumAggregatorFactory extends SimpleFloatAggregatorFactory return FloatSumAggregator.combineValues(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new DoubleSumAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/Histogram.java b/processing/src/main/java/io/druid/query/aggregation/Histogram.java index 8fa7188e75e..dc8ac1234ec 100644 --- a/processing/src/main/java/io/druid/query/aggregation/Histogram.java +++ b/processing/src/main/java/io/druid/query/aggregation/Histogram.java @@ -58,6 +58,28 @@ public class Histogram } } + public Histogram(Histogram other) + { + this.breaks = other.breaks; + this.bins = other.bins.clone(); + this.min = other.min; + this.max = other.max; + this.count = other.count; + } + + public void copyFrom(Histogram other) + { + this.breaks = other.breaks; + if (this.bins.length == other.bins.length) { + System.arraycopy(other.bins, 0, this.bins, 0, this.bins.length); + } else { + this.bins = other.bins.clone(); + } + this.min = other.min; + this.max = other.max; + this.count = other.count; + } + public void 
offer(float d) { if (d > max) { diff --git a/processing/src/main/java/io/druid/query/aggregation/HistogramAggregator.java b/processing/src/main/java/io/druid/query/aggregation/HistogramAggregator.java index 665bf8c9d87..344899d7607 100644 --- a/processing/src/main/java/io/druid/query/aggregation/HistogramAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/HistogramAggregator.java @@ -35,7 +35,7 @@ public class HistogramAggregator implements Aggregator } }; - static Object combineHistograms(Object lhs, Object rhs) + static Histogram combineHistograms(Object lhs, Object rhs) { return ((Histogram) lhs).fold((Histogram) rhs); } diff --git a/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java index 80eade4092a..1bccb8dcaed 100644 --- a/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/HistogramAggregatorFactory.java @@ -27,8 +27,11 @@ import com.google.common.primitives.Floats; import com.google.common.primitives.Longs; import io.druid.java.util.common.StringUtils; import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ColumnValueSelector; +import io.druid.segment.ObjectColumnSelector; import org.apache.commons.codec.binary.Base64; +import javax.annotation.Nullable; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Comparator; @@ -88,6 +91,50 @@ public class HistogramAggregatorFactory extends AggregatorFactory return HistogramAggregator.combineHistograms(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + // HistogramAggregatorFactory.combine() delegates to HistogramAggregator.combineHistograms() and it doesn't check + // for nulls, so this AggregateCombiner neither. 
+ return new ObjectAggregateCombiner() + { + private Histogram combined; + + @Override + public void reset(ColumnValueSelector selector) + { + @SuppressWarnings("unchecked") + Histogram first = ((ObjectColumnSelector) selector).get(); + if (combined == null) { + combined = new Histogram(first); + } else { + combined.copyFrom(first); + } + } + + @Override + public void fold(ColumnValueSelector selector) + { + @SuppressWarnings("unchecked") + Histogram other = ((ObjectColumnSelector) selector).get(); + combined.fold(other); + } + + @Override + public Class classOfObject() + { + return Histogram.class; + } + + @Nullable + @Override + public Histogram get() + { + return combined; + } + }; + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java index bfd4647fad4..afcc71bd22a 100644 --- a/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/JavaScriptAggregatorFactory.java @@ -32,6 +32,7 @@ import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.js.JavaScriptConfig; import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ColumnValueSelector; import io.druid.segment.ObjectColumnSelector; import org.mozilla.javascript.Context; import org.mozilla.javascript.ContextAction; @@ -140,6 +141,33 @@ public class JavaScriptAggregatorFactory extends AggregatorFactory return getCompiledScript().combine(((Number) lhs).doubleValue(), ((Number) rhs).doubleValue()); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new DoubleAggregateCombiner() + { + private double combined; + + @Override + public void reset(ColumnValueSelector selector) + { + combined = selector.getDouble(); + } + + @Override + public void fold(ColumnValueSelector selector) + { + combined = getCompiledScript().combine(combined, selector.getDouble()); + } + + @Override + public double getDouble() + { + return combined; + } + }; + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/LongAggregateCombiner.java b/processing/src/main/java/io/druid/query/aggregation/LongAggregateCombiner.java new file mode 100644 index 00000000000..949f4164a27 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/LongAggregateCombiner.java @@ -0,0 +1,35 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation; + +import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; +import io.druid.segment.LongColumnSelector; + +/** + * Specialization of {@link AggregateCombiner} for primitive long aggregations. + */ +public abstract class LongAggregateCombiner implements AggregateCombiner, LongColumnSelector +{ + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + // Usually AggregateCombiner has nothing to inspect + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java index 66b07c288e1..35c25cffeb3 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/LongMaxAggregatorFactory.java @@ -28,6 +28,7 @@ import io.druid.java.util.common.StringUtils; import io.druid.math.expr.ExprMacroTable; import io.druid.math.expr.Parser; import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ColumnValueSelector; import io.druid.segment.LongColumnSelector; import java.nio.ByteBuffer; @@ -100,6 +101,33 @@ public class LongMaxAggregatorFactory extends AggregatorFactory return LongMaxAggregator.combineValues(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new LongAggregateCombiner() + { + private long max; + + @Override + public void reset(ColumnValueSelector selector) + { + max = selector.getLong(); + } + + @Override + public void fold(ColumnValueSelector selector) + { + max = Math.max(max, selector.getLong()); + } + + @Override + public long getLong() + { + return max; + } + }; + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java index d09f23dc4b0..d024dc6afa1 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/LongMinAggregatorFactory.java @@ -28,6 +28,7 @@ import io.druid.java.util.common.StringUtils; import io.druid.math.expr.ExprMacroTable; import io.druid.math.expr.Parser; import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.ColumnValueSelector; import io.druid.segment.LongColumnSelector; import java.nio.ByteBuffer; @@ -101,6 +102,33 @@ public class LongMinAggregatorFactory extends AggregatorFactory return LongMinAggregator.combineValues(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new LongAggregateCombiner() + { + private long min; + + @Override + public void reset(ColumnValueSelector selector) + { + min = selector.getLong(); + } + + @Override + public void fold(ColumnValueSelector selector) + { + min = Math.min(min, selector.getLong()); + } + + @Override + public long getLong() + { + return min; + } + }; + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/LongSumAggregateCombiner.java b/processing/src/main/java/io/druid/query/aggregation/LongSumAggregateCombiner.java new file mode 100644 index 00000000000..0ceff541b1b --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/LongSumAggregateCombiner.java @@ -0,0 +1,45 @@ +/* + * Licensed to Metamarkets Group Inc. 
(Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.query.aggregation; + +import io.druid.segment.ColumnValueSelector; + +public final class LongSumAggregateCombiner extends LongAggregateCombiner +{ + private long sum; + + @Override + public void reset(ColumnValueSelector selector) + { + sum = selector.getLong(); + } + + @Override + public void fold(ColumnValueSelector selector) + { + sum += selector.getLong(); + } + + @Override + public long getLong() + { + return sum; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java index d2212c4fbeb..cec708eb4a6 100644 --- a/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/LongSumAggregatorFactory.java @@ -100,6 +100,12 @@ public class LongSumAggregatorFactory extends AggregatorFactory return LongSumAggregator.combineValues(lhs, rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new LongSumAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/MetricManipulationFn.java b/processing/src/main/java/io/druid/query/aggregation/MetricManipulationFn.java index 15f391a43db..19812e3f8a9 100644 --- a/processing/src/main/java/io/druid/query/aggregation/MetricManipulationFn.java +++ b/processing/src/main/java/io/druid/query/aggregation/MetricManipulationFn.java @@ -19,8 +19,11 @@ package io.druid.query.aggregation; +import io.druid.guice.annotations.PublicApi; + /** */ +@PublicApi public interface MetricManipulationFn { public Object manipulate(AggregatorFactory factory, Object object); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/StringIntervalFunction.java b/processing/src/main/java/io/druid/query/aggregation/ObjectAggregateCombiner.java similarity index 75% rename from indexing-hadoop/src/main/java/io/druid/indexer/StringIntervalFunction.java rename to processing/src/main/java/io/druid/query/aggregation/ObjectAggregateCombiner.java index 72a7828ddff..29bc0950982 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/StringIntervalFunction.java +++ b/processing/src/main/java/io/druid/query/aggregation/ObjectAggregateCombiner.java @@ -17,18 +17,13 @@ * under the License. */ -package io.druid.indexer; +package io.druid.query.aggregation; -import com.google.common.base.Function; -import org.joda.time.Interval; +import io.druid.segment.ObjectColumnSelector; /** -*/ -class StringIntervalFunction implements Function + * Specialization of {@link AggregateCombiner} for object aggregations. 
+ */ +public abstract class ObjectAggregateCombiner implements AggregateCombiner, ObjectColumnSelector { - @Override - public Interval apply(String input) - { - return new Interval(input); - } } diff --git a/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java index 5366d5e546a..171d4f39ef1 100644 --- a/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/PostAggregator.java @@ -19,6 +19,7 @@ package io.druid.query.aggregation; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.java.util.common.Cacheable; import java.util.Comparator; @@ -28,6 +29,7 @@ import java.util.Set; /** * Functionally similar to an Aggregator. See the Aggregator interface for more comments. */ +@ExtensionPoint public interface PostAggregator extends Cacheable { Set getDependentFields(); diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorFactory.java index 4a8129e4054..cf82c4d612a 100644 --- a/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorFactory.java @@ -28,6 +28,7 @@ import com.google.common.collect.Lists; import io.druid.hll.HyperLogLogCollector; import io.druid.java.util.common.StringUtils; import io.druid.query.ColumnSelectorPlus; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; @@ -38,6 +39,7 @@ import io.druid.query.aggregation.NoopBufferAggregator; import io.druid.query.aggregation.cardinality.types.CardinalityAggregatorColumnSelectorStrategy; import io.druid.query.aggregation.cardinality.types.CardinalityAggregatorColumnSelectorStrategyFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; +import io.druid.query.cache.CacheKeyBuilder; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; import io.druid.segment.ColumnSelectorFactory; @@ -45,10 +47,11 @@ import io.druid.segment.DimensionHandlerUtils; import org.apache.commons.codec.binary.Base64; import java.nio.ByteBuffer; -import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; public class CardinalityAggregatorFactory extends AggregatorFactory { @@ -86,28 +89,21 @@ public class CardinalityAggregatorFactory extends AggregatorFactory ); } - public static Object estimateCardinality(Object object) - { - if (object == null) { - return 0; - } - - return ((HyperLogLogCollector) object).estimateCardinality(); - } - private static final CardinalityAggregatorColumnSelectorStrategyFactory STRATEGY_FACTORY = new CardinalityAggregatorColumnSelectorStrategyFactory(); private final String name; private final List fields; private final boolean byRow; + private final boolean round; @JsonCreator public CardinalityAggregatorFactory( @JsonProperty("name") String name, @Deprecated @JsonProperty("fieldNames") final List fieldNames, @JsonProperty("fields") final List fields, - @JsonProperty("byRow") final boolean byRow + @JsonProperty("byRow") 
final boolean byRow, + @JsonProperty("round") final boolean round ) { this.name = name; @@ -122,6 +118,7 @@ public class CardinalityAggregatorFactory extends AggregatorFactory this.fields = fields; } this.byRow = byRow; + this.round = round; } public CardinalityAggregatorFactory( @@ -130,7 +127,7 @@ public class CardinalityAggregatorFactory extends AggregatorFactory final boolean byRow ) { - this(name, null, fields, byRow); + this(name, null, fields, byRow, false); } @Override @@ -191,10 +188,16 @@ public class CardinalityAggregatorFactory extends AggregatorFactory return ((HyperLogLogCollector) lhs).fold((HyperLogLogCollector) rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new HyperLogLogCollectorAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { - return new HyperUniquesAggregatorFactory(name, name); + return new HyperUniquesAggregatorFactory(name, name, false, round); } @Override @@ -206,17 +209,18 @@ public class CardinalityAggregatorFactory extends AggregatorFactory @Override public List getRequiredColumns() { - return Lists.transform( - fields, - new Function() - { - @Override - public AggregatorFactory apply(DimensionSpec input) - { - return new CardinalityAggregatorFactory(input.getOutputName(), Collections.singletonList(input), byRow); - } - } - ); + return fields.stream() + .map( + field -> + new CardinalityAggregatorFactory( + field.getOutputName(), + null, + Collections.singletonList(field), + byRow, + round + ) + ) + .collect(Collectors.toList()); } @Override @@ -242,7 +246,7 @@ public class CardinalityAggregatorFactory extends AggregatorFactory public Object finalizeComputation(Object object) { - return estimateCardinality(object); + return HyperUniquesAggregatorFactory.estimateCardinality(object, round); } @Override @@ -270,25 +274,20 @@ public class CardinalityAggregatorFactory extends AggregatorFactory return byRow; } + @JsonProperty + public boolean isRound() + { + return round; + } + @Override public byte[] getCacheKey() { - List dimSpecKeys = new ArrayList<>(); - int dimSpecKeysLength = fields.size(); - for (DimensionSpec dimSpec : fields) { - byte[] dimSpecKey = dimSpec.getCacheKey(); - dimSpecKeysLength += dimSpecKey.length; - dimSpecKeys.add(dimSpec.getCacheKey()); - } - - ByteBuffer retBuf = ByteBuffer.allocate(2 + dimSpecKeysLength); - retBuf.put(AggregatorUtil.CARD_CACHE_TYPE_ID); - for (byte[] dimSpecKey : dimSpecKeys) { - retBuf.put(dimSpecKey); - retBuf.put(AggregatorUtil.STRING_SEPARATOR); - } - retBuf.put((byte) (byRow ? 
1 : 0)); - return retBuf.array(); + return new CacheKeyBuilder(AggregatorUtil.CARD_CACHE_TYPE_ID) + .appendCacheables(fields) + .appendBoolean(byRow) + .appendBoolean(round) + .build(); } @Override @@ -304,7 +303,7 @@ public class CardinalityAggregatorFactory extends AggregatorFactory } @Override - public boolean equals(Object o) + public boolean equals(final Object o) { if (this == o) { return true; @@ -312,26 +311,17 @@ public class CardinalityAggregatorFactory extends AggregatorFactory if (o == null || getClass() != o.getClass()) { return false; } - - CardinalityAggregatorFactory that = (CardinalityAggregatorFactory) o; - - if (isByRow() != that.isByRow()) { - return false; - } - if (!getName().equals(that.getName())) { - return false; - } - return getFields().equals(that.getFields()); - + final CardinalityAggregatorFactory that = (CardinalityAggregatorFactory) o; + return byRow == that.byRow && + round == that.round && + Objects.equals(name, that.name) && + Objects.equals(fields, that.fields); } @Override public int hashCode() { - int result = getName().hashCode(); - result = 31 * result + getFields().hashCode(); - result = 31 * result + (isByRow() ? 1 : 0); - return result; + return Objects.hash(name, fields, byRow, round); } @Override @@ -339,7 +329,9 @@ public class CardinalityAggregatorFactory extends AggregatorFactory { return "CardinalityAggregatorFactory{" + "name='" + name + '\'' + - ", fields='" + fields + '\'' + + ", fields=" + fields + + ", byRow=" + byRow + + ", round=" + round + '}'; } } diff --git a/processing/src/main/java/io/druid/query/aggregation/cardinality/HyperLogLogCollectorAggregateCombiner.java b/processing/src/main/java/io/druid/query/aggregation/cardinality/HyperLogLogCollectorAggregateCombiner.java new file mode 100644 index 00000000000..86d8c972c85 --- /dev/null +++ b/processing/src/main/java/io/druid/query/aggregation/cardinality/HyperLogLogCollectorAggregateCombiner.java @@ -0,0 +1,67 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.query.aggregation.cardinality; + +import io.druid.hll.HyperLogLogCollector; +import io.druid.query.aggregation.ObjectAggregateCombiner; +import io.druid.segment.ColumnValueSelector; +import io.druid.segment.ObjectColumnSelector; + +import javax.annotation.Nullable; + +public final class HyperLogLogCollectorAggregateCombiner extends ObjectAggregateCombiner +{ + @Nullable + private HyperLogLogCollector combined; + + @Override + public void reset(ColumnValueSelector selector) + { + combined = null; + fold(selector); + } + + @Override + public void fold(ColumnValueSelector selector) + { + @SuppressWarnings("unchecked") + HyperLogLogCollector other = ((ObjectColumnSelector) selector).get(); + if (other == null) { + return; + } + if (combined == null) { + combined = HyperLogLogCollector.makeLatestCollector(); + } + combined.fold(other); + } + + @Override + public Class classOfObject() + { + return HyperLogLogCollector.class; + } + + @Nullable + @Override + public HyperLogLogCollector get() + { + return combined; + } +} diff --git a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java index 9a4af62eae1..783ba82d2b3 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/DoubleFirstAggregatorFactory.java @@ -26,11 +26,13 @@ import com.google.common.primitives.Doubles; import com.google.common.primitives.Longs; import com.metamx.common.StringUtils; import io.druid.collections.SerializablePair; +import io.druid.java.util.common.UOE; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; import io.druid.query.aggregation.AggregatorUtil; import io.druid.query.aggregation.BufferAggregator; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; @@ -102,6 +104,12 @@ public class DoubleFirstAggregatorFactory extends AggregatorFactory return TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? 
lhs : rhs; } + @Override + public AggregateCombiner makeAggregateCombiner() + { + throw new UOE("DoubleFirstAggregatorFactory is not supported during ingestion for rollup"); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java index a6d6823e2cd..e26b0228f8a 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/FloatFirstAggregatorFactory.java @@ -26,11 +26,13 @@ import com.google.common.primitives.Doubles; import com.google.common.primitives.Longs; import com.metamx.common.StringUtils; import io.druid.collections.SerializablePair; +import io.druid.java.util.common.UOE; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; import io.druid.query.aggregation.AggregatorUtil; import io.druid.query.aggregation.BufferAggregator; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; @@ -102,6 +104,12 @@ public class FloatFirstAggregatorFactory extends AggregatorFactory return TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? lhs : rhs; } + @Override + public AggregateCombiner makeAggregateCombiner() + { + throw new UOE("FloatFirstAggregatorFactory is not supported during ingestion for rollup"); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java index 1cdffb2d49b..96ff43d2792 100644 --- a/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/first/LongFirstAggregatorFactory.java @@ -25,11 +25,13 @@ import com.google.common.base.Preconditions; import com.google.common.primitives.Longs; import com.metamx.common.StringUtils; import io.druid.collections.SerializablePair; +import io.druid.java.util.common.UOE; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; import io.druid.query.aggregation.AggregatorUtil; import io.druid.query.aggregation.BufferAggregator; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; @@ -95,6 +97,12 @@ public class LongFirstAggregatorFactory extends AggregatorFactory return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) <= 0 ? 
lhs : rhs; } + @Override + public AggregateCombiner makeAggregateCombiner() + { + throw new UOE("LongFirstAggregatorFactory is not supported during ingestion for rollup"); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregator.java index ab1b405a4de..f49c79c3c1d 100644 --- a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregator.java @@ -48,18 +48,28 @@ public class HyperUniqueFinalizingPostAggregator implements PostAggregator private final String name; private final String fieldName; + private final AggregatorFactory aggregatorFactory; @JsonCreator public HyperUniqueFinalizingPostAggregator( @JsonProperty("name") String name, @JsonProperty("fieldName") String fieldName ) + { + this(name, fieldName, null); + } + + private HyperUniqueFinalizingPostAggregator( + String name, + String fieldName, + AggregatorFactory aggregatorFactory + ) { this.fieldName = Preconditions.checkNotNull(fieldName, "fieldName is null"); //Note that, in general, name shouldn't be null, we are defaulting //to fieldName here just to be backward compatible with 0.7.x this.name = name == null ? fieldName : name; - + this.aggregatorFactory = aggregatorFactory; } @Override @@ -77,7 +87,16 @@ public class HyperUniqueFinalizingPostAggregator implements PostAggregator @Override public Object compute(Map combinedAggregators) { - return HyperUniquesAggregatorFactory.estimateCardinality(combinedAggregators.get(fieldName)); + final Object collector = combinedAggregators.get(fieldName); + + if (aggregatorFactory == null) { + // This didn't come directly from an aggregator. Maybe it came through a FieldAccessPostAggregator or + // something like that. Hope it's a HyperLogLogCollector, and estimate it without rounding. + return HyperUniquesAggregatorFactory.estimateCardinality(collector, false); + } else { + // Delegate to the aggregator factory to get the user-specified rounding behavior. + return aggregatorFactory.finalizeComputation(collector); + } } @Override @@ -90,7 +109,8 @@ public class HyperUniqueFinalizingPostAggregator implements PostAggregator @Override public HyperUniqueFinalizingPostAggregator decorate(Map aggregators) { - return this; + final AggregatorFactory theAggregatorFactory = aggregators != null ? 
aggregators.get(fieldName) : null; + return new HyperUniqueFinalizingPostAggregator(name, fieldName, theAggregatorFactory); } @JsonProperty("fieldName") diff --git a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java index bc61b436802..e6ded9de10f 100644 --- a/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactory.java @@ -25,6 +25,7 @@ import io.druid.hll.HyperLogLogCollector; import io.druid.java.util.common.IAE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Comparators; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; @@ -32,6 +33,8 @@ import io.druid.query.aggregation.AggregatorUtil; import io.druid.query.aggregation.BufferAggregator; import io.druid.query.aggregation.NoopAggregator; import io.druid.query.aggregation.NoopBufferAggregator; +import io.druid.query.aggregation.cardinality.HyperLogLogCollectorAggregateCombiner; +import io.druid.query.cache.CacheKeyBuilder; import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ObjectColumnSelector; import org.apache.commons.codec.binary.Base64; @@ -47,29 +50,39 @@ import java.util.Objects; */ public class HyperUniquesAggregatorFactory extends AggregatorFactory { - public static Object estimateCardinality(Object object) + public static Object estimateCardinality(Object object, boolean round) { if (object == null) { return 0; } - return ((HyperLogLogCollector) object).estimateCardinality(); + final HyperLogLogCollector collector = (HyperLogLogCollector) object; + + // Avoid ternary, it causes estimateCardinalityRound to be cast to double. + if (round) { + return collector.estimateCardinalityRound(); + } else { + return collector.estimateCardinality(); + } } private final String name; private final String fieldName; private final boolean isInputHyperUnique; + private final boolean round; @JsonCreator public HyperUniquesAggregatorFactory( @JsonProperty("name") String name, @JsonProperty("fieldName") String fieldName, - @JsonProperty("isInputHyperUnique") Boolean isInputHyperUnique + @JsonProperty("isInputHyperUnique") boolean isInputHyperUnique, + @JsonProperty("round") boolean round ) { this.name = name; this.fieldName = fieldName; - this.isInputHyperUnique = (isInputHyperUnique == null) ? 
false : isInputHyperUnique; + this.isInputHyperUnique = isInputHyperUnique; + this.round = round; } public HyperUniquesAggregatorFactory( @@ -77,7 +90,7 @@ public class HyperUniquesAggregatorFactory extends AggregatorFactory String fieldName ) { - this(name, fieldName, false); + this(name, fieldName, false, false); } @Override @@ -136,10 +149,16 @@ public class HyperUniquesAggregatorFactory extends AggregatorFactory return ((HyperLogLogCollector) lhs).fold((HyperLogLogCollector) rhs); } + @Override + public AggregateCombiner makeAggregateCombiner() + { + return new HyperLogLogCollectorAggregateCombiner(); + } + @Override public AggregatorFactory getCombiningFactory() { - return new HyperUniquesAggregatorFactory(name, name, false); + return new HyperUniquesAggregatorFactory(name, name, false, round); } @Override @@ -158,7 +177,8 @@ public class HyperUniquesAggregatorFactory extends AggregatorFactory return Arrays.asList(new HyperUniquesAggregatorFactory( fieldName, fieldName, - isInputHyperUnique + isInputHyperUnique, + round )); } @@ -184,7 +204,7 @@ public class HyperUniquesAggregatorFactory extends AggregatorFactory @Override public Object finalizeComputation(Object object) { - return estimateCardinality(object); + return estimateCardinality(object, round); } @Override @@ -212,15 +232,19 @@ public class HyperUniquesAggregatorFactory extends AggregatorFactory return isInputHyperUnique; } + @JsonProperty + public boolean isRound() + { + return round; + } + @Override public byte[] getCacheKey() { - byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); - - return ByteBuffer.allocate(1 + fieldNameBytes.length) - .put(AggregatorUtil.HYPER_UNIQUE_CACHE_TYPE_ID) - .put(fieldNameBytes) - .array(); + return new CacheKeyBuilder(AggregatorUtil.HYPER_UNIQUE_CACHE_TYPE_ID) + .appendString(fieldName) + .appendBoolean(round) + .build(); } @Override @@ -246,11 +270,12 @@ public class HyperUniquesAggregatorFactory extends AggregatorFactory "name='" + name + '\'' + ", fieldName='" + fieldName + '\'' + ", isInputHyperUnique=" + isInputHyperUnique + + ", round=" + round + '}'; } @Override - public boolean equals(Object o) + public boolean equals(final Object o) { if (this == o) { return true; @@ -258,16 +283,16 @@ public class HyperUniquesAggregatorFactory extends AggregatorFactory if (o == null || getClass() != o.getClass()) { return false; } - - HyperUniquesAggregatorFactory that = (HyperUniquesAggregatorFactory) o; - - return Objects.equals(fieldName, that.fieldName) && Objects.equals(name, that.name) && - Objects.equals(isInputHyperUnique, that.isInputHyperUnique); + final HyperUniquesAggregatorFactory that = (HyperUniquesAggregatorFactory) o; + return isInputHyperUnique == that.isInputHyperUnique && + round == that.round && + Objects.equals(name, that.name) && + Objects.equals(fieldName, that.fieldName); } @Override public int hashCode() { - return Objects.hash(name, fieldName, isInputHyperUnique); + return Objects.hash(name, fieldName, isInputHyperUnique, round); } } diff --git a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java index 27aefaaffff..ddaec869889 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/DoubleLastAggregatorFactory.java @@ -25,6 +25,8 @@ import com.google.common.base.Preconditions; import com.google.common.primitives.Longs; import 
com.metamx.common.StringUtils; import io.druid.collections.SerializablePair; +import io.druid.java.util.common.UOE; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; @@ -93,6 +95,12 @@ public class DoubleLastAggregatorFactory extends AggregatorFactory return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? lhs : rhs; } + @Override + public AggregateCombiner makeAggregateCombiner() + { + throw new UOE("DoubleLastAggregatorFactory is not supported during ingestion for rollup"); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java index 6f2d55423dd..34f818e490b 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/FloatLastAggregatorFactory.java @@ -25,11 +25,13 @@ import com.google.common.base.Preconditions; import com.google.common.primitives.Longs; import com.metamx.common.StringUtils; import io.druid.collections.SerializablePair; +import io.druid.java.util.common.UOE; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; import io.druid.query.aggregation.AggregatorUtil; import io.druid.query.aggregation.BufferAggregator; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.aggregation.first.FloatFirstAggregatorFactory; import io.druid.query.aggregation.first.LongFirstAggregatorFactory; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; @@ -93,6 +95,12 @@ public class FloatLastAggregatorFactory extends AggregatorFactory return FloatFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? 
lhs : rhs; } + @Override + public AggregateCombiner makeAggregateCombiner() + { + throw new UOE("FloatLastAggregatorFactory is not supported during ingestion for rollup"); + } + @Override public AggregatorFactory getCombiningFactory() { diff --git a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java index 32623c50b89..23bd1934523 100644 --- a/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/last/LongLastAggregatorFactory.java @@ -24,11 +24,13 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.metamx.common.StringUtils; import io.druid.collections.SerializablePair; +import io.druid.java.util.common.UOE; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactoryNotMergeableException; import io.druid.query.aggregation.AggregatorUtil; import io.druid.query.aggregation.BufferAggregator; +import io.druid.query.aggregation.AggregateCombiner; import io.druid.query.aggregation.first.DoubleFirstAggregatorFactory; import io.druid.query.aggregation.first.LongFirstAggregatorFactory; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; @@ -91,6 +93,11 @@ public class LongLastAggregatorFactory extends AggregatorFactory return DoubleFirstAggregatorFactory.TIME_COMPARATOR.compare(lhs, rhs) > 0 ? lhs : rhs; } + @Override + public AggregateCombiner makeAggregateCombiner() + { + throw new UOE("LongLastAggregatorFactory is not supported during ingestion for rollup"); + } @Override public AggregatorFactory getCombiningFactory() diff --git a/processing/src/main/java/io/druid/query/cache/CacheKeyBuilder.java b/processing/src/main/java/io/druid/query/cache/CacheKeyBuilder.java index 81a89e4e936..530f2725bc1 100644 --- a/processing/src/main/java/io/druid/query/cache/CacheKeyBuilder.java +++ b/processing/src/main/java/io/druid/query/cache/CacheKeyBuilder.java @@ -26,6 +26,7 @@ import com.google.common.primitives.Doubles; import com.google.common.primitives.Floats; import com.google.common.primitives.Ints; import com.google.common.primitives.UnsignedBytes; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.Cacheable; @@ -50,6 +51,7 @@ import java.util.List; * +--------------------------------------------------------+ * */ +@PublicApi public class CacheKeyBuilder { static final byte BYTE_KEY = 0; diff --git a/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQuery.java b/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQuery.java index d750c97cc55..06a5b259607 100644 --- a/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/datasourcemetadata/DataSourceMetadataQuery.java @@ -22,7 +22,8 @@ package io.druid.query.datasourcemetadata; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.query.BaseQuery; import io.druid.query.DataSource; import io.druid.query.Druids; @@ -32,7 +33,6 @@ import 
io.druid.query.filter.DimFilter; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.spec.QuerySegmentSpec; import org.joda.time.DateTime; -import org.joda.time.Interval; import java.util.Collections; import java.util.List; @@ -42,9 +42,7 @@ import java.util.Map; */ public class DataSourceMetadataQuery extends BaseQuery> { - public static final Interval MY_Y2K_INTERVAL = new Interval( - JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT - ); + private static final QuerySegmentSpec DEFAULT_SEGMENT_SPEC = new MultipleIntervalSegmentSpec(Intervals.ONLY_ETERNITY); @JsonCreator public DataSourceMetadataQuery( @@ -53,13 +51,7 @@ public class DataSourceMetadataQuery extends BaseQuery context ) { - super( - dataSource, - (querySegmentSpec == null) ? new MultipleIntervalSegmentSpec(Collections.singletonList(MY_Y2K_INTERVAL)) - : querySegmentSpec, - false, - context - ); + super(dataSource, querySegmentSpec == null ? DEFAULT_SEGMENT_SPEC : querySegmentSpec, false, context); } @Override @@ -112,7 +104,7 @@ public class DataSourceMetadataQuery extends BaseQuery result : results) { DateTime currMaxIngestedEventTime = result.getValue().getMaxIngestedEventTime(); if (currMaxIngestedEventTime != null && currMaxIngestedEventTime.isAfter(max)) { diff --git a/processing/src/main/java/io/druid/query/dimension/DimensionSpec.java b/processing/src/main/java/io/druid/query/dimension/DimensionSpec.java index 7749be3120e..7a98ffd5edb 100644 --- a/processing/src/main/java/io/druid/query/dimension/DimensionSpec.java +++ b/processing/src/main/java/io/druid/query/dimension/DimensionSpec.java @@ -27,6 +27,8 @@ import io.druid.segment.DimensionSelector; import io.druid.segment.column.ValueType; /** + * Provides information about a dimension for a grouping query, like topN or groupBy. Note that this is not annotated + * with {@code PublicApi}, since it is not meant to be stable for usage by non-built-in queries. */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = LegacyDimensionSpec.class) @JsonSubTypes(value = { diff --git a/processing/src/main/java/io/druid/query/expression/ExprUtils.java b/processing/src/main/java/io/druid/query/expression/ExprUtils.java index c215de9db41..74d9218b258 100644 --- a/processing/src/main/java/io/druid/query/expression/ExprUtils.java +++ b/processing/src/main/java/io/druid/query/expression/ExprUtils.java @@ -22,9 +22,11 @@ package io.druid.query.expression; import io.druid.java.util.common.IAE; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.math.expr.Expr; +import org.joda.time.Chronology; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Period; +import org.joda.time.chrono.ISOChronology; public class ExprUtils { @@ -56,13 +58,6 @@ public class ExprUtils final DateTime origin; final DateTimeZone timeZone; - if (originArg == null) { - origin = null; - } else { - final Object value = originArg.eval(bindings).value(); - origin = value != null ? new DateTime(value) : null; - } - if (timeZoneArg == null) { timeZone = null; } else { @@ -70,6 +65,14 @@ public class ExprUtils timeZone = value != null ? DateTimeZone.forID(value) : null; } + if (originArg == null) { + origin = null; + } else { + Chronology chronology = timeZone == null ? ISOChronology.getInstanceUTC() : ISOChronology.getInstance(timeZone); + final Object value = originArg.eval(bindings).value(); + origin = value != null ? 
new DateTime(value, chronology) : null; + } + return new PeriodGranularity(period, origin, timeZone); } } diff --git a/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java index 301a11bf3d0..48ce083e730 100644 --- a/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java +++ b/processing/src/main/java/io/druid/query/expression/TimestampCeilExprMacro.java @@ -19,13 +19,13 @@ package io.druid.query.expression; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.math.expr.Expr; import io.druid.math.expr.ExprEval; import io.druid.math.expr.ExprMacroTable; -import org.joda.time.DateTime; import javax.annotation.Nonnull; import java.util.List; @@ -67,7 +67,7 @@ public class TimestampCeilExprMacro implements ExprMacroTable.ExprMacro @Override public ExprEval eval(final ObjectBinding bindings) { - return ExprEval.of(granularity.bucketEnd(new DateTime(arg.eval(bindings).asLong())).getMillis()); + return ExprEval.of(granularity.bucketEnd(DateTimes.utc(arg.eval(bindings).asLong())).getMillis()); } @Override @@ -102,7 +102,7 @@ public class TimestampCeilExprMacro implements ExprMacroTable.ExprMacro public ExprEval eval(final ObjectBinding bindings) { final PeriodGranularity granularity = getGranularity(args, bindings); - return ExprEval.of(granularity.bucketEnd(new DateTime(args.get(0).eval(bindings).asLong())).getMillis()); + return ExprEval.of(granularity.bucketEnd(DateTimes.utc(args.get(0).eval(bindings).asLong())).getMillis()); } @Override diff --git a/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java index 261e9caf879..19c64415e5f 100644 --- a/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java +++ b/processing/src/main/java/io/druid/query/expression/TimestampExtractExprMacro.java @@ -21,6 +21,7 @@ package io.druid.query.expression; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.Expr; import io.druid.math.expr.ExprEval; import io.druid.math.expr.ExprMacroTable; @@ -70,7 +71,7 @@ public class TimestampExtractExprMacro implements ExprMacroTable.ExprMacro } final Expr arg = args.get(0); - final Unit unit = Unit.valueOf(((String) args.get(1).getLiteralValue()).toUpperCase()); + final Unit unit = Unit.valueOf(StringUtils.toUpperCase((String) args.get(1).getLiteralValue())); final DateTimeZone timeZone; if (args.size() > 2) { @@ -87,7 +88,7 @@ public class TimestampExtractExprMacro implements ExprMacroTable.ExprMacro @Override public ExprEval eval(final ObjectBinding bindings) { - final DateTime dateTime = new DateTime(arg.eval(bindings).asLong()).withChronology(chronology); + final DateTime dateTime = new DateTime(arg.eval(bindings).asLong(), chronology); switch (unit) { case EPOCH: return ExprEval.of(dateTime.getMillis()); diff --git a/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java b/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java index cf660ba346a..fd81e51fc94 100644 --- a/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java +++ 
b/processing/src/main/java/io/druid/query/expression/TimestampFloorExprMacro.java @@ -19,13 +19,13 @@ package io.druid.query.expression; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.math.expr.Expr; import io.druid.math.expr.ExprEval; import io.druid.math.expr.ExprMacroTable; -import org.joda.time.DateTime; import javax.annotation.Nonnull; import java.util.List; @@ -77,7 +77,7 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro @Override public ExprEval eval(final ObjectBinding bindings) { - return ExprEval.of(granularity.bucketStart(new DateTime(arg.eval(bindings).asLong())).getMillis()); + return ExprEval.of(granularity.bucketStart(DateTimes.utc(arg.eval(bindings).asLong())).getMillis()); } @Override @@ -102,7 +102,7 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro public ExprEval eval(final ObjectBinding bindings) { final PeriodGranularity granularity = getGranularity(args, bindings); - return ExprEval.of(granularity.bucketStart(new DateTime(args.get(0).eval(bindings).asLong())).getMillis()); + return ExprEval.of(granularity.bucketStart(DateTimes.utc(args.get(0).eval(bindings).asLong())).getMillis()); } @Override diff --git a/processing/src/main/java/io/druid/query/extraction/ExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/ExtractionFn.java index 0bfe2033fc7..b3e7cb39098 100644 --- a/processing/src/main/java/io/druid/query/extraction/ExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/ExtractionFn.java @@ -21,6 +21,7 @@ package io.druid.query.extraction; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.RegisteredLookupExtractionFn; @@ -28,6 +29,7 @@ import javax.annotation.Nullable; /** */ +@ExtensionPoint @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") @JsonSubTypes(value = { @JsonSubTypes.Type(name = "time", value = TimeDimExtractionFn.class), diff --git a/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java index fa66b5c2d75..3a2153b1966 100644 --- a/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java @@ -22,11 +22,13 @@ package io.druid.query.extraction; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import io.druid.common.guava.GuavaUtils; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; +import org.joda.time.chrono.ISOChronology; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; @@ -124,7 +126,7 @@ public class TimeFormatExtractionFn implements ExtractionFn @Override public String apply(long value) { - final long truncated = granularity.bucketStart(new DateTime(value)).getMillis(); + final long truncated = 
granularity.bucketStart(DateTimes.utc(value)).getMillis(); return formatter == null ? String.valueOf(truncated) : formatter.print(truncated); } @@ -138,9 +140,9 @@ public class TimeFormatExtractionFn implements ExtractionFn if (asMillis && value instanceof String) { final Long theLong = GuavaUtils.tryParseLong((String) value); - return theLong == null ? apply(new DateTime(value).getMillis()) : apply(theLong.longValue()); + return theLong == null ? apply(DateTimes.of((String) value).getMillis()) : apply(theLong.longValue()); } else { - return apply(new DateTime(value).getMillis()); + return apply(new DateTime(value, ISOChronology.getInstanceUTC()).getMillis()); } } diff --git a/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java b/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java index 52e06be1a09..830619a6268 100644 --- a/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/IntervalDimFilter.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.RangeSet; import com.google.common.primitives.Longs; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.ExtractionFn; diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java index d7af8929c39..298bee8b96b 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryEngine.java @@ -332,7 +332,7 @@ public class GroupByQueryEngine ); } - final DimensionSelector selector = cursor.makeDimensionSelector(dimSpec); + final DimensionSelector selector = cursor.getColumnSelectorFactory().makeDimensionSelector(dimSpec); if (selector != null) { if (selector.getValueCardinality() == DimensionSelector.CARDINALITY_UNKNOWN) { throw new UnsupportedOperationException( @@ -349,7 +349,7 @@ public class GroupByQueryEngine sizesRequired = new int[aggregatorSpecs.size()]; for (int i = 0; i < aggregatorSpecs.size(); ++i) { AggregatorFactory aggregatorSpec = aggregatorSpecs.get(i); - aggregators[i] = aggregatorSpec.factorizeBuffered(cursor); + aggregators[i] = aggregatorSpec.factorizeBuffered(cursor.getColumnSelectorFactory()); metricNames[i] = aggregatorSpec.getName(); sizesRequired[i] = aggregatorSpec.getMaxIntermediateSize(); } diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java index 32372893165..1d2fb7b7151 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java @@ -66,11 +66,11 @@ public class GroupByQueryHelper { final GroupByQueryConfig querySpecificConfig = config.withOverrides(query); final Granularity gran = query.getGranularity(); - final long timeStart = query.getIntervals().get(0).getStartMillis(); + final DateTime timeStart = query.getIntervals().get(0).getStart(); - long granTimeStart = timeStart; + DateTime granTimeStart = timeStart; if (!(Granularities.ALL.equals(gran))) { - granTimeStart = gran.bucketStart(new DateTime(timeStart)).getMillis(); + granTimeStart = gran.bucketStart(timeStart); } final List 
aggs; @@ -115,7 +115,7 @@ public class GroupByQueryHelper .withDimensionsSpec(new DimensionsSpec(dimensionSchemas, null, null)) .withMetrics(aggs.toArray(new AggregatorFactory[aggs.size()])) .withQueryGranularity(gran) - .withMinTimestamp(granTimeStart) + .withMinTimestamp(granTimeStart.getMillis()) .build(); if (query.getContextValue("useOffheap", false)) { diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java index a47a2b7b930..c45c5afa359 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByQueryEngineV2.java @@ -28,6 +28,7 @@ import io.druid.collections.NonBlockingPool; import io.druid.collections.ResourceHolder; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.BaseSequence; @@ -129,7 +130,7 @@ public class GroupByQueryEngineV2 final DateTime fudgeTimestamp = fudgeTimestampString == null ? null - : new DateTime(Long.parseLong(fudgeTimestampString)); + : DateTimes.utc(Long.parseLong(fudgeTimestampString)); return cursors.flatMap( cursor -> new BaseSequence<>( @@ -142,7 +143,7 @@ public class GroupByQueryEngineV2 .createColumnSelectorPluses( STRATEGY_FACTORY, query.getDimensions(), - cursor + cursor.getColumnSelectorFactory() ); GroupByColumnSelectorPlus[] dims = createGroupBySelectorPlus(selectorPlus); @@ -434,7 +435,7 @@ public class GroupByQueryEngineV2 return new BufferHashGrouper<>( Suppliers.ofInstance(buffer), keySerde, - cursor, + cursor.getColumnSelectorFactory(), query.getAggregatorSpecs() .toArray(new AggregatorFactory[query.getAggregatorSpecs().size()]), querySpecificConfig.getBufferGrouperMaxSize(), @@ -587,7 +588,7 @@ public class GroupByQueryEngineV2 { return new BufferArrayGrouper( Suppliers.ofInstance(buffer), - cursor, + cursor.getColumnSelectorFactory(), query.getAggregatorSpecs() .toArray(new AggregatorFactory[query.getAggregatorSpecs().size()]), cardinality diff --git a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java index 488cc527367..8280998f661 100644 --- a/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java +++ b/processing/src/main/java/io/druid/query/groupby/strategy/GroupByStrategyV2.java @@ -35,6 +35,7 @@ import io.druid.data.input.Row; import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Merging; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; @@ -117,10 +118,10 @@ public class GroupByStrategyV2 implements GroupByStrategy final String timestampStringFromContext = query.getContextValue(CTX_KEY_FUDGE_TIMESTAMP, ""); if (!timestampStringFromContext.isEmpty()) { - return new DateTime(Long.parseLong(timestampStringFromContext)); + return DateTimes.utc(Long.parseLong(timestampStringFromContext)); } else if (Granularities.ALL.equals(gran)) { - final long timeStart = query.getIntervals().get(0).getStartMillis(); - return gran.getIterable(new Interval(timeStart, 
timeStart + 1)).iterator().next().getStart(); + final DateTime timeStart = query.getIntervals().get(0).getStart(); + return gran.getIterable(new Interval(timeStart, timeStart.plus(1))).iterator().next().getStart(); } else { return null; } diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java index addccea550a..36c36d256b4 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java @@ -50,6 +50,7 @@ import io.druid.segment.column.ValueType; import io.druid.segment.data.IndexedInts; import io.druid.segment.serde.ComplexMetricSerde; import io.druid.segment.serde.ComplexMetrics; +import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nullable; @@ -249,8 +250,8 @@ public class SegmentAnalyzer } if (analyzingSize()) { - final long start = storageAdapter.getMinTime().getMillis(); - final long end = storageAdapter.getMaxTime().getMillis(); + final DateTime start = storageAdapter.getMinTime(); + final DateTime end = storageAdapter.getMaxTime(); final Sequence cursors = storageAdapter.makeCursors( @@ -269,12 +270,9 @@ public class SegmentAnalyzer @Override public Long accumulate(Long accumulated, Cursor cursor) { - DimensionSelector selector = cursor.makeDimensionSelector( - new DefaultDimensionSpec( - columnName, - columnName - ) - ); + DimensionSelector selector = cursor + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec(columnName, columnName)); if (selector == null) { return accumulated; } diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index fcb80d77c6a..4a921480f30 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -32,7 +32,7 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.inject.Inject; import io.druid.common.guava.CombiningSequence; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.data.input.impl.TimestampSpec; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Comparators; diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java index 9f0c1d7e970..adc7f8da184 100644 --- a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.query.BaseQuery; import io.druid.query.DataSource; @@ -39,7 +39,6 @@ import io.druid.query.spec.QuerySegmentSpec; import org.joda.time.Interval; import java.nio.ByteBuffer; -import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Map; @@ -54,6 
+53,8 @@ public class SegmentMetadataQuery extends BaseQuery */ public static final byte[] ANALYSIS_TYPES_CACHE_PREFIX = new byte[] {(byte) 0xFF}; + private static final QuerySegmentSpec DEFAULT_SEGMENT_SPEC = new MultipleIntervalSegmentSpec(Intervals.ONLY_ETERNITY); + public enum AnalysisType { CARDINALITY, @@ -84,10 +85,6 @@ public class SegmentMetadataQuery extends BaseQuery } } - public static final Interval DEFAULT_INTERVAL = new Interval( - JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT - ); - private final ColumnIncluderator toInclude; private final boolean merge; private final boolean usingDefaultInterval; @@ -106,13 +103,7 @@ public class SegmentMetadataQuery extends BaseQuery @JsonProperty("lenientAggregatorMerge") Boolean lenientAggregatorMerge ) { - super( - dataSource, - (querySegmentSpec == null) ? new MultipleIntervalSegmentSpec(Collections.singletonList(DEFAULT_INTERVAL)) - : querySegmentSpec, - false, - context - ); + super(dataSource, querySegmentSpec == null ? DEFAULT_SEGMENT_SPEC : querySegmentSpec, false, context); if (querySegmentSpec == null) { this.usingDefaultInterval = true; diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java index ec932823fc0..6e9aa50956a 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java @@ -30,6 +30,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.primitives.Ints; import com.google.inject.Inject; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.Sequence; @@ -54,7 +55,6 @@ import io.druid.query.filter.DimFilter; import io.druid.query.search.search.SearchHit; import io.druid.query.search.search.SearchQuery; import io.druid.query.search.search.SearchQueryConfig; -import org.joda.time.DateTime; import javax.annotation.Nullable; import java.nio.ByteBuffer; @@ -253,7 +253,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest( - new DateTime(((Number) result.get(0)).longValue()), + DateTimes.utc(((Number) result.get(0)).longValue()), new SearchResultValue( Lists.transform( (List) result.get(1), @@ -279,7 +279,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest( - new DateTime(((Number) result.get(0)).longValue()), + DateTimes.utc(((Number) result.get(0)).longValue()), new SearchResultValue( Lists.transform( (List) result.get(1), diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java index 7670d5003d7..fc849bacf25 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java @@ -44,7 +44,6 @@ import io.druid.segment.DimensionSelector; import io.druid.segment.DoubleColumnSelector; import io.druid.segment.FloatColumnSelector; import io.druid.segment.LongColumnSelector; -import io.druid.segment.NullDimensionSelector; import io.druid.segment.Segment; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ValueType; @@ -132,7 +131,7 @@ public class SearchQueryRunner implements QueryRunner> final Object2IntRBTreeMap set ) { - if (selector != null && !(selector instanceof NullDimensionSelector)) { + if 
(selector != null && !isNilSelector(selector)) { final IndexedInts vals = selector.getRow(); for (int i = 0; i < vals.size(); ++i) { final String dimVal = selector.lookupName(vals.get(i)); @@ -147,6 +146,13 @@ public class SearchQueryRunner implements QueryRunner> } } + private static boolean isNilSelector(final DimensionSelector selector) + { + return selector.nameLookupPossibleInAdvance() + && selector.getValueCardinality() == 1 + && selector.lookupName(0) == null; + } + public static class LongSearchColumnSelectorStrategy implements SearchColumnSelectorStrategy { @Override diff --git a/processing/src/main/java/io/druid/query/search/search/CursorOnlyStrategy.java b/processing/src/main/java/io/druid/query/search/search/CursorOnlyStrategy.java index 88c253cd76f..668b963d197 100644 --- a/processing/src/main/java/io/druid/query/search/search/CursorOnlyStrategy.java +++ b/processing/src/main/java/io/druid/query/search/search/CursorOnlyStrategy.java @@ -117,7 +117,7 @@ public class CursorOnlyStrategy extends SearchStrategy DimensionHandlerUtils.createColumnSelectorPluses( SearchQueryRunner.SEARCH_COLUMN_SELECTOR_STRATEGY_FACTORY, dimsToSearch, - cursor + cursor.getColumnSelectorFactory() ) ); diff --git a/processing/src/main/java/io/druid/query/select/EventHolder.java b/processing/src/main/java/io/druid/query/select/EventHolder.java index 9120b2075df..b738d403059 100644 --- a/processing/src/main/java/io/druid/query/select/EventHolder.java +++ b/processing/src/main/java/io/druid/query/select/EventHolder.java @@ -22,6 +22,7 @@ package io.druid.query.select; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import org.joda.time.DateTime; @@ -52,8 +53,10 @@ public class EventHolder public DateTime getTimestamp() { Object retVal = event.get(timestampKey); - if (retVal instanceof String || retVal instanceof Long) { - return new DateTime(retVal); + if (retVal instanceof Long) { + return DateTimes.utc((Long) retVal); + } else if (retVal instanceof String) { + return DateTimes.of((String) retVal); } else if (retVal instanceof DateTime) { return (DateTime) retVal; } else { diff --git a/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java b/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java index 85099f18ba5..78a0e1829e1 100644 --- a/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java +++ b/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java @@ -26,16 +26,17 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.inject.Inject; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.Sequence; import io.druid.query.ColumnSelectorPlus; import io.druid.query.QueryRunnerHelper; import io.druid.query.Result; -import io.druid.query.dimension.DefaultDimensionSpec; -import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ColumnSelectorStrategy; import io.druid.query.dimension.ColumnSelectorStrategyFactory; +import io.druid.query.dimension.DefaultDimensionSpec; +import io.druid.query.dimension.DimensionSpec; import io.druid.query.filter.Filter; import io.druid.segment.ColumnValueSelector; import io.druid.segment.Cursor; @@ -53,7 +54,6 @@ import 
io.druid.segment.column.ValueType; import io.druid.segment.data.IndexedInts; import io.druid.segment.filter.Filters; import io.druid.timeline.DataSegmentUtils; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.util.ArrayList; @@ -241,13 +241,14 @@ public class SelectQueryEngine query.isDescending() ); - final LongColumnSelector timestampColumnSelector = cursor.makeLongColumnSelector(Column.TIME_COLUMN_NAME); + final LongColumnSelector timestampColumnSelector = + cursor.getColumnSelectorFactory().makeLongColumnSelector(Column.TIME_COLUMN_NAME); final List> selectorPlusList = Arrays.asList( DimensionHandlerUtils.createColumnSelectorPluses( STRATEGY_FACTORY, Lists.newArrayList(dims), - cursor + cursor.getColumnSelectorFactory() ) ); @@ -257,7 +258,8 @@ public class SelectQueryEngine final Map metSelectors = Maps.newHashMap(); for (String metric : metrics) { - final ObjectColumnSelector metricSelector = cursor.makeObjectColumnSelector(metric); + final ObjectColumnSelector metricSelector = + cursor.getColumnSelectorFactory().makeObjectColumnSelector(metric); metSelectors.put(metric, metricSelector); builder.addMetric(metric); } @@ -300,7 +302,7 @@ public class SelectQueryEngine ) { final Map theEvent = Maps.newLinkedHashMap(); - theEvent.put(timestampKey, new DateTime(timestampColumnSelector.getLong())); + theEvent.put(timestampKey, DateTimes.utc(timestampColumnSelector.getLong())); for (ColumnSelectorPlus selectorPlus : selectorPlusList) { selectorPlus.getColumnSelectorStrategy().addRowValuesToSelectResult(selectorPlus.getOutputName(), selectorPlus.getSelector(), theEvent); diff --git a/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java b/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java index 46a0034e781..9fe23e10b3c 100644 --- a/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java +++ b/processing/src/main/java/io/druid/query/spec/LegacySegmentSpec.java @@ -20,15 +20,15 @@ package io.druid.query.spec; import com.fasterxml.jackson.annotation.JsonCreator; -import com.google.common.base.Function; -import com.google.common.collect.Lists; import io.druid.java.util.common.IAE; import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; /** */ @@ -49,17 +49,10 @@ public class LegacySegmentSpec extends MultipleIntervalSegmentSpec throw new IAE("Unknown type[%s] for intervals[%s]", intervals.getClass(), intervals); } - return Lists.transform( - intervalStringList, - new Function() - { - @Override - public Interval apply(Object input) - { - return new Interval(input); - } - } - ); + return intervalStringList + .stream() + .map(input -> new Interval(input, ISOChronology.getInstanceUTC())) + .collect(Collectors.toList()); } @JsonCreator diff --git a/processing/src/main/java/io/druid/query/spec/MultipleIntervalSegmentSpec.java b/processing/src/main/java/io/druid/query/spec/MultipleIntervalSegmentSpec.java index 92a57cc4966..888a4c8e4ea 100644 --- a/processing/src/main/java/io/druid/query/spec/MultipleIntervalSegmentSpec.java +++ b/processing/src/main/java/io/druid/query/spec/MultipleIntervalSegmentSpec.java @@ -21,7 +21,7 @@ package io.druid.query.spec; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.query.Query; import 
io.druid.query.QueryRunner; import io.druid.query.QuerySegmentWalker; diff --git a/processing/src/main/java/io/druid/query/spec/MultipleSpecificSegmentSpec.java b/processing/src/main/java/io/druid/query/spec/MultipleSpecificSegmentSpec.java index 01a05fb15c1..20f6d36b14b 100644 --- a/processing/src/main/java/io/druid/query/spec/MultipleSpecificSegmentSpec.java +++ b/processing/src/main/java/io/druid/query/spec/MultipleSpecificSegmentSpec.java @@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Function; import com.google.common.collect.Iterables; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QuerySegmentWalker; diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java index e97f7cdc11b..ce7d2f247c6 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQuery.java @@ -23,7 +23,8 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.query.BaseQuery; import io.druid.query.DataSource; @@ -34,10 +35,8 @@ import io.druid.query.filter.DimFilter; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.query.spec.QuerySegmentSpec; import org.joda.time.DateTime; -import org.joda.time.Interval; import java.nio.ByteBuffer; -import java.util.Collections; import java.util.List; import java.util.Map; @@ -45,10 +44,7 @@ import java.util.Map; */ public class TimeBoundaryQuery extends BaseQuery> { - public static final Interval MY_Y2K_INTERVAL = new Interval( - new DateTime("0000-01-01"), - new DateTime("3000-01-01") - ); + private static final QuerySegmentSpec DEFAULT_SEGMENT_SPEC = new MultipleIntervalSegmentSpec(Intervals.ONLY_ETERNITY); public static final String MAX_TIME = "maxTime"; public static final String MIN_TIME = "minTime"; @@ -66,13 +62,7 @@ public class TimeBoundaryQuery extends BaseQuery @JsonProperty("context") Map context ) { - super( - dataSource, - (querySegmentSpec == null) ? new MultipleIntervalSegmentSpec(Collections.singletonList(MY_Y2K_INTERVAL)) - : querySegmentSpec, - false, - context - ); + super(dataSource, querySegmentSpec == null ? DEFAULT_SEGMENT_SPEC : querySegmentSpec, false, context); this.dimFilter = dimFilter; this.bound = bound == null ? 
"" : bound; @@ -159,8 +149,8 @@ public class TimeBoundaryQuery extends BaseQuery return Lists.newArrayList(); } - DateTime min = new DateTime(JodaUtils.MAX_INSTANT); - DateTime max = new DateTime(JodaUtils.MIN_INSTANT); + DateTime min = DateTimes.MAX; + DateTime max = DateTimes.MIN; for (Result result : results) { TimeBoundaryResultValue val = result.getValue(); diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java index c53c05ebcc2..a36461d2234 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChest.java @@ -27,6 +27,7 @@ import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.inject.Inject; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.BySegmentSkippingQueryRunner; @@ -41,7 +42,6 @@ import io.druid.query.QueryToolChest; import io.druid.query.Result; import io.druid.query.aggregation.MetricManipulationFn; import io.druid.timeline.LogicalSegment; -import org.joda.time.DateTime; import java.nio.ByteBuffer; import java.util.List; @@ -197,7 +197,7 @@ public class TimeBoundaryQueryQueryToolChest List result = (List) input; return new Result<>( - new DateTime(((Number) result.get(0)).longValue()), + DateTimes.utc(((Number) result.get(0)).longValue()), new TimeBoundaryResultValue(result.get(1)) ); } diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java index d2498ecc54f..01ad3166b9c 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java @@ -22,6 +22,7 @@ package io.druid.query.timeboundary; import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.inject.Inject; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.BaseSequence; @@ -102,8 +103,9 @@ public class TimeBoundaryQueryRunnerFactory if (cursor.isDone()) { return null; } - final LongColumnSelector timestampColumnSelector = cursor.makeLongColumnSelector(Column.TIME_COLUMN_NAME); - final DateTime timestamp = new DateTime(timestampColumnSelector.getLong()); + final LongColumnSelector timestampColumnSelector = + cursor.getColumnSelectorFactory().makeLongColumnSelector(Column.TIME_COLUMN_NAME); + final DateTime timestamp = DateTimes.utc(timestampColumnSelector.getLong()); return new Result<>(adapter.getInterval().getStart(), timestamp); } }; diff --git a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java index 18982199787..732f768335d 100644 --- a/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java +++ b/processing/src/main/java/io/druid/query/timeboundary/TimeBoundaryResultValue.java @@ -21,9 +21,11 @@ package io.druid.query.timeboundary; import 
com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import org.joda.time.DateTime; +import javax.annotation.Nullable; import java.util.Map; /** @@ -46,6 +48,7 @@ public class TimeBoundaryResultValue return value; } + @Nullable public DateTime getMaxTime() { if (value instanceof Map) { @@ -55,6 +58,7 @@ public class TimeBoundaryResultValue } } + @Nullable public DateTime getMinTime() { if (value instanceof Map) { @@ -97,7 +101,8 @@ public class TimeBoundaryResultValue '}'; } - private DateTime getDateTimeValue(Object val) + @Nullable + private DateTime getDateTimeValue(@Nullable Object val) { if (val == null) { return null; @@ -105,8 +110,10 @@ public class TimeBoundaryResultValue if (val instanceof DateTime) { return (DateTime) val; - } else if (val instanceof String || val instanceof Long) { - return new DateTime(val); + } else if (val instanceof String) { + return DateTimes.of((String) val); + } else if (val instanceof Long) { + return DateTimes.utc((Long) val); } else { throw new IAE("Cannot get time from type[%s]", val.getClass()); } diff --git a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java index bbe57f5df3d..4fa93de263b 100644 --- a/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java +++ b/processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryEngine.java @@ -66,7 +66,7 @@ public class TimeseriesQueryEngine String[] aggregatorNames = new String[aggregatorSpecs.size()]; for (int i = 0; i < aggregatorSpecs.size(); i++) { - aggregators[i] = aggregatorSpecs.get(i).factorize(cursor); + aggregators[i] = aggregatorSpecs.get(i).factorize(cursor.getColumnSelectorFactory()); aggregatorNames[i] = aggregatorSpecs.get(i).getName(); } diff --git a/processing/src/main/java/io/druid/query/topn/BaseTopNAlgorithm.java b/processing/src/main/java/io/druid/query/topn/BaseTopNAlgorithm.java index 8430c3878a8..2b1e4b61edf 100644 --- a/processing/src/main/java/io/druid/query/topn/BaseTopNAlgorithm.java +++ b/processing/src/main/java/io/druid/query/topn/BaseTopNAlgorithm.java @@ -45,7 +45,7 @@ public abstract class BaseTopNAlgorithm INVALID_VALUE; + return value > INVALID_VALUE; } + @Override + public void reset() + { + iterator = safeClone(iteratorForReset); + value = valueForReset; + } + + @Override + public ReadableOffset getBaseReadableOffset() + { + return this; + } + + @SuppressWarnings("MethodDoesntCallSuperMethod") @Override public Offset clone() { - return new BitmapOffset(fullness, itr.clone(), val); + return new BitmapOffset(fullness, safeClone(iterator), value); } @Override public int getOffset() { - return val; + return value; } @Override public void inspectRuntimeShape(RuntimeShapeInspector inspector) { - inspector.visit("itr", itr); + inspector.visit("iterator", iterator); inspector.visit("fullness", fullness); } - public static class RoaringBitmapOffset extends BitmapOffset + private static IntIterator safeClone(IntIterator iterator) { - - public RoaringBitmapOffset(ImmutableBitmap bitmapIndex, boolean descending, long numRows) - { - super(bitmapIndex, descending, numRows); - } - - RoaringBitmapOffset(String fullness, IntIterator itr, int val) - { - super(fullness, itr, val); - } - - @Override - public Offset clone() - { - return new RoaringBitmapOffset(fullness, itr.hasNext() ? 
itr.clone() : EmptyIntIterator.instance(), val); - } + // Calling clone() on empty iterators from RoaringBitmap library sometimes fails with NPE, + // see https://github.com/druid-io/druid/issues/4709, https://github.com/RoaringBitmap/RoaringBitmap/issues/177 + return iterator.hasNext() ? iterator.clone() : EmptyIntIterator.instance(); } } diff --git a/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java b/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java index 29b51924d1e..e8c7e2e95d1 100644 --- a/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java @@ -200,12 +200,6 @@ public class CompressedVSizeIndexedSupplier implements WritableSupplier predicate) + { + return BooleanValueMatcher.of(predicate.apply(value)); + } + + @Override + public int getValueCardinality() + { + return 1; + } + + @Override + public String lookupName(int id) + { + assert id == 0 : "id = " + id; + return value; + } + + @Override + public boolean nameLookupPossibleInAdvance() + { + return true; + } + + @Nullable + @Override + public IdLookup idLookup() + { + return this; + } + + @Override + public int lookupId(String name) + { + return value.equals(name) ? 0 : -1; + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("value", value); + } +} diff --git a/processing/src/main/java/io/druid/segment/Cursor.java b/processing/src/main/java/io/druid/segment/Cursor.java index 85b367231c5..3300f5bbc52 100644 --- a/processing/src/main/java/io/druid/segment/Cursor.java +++ b/processing/src/main/java/io/druid/segment/Cursor.java @@ -24,8 +24,9 @@ import org.joda.time.DateTime; /** */ -public interface Cursor extends ColumnSelectorFactory +public interface Cursor { + ColumnSelectorFactory getColumnSelectorFactory(); DateTime getTime(); void advance(); void advanceUninterruptibly(); diff --git a/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java b/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java index aee86578411..ff526e6662a 100644 --- a/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java +++ b/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java @@ -134,13 +134,14 @@ public final class DimensionHandlerUtils * @param The strategy type created by the provided strategy factory. * @param strategyFactory A factory provided by query engines that generates type-handling strategies * @param dimensionSpecs The set of columns to generate ColumnSelectorPlus objects for - * @param cursor Used to create value selectors for columns. + * @param columnSelectorFactory Used to create value selectors for columns. 
* @return An array of ColumnSelectorPlus objects, in the order of the columns specified in dimensionSpecs */ - public static ColumnSelectorPlus[] createColumnSelectorPluses( + public static + ColumnSelectorPlus[] createColumnSelectorPluses( ColumnSelectorStrategyFactory strategyFactory, List dimensionSpecs, - ColumnSelectorFactory cursor + ColumnSelectorFactory columnSelectorFactory ) { int dimCount = dimensionSpecs.size(); @@ -150,12 +151,12 @@ public final class DimensionHandlerUtils final String dimName = dimSpec.getDimension(); final ColumnValueSelector selector = getColumnValueSelectorFromDimensionSpec( dimSpec, - cursor + columnSelectorFactory ); ColumnSelectorStrategyClass strategy = makeStrategy( strategyFactory, dimSpec, - cursor.getColumnCapabilities(dimSpec.getDimension()), + columnSelectorFactory.getColumnCapabilities(dimSpec.getDimension()), selector ); final ColumnSelectorPlus selectorPlus = new ColumnSelectorPlus<>( diff --git a/processing/src/main/java/io/druid/segment/DimensionIndexer.java b/processing/src/main/java/io/druid/segment/DimensionIndexer.java index dd885f05c5d..2a21ece1480 100644 --- a/processing/src/main/java/io/druid/segment/DimensionIndexer.java +++ b/processing/src/main/java/io/druid/segment/DimensionIndexer.java @@ -25,7 +25,7 @@ import io.druid.query.dimension.DimensionSpec; import io.druid.segment.column.ValueType; import io.druid.segment.data.Indexed; import io.druid.segment.incremental.IncrementalIndex; -import io.druid.segment.incremental.IncrementalIndexStorageAdapter; +import io.druid.segment.incremental.TimeAndDimsHolder; import javax.annotation.Nullable; @@ -217,7 +217,7 @@ public interface DimensionIndexer */ DimensionSelector makeDimensionSelector( DimensionSpec spec, - IncrementalIndexStorageAdapter.EntryHolder currEntry, + TimeAndDimsHolder currEntry, IncrementalIndex.DimensionDesc desc ); @@ -229,10 +229,7 @@ public interface DimensionIndexer * @param desc Descriptor object for this dimension within an IncrementalIndex * @return A new object that reads rows from currEntry */ - LongColumnSelector makeLongColumnSelector( - IncrementalIndexStorageAdapter.EntryHolder currEntry, - IncrementalIndex.DimensionDesc desc - ); + LongColumnSelector makeLongColumnSelector(TimeAndDimsHolder currEntry, IncrementalIndex.DimensionDesc desc); /** @@ -242,10 +239,7 @@ public interface DimensionIndexer * @param desc Descriptor object for this dimension within an IncrementalIndex * @return A new object that reads rows from currEntry */ - FloatColumnSelector makeFloatColumnSelector( - IncrementalIndexStorageAdapter.EntryHolder currEntry, - IncrementalIndex.DimensionDesc desc - ); + FloatColumnSelector makeFloatColumnSelector(TimeAndDimsHolder currEntry, IncrementalIndex.DimensionDesc desc); /** @@ -255,10 +249,7 @@ public interface DimensionIndexer * @param desc Descriptor object for this dimension within an IncrementalIndex * @return A new object that reads rows from currEntry */ - DoubleColumnSelector makeDoubleColumnSelector( - IncrementalIndexStorageAdapter.EntryHolder currEntry, - IncrementalIndex.DimensionDesc desc - ); + DoubleColumnSelector makeDoubleColumnSelector(TimeAndDimsHolder currEntry, IncrementalIndex.DimensionDesc desc); /** * Compares the row values for this DimensionIndexer's dimension from a TimeAndDims key. 
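Editor's note on the recurring pattern in the hunks above (GroupByQueryEngine, SegmentAnalyzer, CursorOnlyStrategy, SelectQueryEngine, TimeseriesQueryEngine, and the Cursor interface itself): Cursor no longer acts as a ColumnSelectorFactory, so call sites now go through cursor.getColumnSelectorFactory(). A minimal sketch of the call-site migration; the wrapper class, method name, and the "dim" column are illustrative and not part of this patch:

import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.Cursor;
import io.druid.segment.DimensionSelector;
import java.util.List;

class CursorMigrationSketch
{
  // Illustrative only: how an engine adapts to the Cursor / ColumnSelectorFactory split.
  static Aggregator[] makeAggregators(Cursor cursor, List<AggregatorFactory> aggregatorSpecs)
  {
    // Before this patch the cursor itself was passed, e.g. aggregatorSpecs.get(i).factorize(cursor).
    ColumnSelectorFactory factory = cursor.getColumnSelectorFactory();
    Aggregator[] aggregators = new Aggregator[aggregatorSpecs.size()];
    for (int i = 0; i < aggregatorSpecs.size(); i++) {
      aggregators[i] = aggregatorSpecs.get(i).factorize(factory);
    }
    // Dimension, long, and object selectors are obtained through the same factory.
    DimensionSelector dimSelector = factory.makeDimensionSelector(new DefaultDimensionSpec("dim", "dim"));
    return aggregators;
  }
}

The same pattern applies to factorizeBuffered in GroupByQueryEngine and to makeLongColumnSelector/makeObjectColumnSelector in the Select and TimeBoundary engines above.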
diff --git a/processing/src/main/java/io/druid/segment/DimensionSelectorUtils.java b/processing/src/main/java/io/druid/segment/DimensionSelectorUtils.java index 3bf165becb4..f83be70241b 100644 --- a/processing/src/main/java/io/druid/segment/DimensionSelectorUtils.java +++ b/processing/src/main/java/io/druid/segment/DimensionSelectorUtils.java @@ -21,12 +21,15 @@ package io.druid.segment; import com.google.common.base.Predicate; import com.google.common.base.Predicates; +import com.google.common.base.Strings; import io.druid.java.util.common.IAE; +import io.druid.query.extraction.ExtractionFn; import io.druid.query.filter.ValueMatcher; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; import io.druid.segment.data.IndexedInts; import io.druid.segment.filter.BooleanValueMatcher; +import javax.annotation.Nullable; import java.util.BitSet; import java.util.Objects; @@ -246,4 +249,25 @@ public final class DimensionSelectorUtils } return valueIds; } + + public static DimensionSelector constantSelector(@Nullable final String value) + { + if (Strings.isNullOrEmpty(value)) { + return NullDimensionSelector.instance(); + } else { + return new ConstantDimensionSelector(value); + } + } + + public static DimensionSelector constantSelector( + @Nullable final String value, + @Nullable final ExtractionFn extractionFn + ) + { + if (extractionFn == null) { + return constantSelector(value); + } else { + return constantSelector(extractionFn.apply(value)); + } + } } diff --git a/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java b/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java index 8334dfd1f46..b3b71e30005 100644 --- a/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java +++ b/processing/src/main/java/io/druid/segment/DoubleDimensionIndexer.java @@ -26,7 +26,7 @@ import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; import io.druid.segment.column.ValueType; import io.druid.segment.data.Indexed; import io.druid.segment.incremental.IncrementalIndex; -import io.druid.segment.incremental.IncrementalIndexStorageAdapter; +import io.druid.segment.incremental.TimeAndDimsHolder; import javax.annotation.Nullable; import java.util.List; @@ -86,19 +86,16 @@ public class DoubleDimensionIndexer implements DimensionIndexer currentOffset && iter.hasNext()) { iterOffset = iter.next(); } @@ -173,8 +172,7 @@ public final class FilteredOffset extends Offset @Override public void inspectRuntimeShape(RuntimeShapeInspector inspector) { - inspector.visit("holder", holder); - inspector.visit("offset", holder.getReadableOffset()); + inspector.visit("offset", offset); inspector.visit("iter", iter); } }; @@ -186,7 +184,7 @@ public final class FilteredOffset extends Offset @Override public boolean matches() { - int currentOffset = holder.getReadableOffset().getOffset(); + int currentOffset = offset.getOffset(); while (iterOffset < currentOffset && iter.hasNext()) { iterOffset = iter.next(); } @@ -197,8 +195,7 @@ public final class FilteredOffset extends Offset @Override public void inspectRuntimeShape(RuntimeShapeInspector inspector) { - inspector.visit("holder", holder); - inspector.visit("offset", holder.getReadableOffset()); + inspector.visit("offset", offset); inspector.visit("iter", iter); } }; diff --git a/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java b/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java index f8844d7e0ed..ce925e04ddf 100644 --- 
a/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java +++ b/processing/src/main/java/io/druid/segment/FloatDimensionIndexer.java @@ -26,7 +26,7 @@ import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; import io.druid.segment.column.ValueType; import io.druid.segment.data.Indexed; import io.druid.segment.incremental.IncrementalIndex; -import io.druid.segment.incremental.IncrementalIndexStorageAdapter; +import io.druid.segment.incremental.TimeAndDimsHolder; import javax.annotation.Nullable; import java.util.List; @@ -87,7 +87,7 @@ public class FloatDimensionIndexer implements DimensionIndexer @Override public DimensionSelector makeDimensionSelector( - DimensionSpec spec, IncrementalIndexStorageAdapter.EntryHolder currEntry, IncrementalIndex.DimensionDesc desc + DimensionSpec spec, TimeAndDimsHolder currEntry, IncrementalIndex.DimensionDesc desc ) { return new LongWrappingDimensionSelector( @@ -98,7 +98,7 @@ public class LongDimensionIndexer implements DimensionIndexer @Override public LongColumnSelector makeLongColumnSelector( - final IncrementalIndexStorageAdapter.EntryHolder currEntry, + final TimeAndDimsHolder currEntry, final IncrementalIndex.DimensionDesc desc ) { @@ -129,7 +129,7 @@ public class LongDimensionIndexer implements DimensionIndexer @Override public FloatColumnSelector makeFloatColumnSelector( - final IncrementalIndexStorageAdapter.EntryHolder currEntry, + final TimeAndDimsHolder currEntry, final IncrementalIndex.DimensionDesc desc ) { @@ -161,7 +161,8 @@ public class LongDimensionIndexer implements DimensionIndexer @Override public DoubleColumnSelector makeDoubleColumnSelector( - IncrementalIndexStorageAdapter.EntryHolder currEntry, IncrementalIndex.DimensionDesc desc + final TimeAndDimsHolder currEntry, + final IncrementalIndex.DimensionDesc desc ) { final int dimIndex = desc.getIndex(); diff --git a/processing/src/main/java/io/druid/segment/Metadata.java b/processing/src/main/java/io/druid/segment/Metadata.java index 3ac724e8f0c..e7cf31354f2 100644 --- a/processing/src/main/java/io/druid/segment/Metadata.java +++ b/processing/src/main/java/io/druid/segment/Metadata.java @@ -21,6 +21,7 @@ package io.druid.segment; import com.fasterxml.jackson.annotation.JsonProperty; import io.druid.data.input.impl.TimestampSpec; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.aggregation.AggregatorFactory; @@ -33,6 +34,7 @@ import java.util.concurrent.ConcurrentHashMap; /** */ +@PublicApi public class Metadata { // container is used for arbitrary key-value pairs in segment metadata e.g. diff --git a/processing/src/main/java/io/druid/segment/NoFilterOffset.java b/processing/src/main/java/io/druid/segment/NoFilterOffset.java new file mode 100644 index 00000000000..8fdf37d1cdc --- /dev/null +++ b/processing/src/main/java/io/druid/segment/NoFilterOffset.java @@ -0,0 +1,88 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.segment; + +import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; +import io.druid.segment.data.Offset; +import io.druid.segment.data.ReadableOffset; + +public class NoFilterOffset extends Offset +{ + private final int rowCount; + private final boolean descending; + private final int initialOffset; + private int currentOffset; + + NoFilterOffset(int initialOffset, int rowCount, boolean descending) + { + this.initialOffset = initialOffset; + this.currentOffset = initialOffset; + this.rowCount = rowCount; + this.descending = descending; + } + + @Override + public void increment() + { + currentOffset++; + } + + @Override + public boolean withinBounds() + { + return currentOffset < rowCount; + } + + @Override + public void reset() + { + currentOffset = initialOffset; + } + + @Override + public ReadableOffset getBaseReadableOffset() + { + return this; + } + + @Override + public Offset clone() + { + return new NoFilterOffset(currentOffset, rowCount, descending); + } + + @Override + public int getOffset() + { + return descending ? rowCount - currentOffset - 1 : currentOffset; + } + + @Override + public String toString() + { + return currentOffset + "/" + rowCount + (descending ? "(DSC)" : ""); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("descending", descending); + } +} diff --git a/processing/src/main/java/io/druid/segment/ObjectColumnSelector.java b/processing/src/main/java/io/druid/segment/ObjectColumnSelector.java index 6dac1a42d5d..498aa627375 100644 --- a/processing/src/main/java/io/druid/segment/ObjectColumnSelector.java +++ b/processing/src/main/java/io/druid/segment/ObjectColumnSelector.java @@ -19,6 +19,8 @@ package io.druid.segment; +import javax.annotation.Nullable; + public interface ObjectColumnSelector extends ColumnValueSelector { public Class classOfObject(); @@ -28,6 +30,7 @@ public interface ObjectColumnSelector extends ColumnValueSelector * ObjectColumnSelector doesn't extend {@link io.druid.query.monomorphicprocessing.HotLoopCallee} yet. If it will, * this method should be annotated. 
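Editor's note on the new NoFilterOffset above: Offset implementations are driven by a simple increment/withinBounds loop and can be reset for another pass. A rough sketch of that contract, assuming code in the same package (the NoFilterOffset constructor is package-private) and an illustrative method name:

import io.druid.segment.data.Offset;

class OffsetScanSketch
{
  // Illustrative only: the traversal contract that NoFilterOffset implements.
  static void scan(int rowCount)
  {
    Offset offset = new NoFilterOffset(0, rowCount, false);
    while (offset.withinBounds()) {
      int row = offset.getOffset();  // reversed relative to the internal position when descending
      // ... read values for 'row' through selectors positioned on this offset ...
      offset.increment();
    }
    offset.reset();  // rewind to the initial position for another pass
  }
}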
*/ + @Nullable public T get(); /** @@ -39,7 +42,11 @@ public interface ObjectColumnSelector extends ColumnValueSelector @Override default float getFloat() { - return ((Number) get()).floatValue(); + T value = get(); + if (value == null) { + return 0; + } + return ((Number) value).floatValue(); } /** @@ -51,7 +58,11 @@ public interface ObjectColumnSelector extends ColumnValueSelector @Override default double getDouble() { - return ((Number) get()).doubleValue(); + T value = get(); + if (value == null) { + return 0; + } + return ((Number) value).doubleValue(); } /** @@ -63,6 +74,10 @@ public interface ObjectColumnSelector extends ColumnValueSelector @Override default long getLong() { - return ((Number) get()).longValue(); + T value = get(); + if (value == null) { + return 0; + } + return ((Number) value).longValue(); } } diff --git a/processing/src/main/java/io/druid/segment/QueryableIndex.java b/processing/src/main/java/io/druid/segment/QueryableIndex.java index ee50675f4b8..fcfd6b2395a 100644 --- a/processing/src/main/java/io/druid/segment/QueryableIndex.java +++ b/processing/src/main/java/io/druid/segment/QueryableIndex.java @@ -28,6 +28,11 @@ import java.io.IOException; import java.util.Map; /** + * Direct interface to memory mapped segments. Not a public API for extensions; site specific queries should be + * using {@link StorageAdapter}. + * + * @see QueryableIndexStorageAdapter for query path adapter + * @see QueryableIndexIndexableAdapter for indexing path adapter */ public interface QueryableIndex extends ColumnSelector, Closeable { diff --git a/processing/src/main/java/io/druid/segment/QueryableIndexColumnSelectorFactory.java b/processing/src/main/java/io/druid/segment/QueryableIndexColumnSelectorFactory.java new file mode 100644 index 00000000000..41f8115284f --- /dev/null +++ b/processing/src/main/java/io/druid/segment/QueryableIndexColumnSelectorFactory.java @@ -0,0 +1,385 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.segment; + +import com.google.common.collect.Maps; +import io.druid.java.util.common.io.Closer; +import io.druid.query.dimension.DimensionSpec; +import io.druid.query.extraction.ExtractionFn; +import io.druid.segment.column.Column; +import io.druid.segment.column.ColumnCapabilities; +import io.druid.segment.column.ComplexColumn; +import io.druid.segment.column.DictionaryEncodedColumn; +import io.druid.segment.column.GenericColumn; +import io.druid.segment.column.ValueType; +import io.druid.segment.data.IndexedInts; +import io.druid.segment.data.ReadableOffset; + +import javax.annotation.Nullable; +import java.io.Closeable; +import java.util.Map; + +/** + * The basic implementation of {@link ColumnSelectorFactory} over a historical segment (i. e. {@link QueryableIndex}). 
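The default getFloat()/getDouble()/getLong() implementations added to ObjectColumnSelector above now coerce a null object to zero instead of failing with a NullPointerException when the wrapped value is absent. A minimal, self-contained sketch of that pattern (the interface and class names below are illustrative stand-ins, not the actual Druid types):

// Sketch only: a boxed-value selector whose numeric accessors tolerate a missing value.
interface BoxedValueSelector<T>
{
  T get();   // may return null when the row has no value

  default float getFloat()
  {
    T value = get();
    // Treat an absent value as zero, mirroring the new defaults shown above.
    return value == null ? 0 : ((Number) value).floatValue();
  }

  default long getLong()
  {
    T value = get();
    return value == null ? 0 : ((Number) value).longValue();
  }
}

class NullToleranceDemo
{
  public static void main(String[] args)
  {
    BoxedValueSelector<Double> absent = () -> null;     // simulates a row with no value
    BoxedValueSelector<Double> present = () -> 42.5d;   // simulates a populated row
    System.out.println(absent.getFloat());   // 0.0 instead of a NullPointerException
    System.out.println(present.getLong());   // 42
  }
}

The same null-to-zero coercion applies to all three numeric accessors in the interface.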
+ * It's counterpart for incremental index is {@link io.druid.segment.incremental.IncrementalIndexColumnSelectorFactory}. + */ +class QueryableIndexColumnSelectorFactory implements ColumnSelectorFactory +{ + private final QueryableIndex index; + private final VirtualColumns virtualColumns; + private final boolean descending; + private final Closer closer; + protected final ReadableOffset offset; + + private final Map dictionaryColumnCache = Maps.newHashMap(); + private final Map genericColumnCache = Maps.newHashMap(); + private final Map objectColumnCache = Maps.newHashMap(); + + QueryableIndexColumnSelectorFactory( + QueryableIndex index, + VirtualColumns virtualColumns, + boolean descending, + Closer closer, + ReadableOffset offset + ) + { + this.index = index; + this.virtualColumns = virtualColumns; + this.descending = descending; + this.closer = closer; + this.offset = offset; + } + + @Override + public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec) + { + if (virtualColumns.exists(dimensionSpec.getDimension())) { + return virtualColumns.makeDimensionSelector(dimensionSpec, this); + } + + return dimensionSpec.decorate(makeDimensionSelectorUndecorated(dimensionSpec)); + } + + private DimensionSelector makeDimensionSelectorUndecorated(DimensionSpec dimensionSpec) + { + final String dimension = dimensionSpec.getDimension(); + final ExtractionFn extractionFn = dimensionSpec.getExtractionFn(); + + final Column columnDesc = index.getColumn(dimension); + if (columnDesc == null) { + return DimensionSelectorUtils.constantSelector(null, extractionFn); + } + + if (dimension.equals(Column.TIME_COLUMN_NAME)) { + return new SingleScanTimeDimSelector( + makeLongColumnSelector(dimension), + extractionFn, + descending + ); + } + + if (columnDesc.getCapabilities().getType() == ValueType.LONG) { + return new LongWrappingDimensionSelector(makeLongColumnSelector(dimension), extractionFn); + } + + if (columnDesc.getCapabilities().getType() == ValueType.FLOAT) { + return new FloatWrappingDimensionSelector(makeFloatColumnSelector(dimension), extractionFn); + } + + if (columnDesc.getCapabilities().getType() == ValueType.DOUBLE) { + return new DoubleWrappingDimensionSelector(makeDoubleColumnSelector(dimension), extractionFn); + } + + DictionaryEncodedColumn cachedColumn = dictionaryColumnCache.get(dimension); + if (cachedColumn == null) { + cachedColumn = columnDesc.getDictionaryEncoding(); + closer.register(cachedColumn); + dictionaryColumnCache.put(dimension, cachedColumn); + } + + final DictionaryEncodedColumn column = cachedColumn; + if (column == null) { + return DimensionSelectorUtils.constantSelector(null, extractionFn); + } else { + return column.makeDimensionSelector(offset, extractionFn); + } + } + + @Override + public FloatColumnSelector makeFloatColumnSelector(String columnName) + { + if (virtualColumns.exists(columnName)) { + return virtualColumns.makeFloatColumnSelector(columnName, this); + } + + GenericColumn cachedMetricVals = genericColumnCache.get(columnName); + + if (cachedMetricVals == null) { + Column holder = index.getColumn(columnName); + if (holder != null && ValueType.isNumeric(holder.getCapabilities().getType())) { + cachedMetricVals = holder.getGenericColumn(); + closer.register(cachedMetricVals); + genericColumnCache.put(columnName, cachedMetricVals); + } + } + + if (cachedMetricVals == null) { + return ZeroFloatColumnSelector.instance(); + } + + return cachedMetricVals.makeFloatSingleValueRowSelector(offset); + } + + @Override + public DoubleColumnSelector 
makeDoubleColumnSelector(String columnName) + { + if (virtualColumns.exists(columnName)) { + return virtualColumns.makeDoubleColumnSelector(columnName, this); + } + + GenericColumn cachedMetricVals = genericColumnCache.get(columnName); + + if (cachedMetricVals == null) { + Column holder = index.getColumn(columnName); + if (holder != null && ValueType.isNumeric(holder.getCapabilities().getType())) { + cachedMetricVals = holder.getGenericColumn(); + closer.register(cachedMetricVals); + genericColumnCache.put(columnName, cachedMetricVals); + } + } + + if (cachedMetricVals == null) { + return ZeroDoubleColumnSelector.instance(); + } + + return cachedMetricVals.makeDoubleSingleValueRowSelector(offset); + } + + @Override + public LongColumnSelector makeLongColumnSelector(String columnName) + { + if (virtualColumns.exists(columnName)) { + return virtualColumns.makeLongColumnSelector(columnName, this); + } + + GenericColumn cachedMetricVals = genericColumnCache.get(columnName); + + if (cachedMetricVals == null) { + Column holder = index.getColumn(columnName); + if (holder != null && ValueType.isNumeric(holder.getCapabilities().getType())) { + cachedMetricVals = holder.getGenericColumn(); + closer.register(cachedMetricVals); + genericColumnCache.put(columnName, cachedMetricVals); + } + } + + if (cachedMetricVals == null) { + return ZeroLongColumnSelector.instance(); + } + + return cachedMetricVals.makeLongSingleValueRowSelector(offset); + } + + @Nullable + @Override + public ObjectColumnSelector makeObjectColumnSelector(String column) + { + if (virtualColumns.exists(column)) { + return virtualColumns.makeObjectColumnSelector(column, this); + } + + Object cachedColumnVals = objectColumnCache.get(column); + + if (cachedColumnVals == null) { + Column holder = index.getColumn(column); + + if (holder != null) { + final ColumnCapabilities capabilities = holder.getCapabilities(); + + if (capabilities.isDictionaryEncoded()) { + cachedColumnVals = holder.getDictionaryEncoding(); + } else if (capabilities.getType() == ValueType.COMPLEX) { + cachedColumnVals = holder.getComplexColumn(); + } else { + cachedColumnVals = holder.getGenericColumn(); + } + } + + if (cachedColumnVals != null) { + closer.register((Closeable) cachedColumnVals); + objectColumnCache.put(column, cachedColumnVals); + } + } + + if (cachedColumnVals == null) { + return null; + } + + if (cachedColumnVals instanceof GenericColumn) { + final GenericColumn columnVals = (GenericColumn) cachedColumnVals; + final ValueType type = columnVals.getType(); + + if (columnVals.hasMultipleValues()) { + throw new UnsupportedOperationException( + "makeObjectColumnSelector does not support multi-value GenericColumns" + ); + } + + if (type == ValueType.FLOAT) { + return new ObjectColumnSelector() + { + @Override + public Class classOfObject() + { + return Float.class; + } + + @Override + public Float get() + { + return columnVals.getFloatSingleValueRow(offset.getOffset()); + } + }; + } + if (type == ValueType.DOUBLE) { + return new ObjectColumnSelector() + { + @Override + public Class classOfObject() + { + return Double.class; + } + + @Override + public Double get() + { + return columnVals.getDoubleSingleValueRow(offset.getOffset()); + } + }; + } + if (type == ValueType.LONG) { + return new ObjectColumnSelector() + { + @Override + public Class classOfObject() + { + return Long.class; + } + + @Override + public Long get() + { + return columnVals.getLongSingleValueRow(offset.getOffset()); + } + }; + } + if (type == ValueType.STRING) { + return new 
ObjectColumnSelector() + { + @Override + public Class classOfObject() + { + return String.class; + } + + @Override + public String get() + { + return columnVals.getStringSingleValueRow(offset.getOffset()); + } + }; + } + } + + if (cachedColumnVals instanceof DictionaryEncodedColumn) { + final DictionaryEncodedColumn columnVals = (DictionaryEncodedColumn) cachedColumnVals; + if (columnVals.hasMultipleValues()) { + return new ObjectColumnSelector() + { + @Override + public Class classOfObject() + { + return Object.class; + } + + @Override + @Nullable + public Object get() + { + final IndexedInts multiValueRow = columnVals.getMultiValueRow(offset.getOffset()); + if (multiValueRow.size() == 0) { + return null; + } else if (multiValueRow.size() == 1) { + return columnVals.lookupName(multiValueRow.get(0)); + } else { + final String[] strings = new String[multiValueRow.size()]; + for (int i = 0; i < multiValueRow.size(); i++) { + strings[i] = columnVals.lookupName(multiValueRow.get(i)); + } + return strings; + } + } + }; + } else { + return new ObjectColumnSelector() + { + @Override + public Class classOfObject() + { + return String.class; + } + + @Override + public String get() + { + return columnVals.lookupName(columnVals.getSingleValueRow(offset.getOffset())); + } + }; + } + } + + final ComplexColumn columnVals = (ComplexColumn) cachedColumnVals; + return new ObjectColumnSelector() + { + @Override + public Class classOfObject() + { + return columnVals.getClazz(); + } + + @Override + public Object get() + { + return columnVals.getRowValue(offset.getOffset()); + } + }; + } + + @Override + @Nullable + public ColumnCapabilities getColumnCapabilities(String columnName) + { + if (virtualColumns.exists(columnName)) { + return virtualColumns.getColumnCapabilities(columnName); + } + + return QueryableIndexStorageAdapter.getColumnCapabilites(index, columnName); + } +} diff --git a/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java b/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java index 1a5eb25e038..09640cfa57d 100644 --- a/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java +++ b/processing/src/main/java/io/druid/segment/QueryableIndexStorageAdapter.java @@ -22,9 +22,9 @@ package io.druid.segment; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import io.druid.collections.bitmap.ImmutableBitmap; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -33,33 +33,25 @@ import io.druid.query.BaseQuery; import io.druid.query.BitmapResultFactory; import io.druid.query.DefaultBitmapResultFactory; import io.druid.query.QueryMetrics; -import io.druid.query.dimension.DimensionSpec; -import io.druid.query.extraction.ExtractionFn; import io.druid.query.filter.Filter; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; import io.druid.segment.column.BitmapIndex; import io.druid.segment.column.Column; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ComplexColumn; -import io.druid.segment.column.DictionaryEncodedColumn; import io.druid.segment.column.GenericColumn; -import io.druid.segment.column.ValueType; import io.druid.segment.data.Indexed; -import io.druid.segment.data.IndexedInts; 
import io.druid.segment.data.Offset; import io.druid.segment.data.ReadableOffset; import io.druid.segment.filter.AndFilter; import io.druid.segment.historical.HistoricalCursor; -import io.druid.segment.historical.HistoricalFloatColumnSelector; import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nullable; -import java.io.Closeable; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.Objects; /** @@ -68,9 +60,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter { private final QueryableIndex index; - public QueryableIndexStorageAdapter( - QueryableIndex index - ) + public QueryableIndexStorageAdapter(QueryableIndex index) { this.index = index; } @@ -126,7 +116,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter public DateTime getMinTime() { try (final GenericColumn column = index.getColumn(Column.TIME_COLUMN_NAME).getGenericColumn()) { - return new DateTime(column.getLongSingleValueRow(0)); + return DateTimes.utc(column.getLongSingleValueRow(0)); } } @@ -134,11 +124,12 @@ public class QueryableIndexStorageAdapter implements StorageAdapter public DateTime getMaxTime() { try (final GenericColumn column = index.getColumn(Column.TIME_COLUMN_NAME).getGenericColumn()) { - return new DateTime(column.getLongSingleValueRow(column.length() - 1)); + return DateTimes.utc(column.getLongSingleValueRow(column.length() - 1)); } } @Override + @Nullable public Comparable getMinValue(String dimension) { Column column = index.getColumn(dimension); @@ -150,6 +141,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter } @Override + @Nullable public Comparable getMaxValue(String dimension) { Column column = index.getColumn(dimension); @@ -167,17 +159,12 @@ public class QueryableIndexStorageAdapter implements StorageAdapter } @Override + @Nullable public ColumnCapabilities getColumnCapabilities(String column) { return getColumnCapabilites(index, column); } - @Override - public Map getDimensionHandlers() - { - return index.getDimensionHandlers(); - } - @Override public String getColumnTypeName(String columnName) { @@ -204,25 +191,18 @@ public class QueryableIndexStorageAdapter implements StorageAdapter @Nullable QueryMetrics queryMetrics ) { - Interval actualInterval = interval; - long minDataTimestamp = getMinTime().getMillis(); - long maxDataTimestamp = getMaxTime().getMillis(); - final Interval dataInterval = new Interval( - minDataTimestamp, - gran.bucketEnd(getMaxTime()).getMillis() - ); + DateTime minTime = getMinTime(); + long minDataTimestamp = minTime.getMillis(); + DateTime maxTime = getMaxTime(); + long maxDataTimestamp = maxTime.getMillis(); + final Interval dataInterval = new Interval(minTime, gran.bucketEnd(maxTime)); - if (!actualInterval.overlaps(dataInterval)) { + if (!interval.overlaps(dataInterval)) { return Sequences.empty(); } - if (actualInterval.getStart().isBefore(dataInterval.getStart())) { - actualInterval = actualInterval.withStart(dataInterval.getStart()); - } - if (actualInterval.getEnd().isAfter(dataInterval.getEnd())) { - actualInterval = actualInterval.withEnd(dataInterval.getEnd()); - } + final Interval actualInterval = interval.overlap(dataInterval); final ColumnSelectorBitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector( index.getBitmapFactoryForDimensions(), @@ -330,7 +310,8 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ); } - private static ColumnCapabilities 
getColumnCapabilites(ColumnSelector index, String columnName) + @Nullable + static ColumnCapabilities getColumnCapabilites(ColumnSelector index, String columnName) { Column columnObj = index.getColumn(columnName); if (columnObj == null) { @@ -381,10 +362,6 @@ public class QueryableIndexStorageAdapter implements StorageAdapter { final Offset baseOffset = offset.clone(); - final Map dictionaryColumnCache = Maps.newHashMap(); - final Map genericColumnCache = Maps.newHashMap(); - final Map objectColumnCache = Maps.newHashMap(); - final GenericColumn timestamps = index.getColumn(Column.TIME_COLUMN_NAME).getGenericColumn(); final Closer closer = Closer.create(); @@ -404,7 +381,10 @@ public class QueryableIndexStorageAdapter implements StorageAdapter public Cursor apply(final Interval inputInterval) { final long timeStart = Math.max(interval.getStartMillis(), inputInterval.getStartMillis()); - final long timeEnd = Math.min(interval.getEndMillis(), gran.increment(inputInterval.getStart()).getMillis()); + final long timeEnd = Math.min( + interval.getEndMillis(), + gran.increment(inputInterval.getStart()).getMillis() + ); if (descending) { for (; baseOffset.withinBounds(); baseOffset.increment()) { @@ -435,488 +415,27 @@ public class QueryableIndexStorageAdapter implements StorageAdapter ); - final Offset initOffset = offset.clone(); + final Offset baseCursorOffset = offset.clone(); + final ColumnSelectorFactory columnSelectorFactory = new QueryableIndexColumnSelectorFactory( + index, + virtualColumns, + descending, + closer, + baseCursorOffset.getBaseReadableOffset() + ); final DateTime myBucket = gran.toDateTime(inputInterval.getStartMillis()); - abstract class QueryableIndexBaseCursor implements HistoricalCursor - { - OffsetType cursorOffset; - - @Override - public OffsetType getOffset() - { - return cursorOffset; - } - - @Override - public ReadableOffset getReadableOffset() - { - return cursorOffset; - } - - @Override - public DateTime getTime() - { - return myBucket; - } - - @Override - public void advanceTo(int offset) - { - int count = 0; - while (count < offset && !isDone()) { - advance(); - count++; - } - } - - @Override - public boolean isDone() - { - return !cursorOffset.withinBounds(); - } - - @Override - public boolean isDoneOrInterrupted() - { - return isDone() || Thread.currentThread().isInterrupted(); - } - - @Override - public DimensionSelector makeDimensionSelector( - DimensionSpec dimensionSpec - ) - { - if (virtualColumns.exists(dimensionSpec.getDimension())) { - return virtualColumns.makeDimensionSelector(dimensionSpec, this); - } - - return dimensionSpec.decorate(makeDimensionSelectorUndecorated(dimensionSpec)); - } - - private DimensionSelector makeDimensionSelectorUndecorated( - DimensionSpec dimensionSpec - ) - { - final String dimension = dimensionSpec.getDimension(); - final ExtractionFn extractionFn = dimensionSpec.getExtractionFn(); - - final Column columnDesc = index.getColumn(dimension); - if (columnDesc == null) { - return NullDimensionSelector.instance(); - } - - if (dimension.equals(Column.TIME_COLUMN_NAME)) { - return new SingleScanTimeDimSelector( - makeLongColumnSelector(dimension), - extractionFn, - descending - ); - } - - if (columnDesc.getCapabilities().getType() == ValueType.LONG) { - return new LongWrappingDimensionSelector(makeLongColumnSelector(dimension), extractionFn); - } - - if (columnDesc.getCapabilities().getType() == ValueType.FLOAT) { - return new FloatWrappingDimensionSelector(makeFloatColumnSelector(dimension), extractionFn); - } - - if 
(columnDesc.getCapabilities().getType() == ValueType.DOUBLE) { - return new DoubleWrappingDimensionSelector(makeDoubleColumnSelector(dimension), extractionFn); - } - DictionaryEncodedColumn cachedColumn = dictionaryColumnCache.get(dimension); - if (cachedColumn == null) { - cachedColumn = columnDesc.getDictionaryEncoding(); - closer.register(cachedColumn); - dictionaryColumnCache.put(dimension, cachedColumn); - } - - final DictionaryEncodedColumn column = cachedColumn; - if (column == null) { - return NullDimensionSelector.instance(); - } else { - return column.makeDimensionSelector(this, extractionFn); - } - } - - @Override - public FloatColumnSelector makeFloatColumnSelector(String columnName) - { - if (virtualColumns.exists(columnName)) { - return virtualColumns.makeFloatColumnSelector(columnName, this); - } - - GenericColumn cachedMetricVals = genericColumnCache.get(columnName); - - if (cachedMetricVals == null) { - Column holder = index.getColumn(columnName); - if (holder != null && ValueType.isNumeric(holder.getCapabilities().getType())) { - cachedMetricVals = holder.getGenericColumn(); - closer.register(cachedMetricVals); - genericColumnCache.put(columnName, cachedMetricVals); - } - } - - if (cachedMetricVals == null) { - return ZeroFloatColumnSelector.instance(); - } - - final GenericColumn metricVals = cachedMetricVals; - return new HistoricalFloatColumnSelector() - { - @Override - public float getFloat() - { - return metricVals.getFloatSingleValueRow(getReadableOffset().getOffset()); - } - - @Override - public float get(int offset) - { - return metricVals.getFloatSingleValueRow(offset); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("metricVals", metricVals); - inspector.visit("cursorOffset", getReadableOffset()); - } - }; - } - - @Override - public DoubleColumnSelector makeDoubleColumnSelector(String columnName) - { - if (virtualColumns.exists(columnName)) { - return virtualColumns.makeDoubleColumnSelector(columnName, this); - } - - GenericColumn cachedMetricVals = genericColumnCache.get(columnName); - - if (cachedMetricVals == null) { - Column holder = index.getColumn(columnName); - if (holder != null && ValueType.isNumeric(holder.getCapabilities().getType())) { - cachedMetricVals = holder.getGenericColumn(); - closer.register(cachedMetricVals); - genericColumnCache.put(columnName, cachedMetricVals); - } - } - - if (cachedMetricVals == null) { - return ZeroDoubleColumnSelector.instance(); - } - - final GenericColumn metricVals = cachedMetricVals; - return new DoubleColumnSelector() - { - @Override - public double getDouble() - { - return metricVals.getDoubleSingleValueRow(getReadableOffset().getOffset()); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("metricVals", metricVals); - inspector.visit("cursorOffset", getReadableOffset()); - } - }; - } - - @Override - public LongColumnSelector makeLongColumnSelector(String columnName) - { - if (virtualColumns.exists(columnName)) { - return virtualColumns.makeLongColumnSelector(columnName, this); - } - - GenericColumn cachedMetricVals = genericColumnCache.get(columnName); - - if (cachedMetricVals == null) { - Column holder = index.getColumn(columnName); - if (holder != null && ValueType.isNumeric(holder.getCapabilities().getType())) { - cachedMetricVals = holder.getGenericColumn(); - closer.register(cachedMetricVals); - genericColumnCache.put(columnName, cachedMetricVals); - } - } - - if (cachedMetricVals == null) { - 
return ZeroLongColumnSelector.instance(); - } - - final GenericColumn metricVals = cachedMetricVals; - return new LongColumnSelector() - { - @Override - public long getLong() - { - return metricVals.getLongSingleValueRow(getReadableOffset().getOffset()); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("metricVals", metricVals); - inspector.visit("cursorOffset", getReadableOffset()); - } - }; - } - - - @Override - public ObjectColumnSelector makeObjectColumnSelector(String column) - { - if (virtualColumns.exists(column)) { - return virtualColumns.makeObjectColumnSelector(column, this); - } - - Object cachedColumnVals = objectColumnCache.get(column); - - if (cachedColumnVals == null) { - Column holder = index.getColumn(column); - - if (holder != null) { - final ColumnCapabilities capabilities = holder.getCapabilities(); - - if (capabilities.isDictionaryEncoded()) { - cachedColumnVals = holder.getDictionaryEncoding(); - } else if (capabilities.getType() == ValueType.COMPLEX) { - cachedColumnVals = holder.getComplexColumn(); - } else { - cachedColumnVals = holder.getGenericColumn(); - } - } - - if (cachedColumnVals != null) { - closer.register((Closeable) cachedColumnVals); - objectColumnCache.put(column, cachedColumnVals); - } - } - - if (cachedColumnVals == null) { - return null; - } - - if (cachedColumnVals instanceof GenericColumn) { - final GenericColumn columnVals = (GenericColumn) cachedColumnVals; - final ValueType type = columnVals.getType(); - - if (columnVals.hasMultipleValues()) { - throw new UnsupportedOperationException( - "makeObjectColumnSelector does not support multi-value GenericColumns" - ); - } - - if (type == ValueType.FLOAT) { - return new ObjectColumnSelector() - { - @Override - public Class classOfObject() - { - return Float.class; - } - - @Override - public Float get() - { - return columnVals.getFloatSingleValueRow(getReadableOffset().getOffset()); - } - }; - } - if (type == ValueType.DOUBLE) { - return new ObjectColumnSelector() - { - @Override - public Class classOfObject() - { - return Double.class; - } - - @Override - public Double get() - { - return columnVals.getDoubleSingleValueRow(getReadableOffset().getOffset()); - } - }; - } - if (type == ValueType.LONG) { - return new ObjectColumnSelector() - { - @Override - public Class classOfObject() - { - return Long.class; - } - - @Override - public Long get() - { - return columnVals.getLongSingleValueRow(getReadableOffset().getOffset()); - } - }; - } - if (type == ValueType.STRING) { - return new ObjectColumnSelector() - { - @Override - public Class classOfObject() - { - return String.class; - } - - @Override - public String get() - { - return columnVals.getStringSingleValueRow(getReadableOffset().getOffset()); - } - }; - } - } - - if (cachedColumnVals instanceof DictionaryEncodedColumn) { - final DictionaryEncodedColumn columnVals = (DictionaryEncodedColumn) cachedColumnVals; - if (columnVals.hasMultipleValues()) { - return new ObjectColumnSelector() - { - @Override - public Class classOfObject() - { - return Object.class; - } - - @Override - public Object get() - { - int currentOffset = getReadableOffset().getOffset(); - final IndexedInts multiValueRow = columnVals.getMultiValueRow(currentOffset); - if (multiValueRow.size() == 0) { - return null; - } else if (multiValueRow.size() == 1) { - return columnVals.lookupName(multiValueRow.get(0)); - } else { - final String[] strings = new String[multiValueRow.size()]; - for (int i = 0; i < multiValueRow.size(); 
i++) { - strings[i] = columnVals.lookupName(multiValueRow.get(i)); - } - return strings; - } - } - }; - } else { - return new ObjectColumnSelector() - { - @Override - public Class classOfObject() - { - return String.class; - } - - @Override - public String get() - { - int currentOffset = getReadableOffset().getOffset(); - return columnVals.lookupName(columnVals.getSingleValueRow(currentOffset)); - } - }; - } - } - - final ComplexColumn columnVals = (ComplexColumn) cachedColumnVals; - return new ObjectColumnSelector() - { - @Override - public Class classOfObject() - { - return columnVals.getClazz(); - } - - @Override - public Object get() - { - return columnVals.getRowValue(getReadableOffset().getOffset()); - } - }; - } - - @Override - public ColumnCapabilities getColumnCapabilities(String columnName) - { - if (virtualColumns.exists(columnName)) { - return virtualColumns.getColumnCapabilities(columnName); - } - - return getColumnCapabilites(index, columnName); - } - } - if (postFilter == null) { - return new QueryableIndexBaseCursor() - { - { - reset(); - } - - @Override - public void advance() - { - BaseQuery.checkInterrupted(); - cursorOffset.increment(); - } - - @Override - public void advanceUninterruptibly() - { - cursorOffset.increment(); - } - - @Override - public void reset() - { - cursorOffset = initOffset.clone(); - } - }; + return new QueryableIndexCursor(baseCursorOffset, columnSelectorFactory, myBucket); } else { - return new QueryableIndexBaseCursor() - { - private Offset baseOffset; - - { - cursorOffset = new FilteredOffset(this, descending, postFilter, bitmapIndexSelector); - reset(); - } - - @Override - public ReadableOffset getReadableOffset() - { - return baseOffset; - } - - @Override - public void advance() - { - BaseQuery.checkInterrupted(); - cursorOffset.incrementInterruptibly(); - } - - @Override - public void advanceUninterruptibly() - { - if (!Thread.currentThread().isInterrupted()) { - cursorOffset.increment(); - } - } - - @Override - public void reset() - { - baseOffset = initOffset.clone(); - cursorOffset.reset(baseOffset); - } - }; + FilteredOffset filteredOffset = new FilteredOffset( + baseCursorOffset, + columnSelectorFactory, + descending, + postFilter, + bitmapIndexSelector + ); + return new QueryableIndexCursor(filteredOffset, columnSelectorFactory, myBucket); } } @@ -927,14 +446,92 @@ public class QueryableIndexStorageAdapter implements StorageAdapter } } + private static class QueryableIndexCursor implements HistoricalCursor + { + private final Offset cursorOffset; + private final ColumnSelectorFactory columnSelectorFactory; + private final DateTime bucketStart; + + QueryableIndexCursor(Offset cursorOffset, ColumnSelectorFactory columnSelectorFactory, DateTime bucketStart) + { + this.cursorOffset = cursorOffset; + this.columnSelectorFactory = columnSelectorFactory; + this.bucketStart = bucketStart; + } + + @Override + public Offset getOffset() + { + return cursorOffset; + } + + @Override + public ColumnSelectorFactory getColumnSelectorFactory() + { + return columnSelectorFactory; + } + + @Override + public DateTime getTime() + { + return bucketStart; + } + + @Override + public void advance() + { + cursorOffset.increment(); + // Must call BaseQuery.checkInterrupted() after cursorOffset.increment(), not before, because + // FilteredOffset.increment() is a potentially long, not an "instant" operation (unlike to all other subclasses + // of Offset) and it returns early on interruption, leaving itself in an illegal state. 
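The cursor returned above only ever talks to the Offset abstraction: when a post-filter is present, the base offset is wrapped in a FilteredOffset and handed to the same QueryableIndexCursor. A rough, self-contained sketch of that wrapping under simplified names (not the actual Druid classes; the real FilteredOffset additionally handles filter matchers and interruption):

import java.util.function.IntPredicate;

// Sketch only: a cursor-style offset plus a filtering decorator over it.
abstract class OffsetSketch
{
  abstract boolean withinBounds();
  abstract void increment();
  abstract int getOffset();
}

class AllRowsOffset extends OffsetSketch
{
  private final int rowCount;
  private int current;

  AllRowsOffset(int rowCount)
  {
    this.rowCount = rowCount;
  }

  @Override boolean withinBounds() { return current < rowCount; }
  @Override void increment() { current++; }
  @Override int getOffset() { return current; }
}

class FilteringOffset extends OffsetSketch
{
  private final OffsetSketch base;
  private final IntPredicate matches;

  FilteringOffset(OffsetSketch base, IntPredicate matches)
  {
    this.base = base;
    this.matches = matches;
    forwardToMatch();   // position on the first matching row
  }

  private void forwardToMatch()
  {
    while (base.withinBounds() && !matches.test(base.getOffset())) {
      base.increment();
    }
  }

  @Override boolean withinBounds() { return base.withinBounds(); }
  @Override void increment() { base.increment(); forwardToMatch(); }
  @Override int getOffset() { return base.getOffset(); }
}

class OffsetDemo
{
  public static void main(String[] args)
  {
    OffsetSketch cursorOffset = new FilteringOffset(new AllRowsOffset(10), row -> row % 3 == 0);
    while (cursorOffset.withinBounds()) {
      System.out.println(cursorOffset.getOffset());   // prints 0, 3, 6, 9
      cursorOffset.increment();
    }
  }
}

Because filtering lives entirely in the decorating offset, the cursor itself needs no separate filtered and unfiltered implementations.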
We should not let + // aggregators, etc. access this illegal state and throw a QueryInterruptedException by calling + // BaseQuery.checkInterrupted(). + BaseQuery.checkInterrupted(); + } + + @Override + public void advanceUninterruptibly() + { + cursorOffset.increment(); + } + + @Override + public void advanceTo(int offset) + { + int count = 0; + while (count < offset && !isDone()) { + advance(); + count++; + } + } + + @Override + public boolean isDone() + { + return !cursorOffset.withinBounds(); + } + + @Override + public boolean isDoneOrInterrupted() + { + return isDone() || Thread.currentThread().isInterrupted(); + } + + @Override + public void reset() + { + cursorOffset.reset(); + } + } + public abstract static class TimestampCheckingOffset extends Offset { - protected final Offset baseOffset; - protected final GenericColumn timestamps; - protected final long timeLimit; - protected final boolean allWithinThreshold; + final Offset baseOffset; + final GenericColumn timestamps; + final long timeLimit; + final boolean allWithinThreshold; - public TimestampCheckingOffset( + TimestampCheckingOffset( Offset baseOffset, GenericColumn timestamps, long timeLimit, @@ -966,6 +563,18 @@ public class QueryableIndexStorageAdapter implements StorageAdapter return timeInRange(timestamps.getLongSingleValueRow(baseOffset.getOffset())); } + @Override + public void reset() + { + baseOffset.reset(); + } + + @Override + public ReadableOffset getBaseReadableOffset() + { + return baseOffset.getBaseReadableOffset(); + } + protected abstract boolean timeInRange(long current); @Override @@ -974,6 +583,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter baseOffset.increment(); } + @SuppressWarnings("MethodDoesntCallSuperMethod") @Override public Offset clone() { @@ -991,7 +601,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter public static class AscendingTimestampCheckingOffset extends TimestampCheckingOffset { - public AscendingTimestampCheckingOffset( + AscendingTimestampCheckingOffset( Offset baseOffset, GenericColumn timestamps, long timeLimit, @@ -1014,6 +624,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter "<" + timeLimit + "::" + baseOffset; } + @SuppressWarnings("MethodDoesntCallSuperMethod") @Override public Offset clone() { @@ -1023,7 +634,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter public static class DescendingTimestampCheckingOffset extends TimestampCheckingOffset { - public DescendingTimestampCheckingOffset( + DescendingTimestampCheckingOffset( Offset baseOffset, GenericColumn timestamps, long timeLimit, @@ -1047,6 +658,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter "::" + baseOffset; } + @SuppressWarnings("MethodDoesntCallSuperMethod") @Override public Offset clone() { @@ -1054,56 +666,6 @@ public class QueryableIndexStorageAdapter implements StorageAdapter } } - public static class NoFilterOffset extends Offset - { - private final int rowCount; - private final boolean descending; - private int currentOffset; - - NoFilterOffset(int currentOffset, int rowCount, boolean descending) - { - this.currentOffset = currentOffset; - this.rowCount = rowCount; - this.descending = descending; - } - - @Override - public void increment() - { - currentOffset++; - } - - @Override - public boolean withinBounds() - { - return currentOffset < rowCount; - } - - @Override - public Offset clone() - { - return new NoFilterOffset(currentOffset, rowCount, descending); - } - - @Override - public int 
getOffset() - { - return descending ? rowCount - currentOffset - 1 : currentOffset; - } - - @Override - public String toString() - { - return currentOffset + "/" + rowCount + (descending ? "(DSC)" : ""); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("descending", descending); - } - } - @Override public Metadata getMetadata() { diff --git a/processing/src/main/java/io/druid/segment/Rowboat.java b/processing/src/main/java/io/druid/segment/Rowboat.java index 6d66669f75d..1ef2dbf3aa1 100644 --- a/processing/src/main/java/io/druid/segment/Rowboat.java +++ b/processing/src/main/java/io/druid/segment/Rowboat.java @@ -21,10 +21,10 @@ package io.druid.segment; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; +import io.druid.java.util.common.DateTimes; import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; import it.unimi.dsi.fastutil.ints.IntRBTreeSet; import it.unimi.dsi.fastutil.ints.IntSortedSet; -import org.joda.time.DateTime; import java.util.Arrays; @@ -132,7 +132,7 @@ public class Rowboat implements Comparable public String toString() { return "Rowboat{" + - "timestamp=" + new DateTime(timestamp).toString() + + "timestamp=" + DateTimes.utc(timestamp) + ", dims=" + Arrays.deepToString(dims) + ", metrics=" + Arrays.toString(metrics) + ", comprisedRows=" + comprisedRows + diff --git a/processing/src/main/java/io/druid/segment/Segment.java b/processing/src/main/java/io/druid/segment/Segment.java index aef3dc86513..4c38a7f36b4 100644 --- a/processing/src/main/java/io/druid/segment/Segment.java +++ b/processing/src/main/java/io/druid/segment/Segment.java @@ -19,12 +19,14 @@ package io.druid.segment; +import io.druid.guice.annotations.PublicApi; import org.joda.time.Interval; import java.io.Closeable; /** */ +@PublicApi public interface Segment extends Closeable { public String getIdentifier(); diff --git a/processing/src/main/java/io/druid/segment/StorageAdapter.java b/processing/src/main/java/io/druid/segment/StorageAdapter.java index 82b181a9bed..24f12c43760 100644 --- a/processing/src/main/java/io/druid/segment/StorageAdapter.java +++ b/processing/src/main/java/io/druid/segment/StorageAdapter.java @@ -19,16 +19,17 @@ package io.druid.segment; +import io.druid.guice.annotations.PublicApi; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.data.Indexed; import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nullable; -import java.util.Map; /** */ +@PublicApi public interface StorageAdapter extends CursorFactory { public String getSegmentIdentifier(); @@ -47,7 +48,9 @@ public interface StorageAdapter extends CursorFactory public int getDimensionCardinality(String column); public DateTime getMinTime(); public DateTime getMaxTime(); + @Nullable public Comparable getMinValue(String column); + @Nullable public Comparable getMaxValue(String column); public Capabilities getCapabilities(); @@ -63,8 +66,6 @@ public interface StorageAdapter extends CursorFactory @Nullable public ColumnCapabilities getColumnCapabilities(String column); - public Map getDimensionHandlers(); - /** * Like {@link ColumnCapabilities#getType()}, but may return a more descriptive string for complex columns. 
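Replacing new DateTime(timestamp) with DateTimes.utc(timestamp) in Rowboat.toString() above (and in getMinTime()/getMaxTime() earlier) pins the rendered timestamp to UTC rather than the JVM's default time zone. A small sketch of the difference using plain joda-time, which Druid already depends on (the example millis value is arbitrary):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

class UtcRenderingDemo
{
  public static void main(String[] args)
  {
    long millis = 1500000000000L;
    // Rendering depends on whatever the default zone happens to be on this machine.
    System.out.println(new DateTime(millis));
    // Pinning the zone makes toString() stable everywhere: 2017-07-14T02:40:00.000Z
    System.out.println(new DateTime(millis, DateTimeZone.UTC));
  }
}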
* @param column column name diff --git a/processing/src/main/java/io/druid/segment/StringDimensionIndexer.java b/processing/src/main/java/io/druid/segment/StringDimensionIndexer.java index 30a14dcd5bd..d7d81d3ef54 100644 --- a/processing/src/main/java/io/druid/segment/StringDimensionIndexer.java +++ b/processing/src/main/java/io/druid/segment/StringDimensionIndexer.java @@ -23,7 +23,6 @@ import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.base.Strings; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.primitives.Ints; import io.druid.collections.bitmap.BitmapFactory; import io.druid.collections.bitmap.MutableBitmap; @@ -41,9 +40,11 @@ import io.druid.segment.data.IndexedInts; import io.druid.segment.data.IndexedIterable; import io.druid.segment.filter.BooleanValueMatcher; import io.druid.segment.incremental.IncrementalIndex; -import io.druid.segment.incremental.IncrementalIndexStorageAdapter; +import io.druid.segment.incremental.TimeAndDimsHolder; import it.unimi.dsi.fastutil.ints.IntArrays; import it.unimi.dsi.fastutil.ints.IntIterator; +import it.unimi.dsi.fastutil.objects.Object2IntMap; +import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap; import it.unimi.dsi.fastutil.objects.Object2IntRBTreeMap; import it.unimi.dsi.fastutil.objects.Object2IntSortedMap; @@ -53,7 +54,6 @@ import java.util.Arrays; import java.util.BitSet; import java.util.Iterator; import java.util.List; -import java.util.Map; import java.util.function.Function; public class StringDimensionIndexer implements DimensionIndexer @@ -65,7 +65,7 @@ public class StringDimensionIndexer implements DimensionIndexer valueToId = Maps.newHashMap(); + private final Object2IntMap valueToId = new Object2IntOpenHashMap<>(); private final List idToValue = Lists.newArrayList(); private final Object lock; @@ -73,13 +73,13 @@ public class StringDimensionIndexer implements DimensionIndexer= 0) { return prev; } final int index = size(); @@ -370,7 +370,7 @@ public class StringDimensionIndexer implements DimensionIndexer @Override public int skip(int n) { - return IntIteratorUtils.skip(this, n); + return IntIteratorUtils.skip(baseIterator, n); } }; } diff --git a/processing/src/main/java/io/druid/segment/VirtualColumn.java b/processing/src/main/java/io/druid/segment/VirtualColumn.java index 4c49d0a555c..cbdc7c3fde5 100644 --- a/processing/src/main/java/io/druid/segment/VirtualColumn.java +++ b/processing/src/main/java/io/druid/segment/VirtualColumn.java @@ -26,7 +26,6 @@ import io.druid.query.dimension.DimensionSpec; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.virtual.ExpressionVirtualColumn; -import javax.annotation.Nullable; import java.util.List; /** @@ -69,9 +68,8 @@ public interface VirtualColumn extends Cacheable * @param dimensionSpec the dimensionSpec this column was referenced with * @param factory column selector factory * - * @return the selector, or null if we can't make a selector + * @return the selector, must not be null */ - @Nullable DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec, ColumnSelectorFactory factory); /** @@ -81,9 +79,8 @@ public interface VirtualColumn extends Cacheable * @param columnName the name this virtual column was referenced with * @param factory column selector factory * - * @return the selector, or null if we can't make a selector + * @return the selector, must not be null */ - @Nullable FloatColumnSelector makeFloatColumnSelector(String 
columnName, ColumnSelectorFactory factory); /** @@ -93,9 +90,8 @@ public interface VirtualColumn extends Cacheable * @param columnName the name this virtual column was referenced with * @param factory column selector factory * - * @return the selector, or null if we can't make a selector + * @return the selector, must not be null */ - @Nullable LongColumnSelector makeLongColumnSelector(String columnName, ColumnSelectorFactory factory); /** @@ -105,9 +101,8 @@ public interface VirtualColumn extends Cacheable * @param columnName the name this virtual column was referenced with * @param factory column selector factory * - * @return the selector, or null if we can't make a selector + * @return the selector, must not be null */ - @Nullable DoubleColumnSelector makeDoubleColumnSelector(String columnName, ColumnSelectorFactory factory); /** diff --git a/processing/src/main/java/io/druid/segment/VirtualColumns.java b/processing/src/main/java/io/druid/segment/VirtualColumns.java index 964ce4544df..ef7003455a7 100644 --- a/processing/src/main/java/io/druid/segment/VirtualColumns.java +++ b/processing/src/main/java/io/druid/segment/VirtualColumns.java @@ -127,6 +127,13 @@ public class VirtualColumns implements Cacheable private final Map withDotSupport; private final Map withoutDotSupport; + /** + * Returns true if a virtual column exists with a particular columnName. + * + * @param columnName the column name + * + * @return true or false + */ public boolean exists(String columnName) { return getVirtualColumn(columnName) != null; @@ -142,11 +149,21 @@ public class VirtualColumns implements Cacheable return withDotSupport.get(baseColumnName); } + /** + * Create an object selector. + * + * @param columnName column mame + * @param factory base column selector factory + * + * @return selector + * + * @throws IllegalArgumentException if the virtual column does not exist (see {@link #exists(String)} + */ public ObjectColumnSelector makeObjectColumnSelector(String columnName, ColumnSelectorFactory factory) { final VirtualColumn virtualColumn = getVirtualColumn(columnName); if (virtualColumn == null) { - return null; + throw new IAE("No such virtual column[%s]", columnName); } else { return Preconditions.checkNotNull( virtualColumn.makeObjectColumnSelector(columnName, factory), @@ -157,39 +174,82 @@ public class VirtualColumns implements Cacheable } } + /** + * Create a dimension (string) selector. + * + * @param dimensionSpec the dimensionSpec for this selector + * @param factory base column selector factory + * + * @return selector + * + * @throws IllegalArgumentException if the virtual column does not exist (see {@link #exists(String)} + */ public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec, ColumnSelectorFactory factory) { final VirtualColumn virtualColumn = getVirtualColumn(dimensionSpec.getDimension()); if (virtualColumn == null) { - return dimensionSpec.decorate(NullDimensionSelector.instance()); + throw new IAE("No such virtual column[%s]", dimensionSpec.getDimension()); } else { final DimensionSelector selector = virtualColumn.makeDimensionSelector(dimensionSpec, factory); - return selector == null ? dimensionSpec.decorate(NullDimensionSelector.instance()) : selector; + Preconditions.checkNotNull(selector, "selector"); + return selector; } } + /** + * Create a float selector. 
+ * + * @param columnName column mame + * @param factory base column selector factory + * + * @return selector + * + * @throws IllegalArgumentException if the virtual column does not exist (see {@link #exists(String)} + */ public FloatColumnSelector makeFloatColumnSelector(String columnName, ColumnSelectorFactory factory) { final VirtualColumn virtualColumn = getVirtualColumn(columnName); if (virtualColumn == null) { - return ZeroFloatColumnSelector.instance(); + throw new IAE("No such virtual column[%s]", columnName); } else { final FloatColumnSelector selector = virtualColumn.makeFloatColumnSelector(columnName, factory); - return selector == null ? ZeroFloatColumnSelector.instance() : selector; + Preconditions.checkNotNull(selector, "selector"); + return selector; } } + /** + * Create a long selector. + * + * @param columnName column mame + * @param factory base column selector factory + * + * @return selector + * + * @throws IllegalArgumentException if the virtual column does not exist (see {@link #exists(String)} + */ public LongColumnSelector makeLongColumnSelector(String columnName, ColumnSelectorFactory factory) { final VirtualColumn virtualColumn = getVirtualColumn(columnName); if (virtualColumn == null) { - return ZeroLongColumnSelector.instance(); + throw new IAE("No such virtual column[%s]", columnName); } else { final LongColumnSelector selector = virtualColumn.makeLongColumnSelector(columnName, factory); - return selector == null ? ZeroLongColumnSelector.instance() : selector; + Preconditions.checkNotNull(selector, "selector"); + return selector; } } + /** + * Create a double selector. + * + * @param columnName column mame + * @param factory base column selector factory + * + * @return selector + * + * @throws IllegalArgumentException if the virtual column does not exist (see {@link #exists(String)} + */ public DoubleColumnSelector makeDoubleColumnSelector( String columnName, ColumnSelectorFactory factory @@ -197,10 +257,11 @@ public class VirtualColumns implements Cacheable { final VirtualColumn virtualColumn = getVirtualColumn(columnName); if (virtualColumn == null) { - return ZeroDoubleColumnSelector.instance(); + throw new IAE("No such virtual column[%s]", columnName); } else { final DoubleColumnSelector selector = virtualColumn.makeDoubleColumnSelector(columnName, factory); - return selector == null ? 
ZeroDoubleColumnSelector.instance() : selector; + Preconditions.checkNotNull(selector, "selector"); + return selector; } } diff --git a/processing/src/main/java/io/druid/segment/column/DictionaryEncodedColumn.java b/processing/src/main/java/io/druid/segment/column/DictionaryEncodedColumn.java index 0a6cd57c8c2..6fed80fea70 100644 --- a/processing/src/main/java/io/druid/segment/column/DictionaryEncodedColumn.java +++ b/processing/src/main/java/io/druid/segment/column/DictionaryEncodedColumn.java @@ -22,7 +22,7 @@ package io.druid.segment.column; import io.druid.query.extraction.ExtractionFn; import io.druid.segment.DimensionSelector; import io.druid.segment.data.IndexedInts; -import io.druid.segment.historical.OffsetHolder; +import io.druid.segment.data.ReadableOffset; import java.io.Closeable; @@ -38,5 +38,5 @@ public interface DictionaryEncodedColumn extends public int lookupId(ActualType name); public int getCardinality(); - DimensionSelector makeDimensionSelector(OffsetHolder offsetHolder, ExtractionFn extractionFn); + DimensionSelector makeDimensionSelector(ReadableOffset offset, ExtractionFn extractionFn); } diff --git a/processing/src/main/java/io/druid/segment/column/GenericColumn.java b/processing/src/main/java/io/druid/segment/column/GenericColumn.java index 6c1c1f7618e..f19e3fd1d17 100644 --- a/processing/src/main/java/io/druid/segment/column/GenericColumn.java +++ b/processing/src/main/java/io/druid/segment/column/GenericColumn.java @@ -21,9 +21,10 @@ package io.druid.segment.column; import io.druid.query.monomorphicprocessing.CalledFromHotLoop; import io.druid.query.monomorphicprocessing.HotLoopCallee; -import io.druid.segment.data.Indexed; -import io.druid.segment.data.IndexedFloats; -import io.druid.segment.data.IndexedLongs; +import io.druid.segment.DoubleColumnSelector; +import io.druid.segment.LongColumnSelector; +import io.druid.segment.data.ReadableOffset; +import io.druid.segment.historical.HistoricalFloatColumnSelector; import java.io.Closeable; @@ -37,18 +38,18 @@ public interface GenericColumn extends HotLoopCallee, Closeable @CalledFromHotLoop public String getStringSingleValueRow(int rowNum); + @CalledFromHotLoop - public Indexed getStringMultiValueRow(int rowNum); + float getFloatSingleValueRow(int rowNum); + HistoricalFloatColumnSelector makeFloatSingleValueRowSelector(ReadableOffset offset); + @CalledFromHotLoop - public float getFloatSingleValueRow(int rowNum); - @CalledFromHotLoop - public IndexedFloats getFloatMultiValueRow(int rowNum); - @CalledFromHotLoop - public long getLongSingleValueRow(int rowNum); - @CalledFromHotLoop - public IndexedLongs getLongMultiValueRow(int rowNum); + long getLongSingleValueRow(int rowNum); + LongColumnSelector makeLongSingleValueRowSelector(ReadableOffset offset); + @CalledFromHotLoop double getDoubleSingleValueRow(int rowNum); + DoubleColumnSelector makeDoubleSingleValueRowSelector(ReadableOffset offset); @Override void close(); diff --git a/processing/src/main/java/io/druid/segment/column/IndexedDoublesGenericColumn.java b/processing/src/main/java/io/druid/segment/column/IndexedDoublesGenericColumn.java index ed053136881..9a69b3bdd6a 100644 --- a/processing/src/main/java/io/druid/segment/column/IndexedDoublesGenericColumn.java +++ b/processing/src/main/java/io/druid/segment/column/IndexedDoublesGenericColumn.java @@ -20,10 +20,11 @@ package io.druid.segment.column; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; -import io.druid.segment.data.Indexed; +import io.druid.segment.DoubleColumnSelector; +import 
io.druid.segment.LongColumnSelector; import io.druid.segment.data.IndexedDoubles; -import io.druid.segment.data.IndexedFloats; -import io.druid.segment.data.IndexedLongs; +import io.druid.segment.data.ReadableOffset; +import io.druid.segment.historical.HistoricalFloatColumnSelector; public class IndexedDoublesGenericColumn implements GenericColumn @@ -59,12 +60,6 @@ public class IndexedDoublesGenericColumn implements GenericColumn throw new UnsupportedOperationException(); } - @Override - public Indexed getStringMultiValueRow(int rowNum) - { - throw new UnsupportedOperationException(); - } - @Override public float getFloatSingleValueRow(int rowNum) { @@ -72,9 +67,9 @@ public class IndexedDoublesGenericColumn implements GenericColumn } @Override - public IndexedFloats getFloatMultiValueRow(int rowNum) + public HistoricalFloatColumnSelector makeFloatSingleValueRowSelector(ReadableOffset offset) { - throw new UnsupportedOperationException(); + return column.makeFloatColumnSelector(offset); } @Override @@ -84,9 +79,9 @@ public class IndexedDoublesGenericColumn implements GenericColumn } @Override - public IndexedLongs getLongMultiValueRow(int rowNum) + public LongColumnSelector makeLongSingleValueRowSelector(ReadableOffset offset) { - throw new UnsupportedOperationException(); + return column.makeLongColumnSelector(offset); } @Override @@ -95,6 +90,12 @@ public class IndexedDoublesGenericColumn implements GenericColumn return column.get(rowNum); } + @Override + public DoubleColumnSelector makeDoubleSingleValueRowSelector(ReadableOffset offset) + { + return column.makeDoubleColumnSelector(offset); + } + @Override public void close() { diff --git a/processing/src/main/java/io/druid/segment/column/IndexedFloatsGenericColumn.java b/processing/src/main/java/io/druid/segment/column/IndexedFloatsGenericColumn.java index af02877a364..0e9416f9c8e 100644 --- a/processing/src/main/java/io/druid/segment/column/IndexedFloatsGenericColumn.java +++ b/processing/src/main/java/io/druid/segment/column/IndexedFloatsGenericColumn.java @@ -20,9 +20,11 @@ package io.druid.segment.column; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; -import io.druid.segment.data.Indexed; +import io.druid.segment.DoubleColumnSelector; +import io.druid.segment.LongColumnSelector; import io.druid.segment.data.IndexedFloats; -import io.druid.segment.data.IndexedLongs; +import io.druid.segment.data.ReadableOffset; +import io.druid.segment.historical.HistoricalFloatColumnSelector; /** */ @@ -59,12 +61,6 @@ public class IndexedFloatsGenericColumn implements GenericColumn throw new UnsupportedOperationException(); } - @Override - public Indexed getStringMultiValueRow(int rowNum) - { - throw new UnsupportedOperationException(); - } - @Override public float getFloatSingleValueRow(int rowNum) { @@ -72,9 +68,9 @@ public class IndexedFloatsGenericColumn implements GenericColumn } @Override - public IndexedFloats getFloatMultiValueRow(int rowNum) + public HistoricalFloatColumnSelector makeFloatSingleValueRowSelector(ReadableOffset offset) { - throw new UnsupportedOperationException(); + return column.makeFloatColumnSelector(offset); } @Override @@ -84,9 +80,9 @@ public class IndexedFloatsGenericColumn implements GenericColumn } @Override - public IndexedLongs getLongMultiValueRow(int rowNum) + public LongColumnSelector makeLongSingleValueRowSelector(ReadableOffset offset) { - throw new UnsupportedOperationException(); + return column.makeLongColumnSelector(offset); } @Override @@ -95,6 +91,12 @@ public class 
IndexedFloatsGenericColumn implements GenericColumn return (double) column.get(rowNum); } + @Override + public DoubleColumnSelector makeDoubleSingleValueRowSelector(ReadableOffset offset) + { + return column.makeDoubleColumnSelector(offset); + } + @Override public void close() { diff --git a/processing/src/main/java/io/druid/segment/column/IndexedLongsGenericColumn.java b/processing/src/main/java/io/druid/segment/column/IndexedLongsGenericColumn.java index ecfd3330066..93e163f6458 100644 --- a/processing/src/main/java/io/druid/segment/column/IndexedLongsGenericColumn.java +++ b/processing/src/main/java/io/druid/segment/column/IndexedLongsGenericColumn.java @@ -20,9 +20,11 @@ package io.druid.segment.column; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; -import io.druid.segment.data.Indexed; -import io.druid.segment.data.IndexedFloats; +import io.druid.segment.DoubleColumnSelector; +import io.druid.segment.LongColumnSelector; import io.druid.segment.data.IndexedLongs; +import io.druid.segment.data.ReadableOffset; +import io.druid.segment.historical.HistoricalFloatColumnSelector; /** */ @@ -59,12 +61,6 @@ public class IndexedLongsGenericColumn implements GenericColumn throw new UnsupportedOperationException(); } - @Override - public Indexed getStringMultiValueRow(int rowNum) - { - throw new UnsupportedOperationException(); - } - @Override public float getFloatSingleValueRow(int rowNum) { @@ -72,9 +68,9 @@ public class IndexedLongsGenericColumn implements GenericColumn } @Override - public IndexedFloats getFloatMultiValueRow(int rowNum) + public HistoricalFloatColumnSelector makeFloatSingleValueRowSelector(ReadableOffset offset) { - throw new UnsupportedOperationException(); + return column.makeFloatColumnSelector(offset); } @Override @@ -84,9 +80,9 @@ public class IndexedLongsGenericColumn implements GenericColumn } @Override - public IndexedLongs getLongMultiValueRow(int rowNum) + public LongColumnSelector makeLongSingleValueRowSelector(ReadableOffset offset) { - throw new UnsupportedOperationException(); + return column.makeLongColumnSelector(offset); } @Override @@ -95,6 +91,12 @@ public class IndexedLongsGenericColumn implements GenericColumn return (double) column.get(rowNum); } + @Override + public DoubleColumnSelector makeDoubleSingleValueRowSelector(ReadableOffset offset) + { + return column.makeDoubleColumnSelector(offset); + } + @Override public void close() { diff --git a/processing/src/main/java/io/druid/segment/column/SimpleDictionaryEncodedColumn.java b/processing/src/main/java/io/druid/segment/column/SimpleDictionaryEncodedColumn.java index 16d309e898d..fb39014125a 100644 --- a/processing/src/main/java/io/druid/segment/column/SimpleDictionaryEncodedColumn.java +++ b/processing/src/main/java/io/druid/segment/column/SimpleDictionaryEncodedColumn.java @@ -31,10 +31,10 @@ import io.druid.segment.IdLookup; import io.druid.segment.data.CachingIndexed; import io.druid.segment.data.IndexedInts; import io.druid.segment.data.IndexedMultivalue; +import io.druid.segment.data.ReadableOffset; import io.druid.segment.data.SingleIndexedInt; import io.druid.segment.filter.BooleanValueMatcher; import io.druid.segment.historical.HistoricalDimensionSelector; -import io.druid.segment.historical.OffsetHolder; import io.druid.segment.historical.SingleValueHistoricalDimensionSelector; import javax.annotation.Nullable; @@ -105,10 +105,7 @@ public class SimpleDictionaryEncodedColumn } @Override - public HistoricalDimensionSelector makeDimensionSelector( - final OffsetHolder 
offsetHolder, - final ExtractionFn extractionFn - ) + public HistoricalDimensionSelector makeDimensionSelector(final ReadableOffset offset, final ExtractionFn extractionFn) { abstract class QueryableDimensionSelector implements HistoricalDimensionSelector, IdLookup { @@ -158,7 +155,7 @@ public class SimpleDictionaryEncodedColumn @Override public IndexedInts getRow() { - return multiValueColumn.get(offsetHolder.getReadableOffset().getOffset()); + return multiValueColumn.get(offset.getOffset()); } @Override @@ -183,8 +180,7 @@ public class SimpleDictionaryEncodedColumn public void inspectRuntimeShape(RuntimeShapeInspector inspector) { inspector.visit("multiValueColumn", multiValueColumn); - inspector.visit("offsetHolder", offsetHolder); - inspector.visit("offset", offsetHolder.getReadableOffset()); + inspector.visit("offset", offset); inspector.visit("extractionFn", extractionFn); } } @@ -202,7 +198,7 @@ public class SimpleDictionaryEncodedColumn @Override public int getRowValue() { - return column.get(offsetHolder.getReadableOffset().getOffset()); + return column.get(offset.getOffset()); } @Override @@ -273,8 +269,7 @@ public class SimpleDictionaryEncodedColumn public void inspectRuntimeShape(RuntimeShapeInspector inspector) { inspector.visit("column", column); - inspector.visit("offsetHolder", offsetHolder); - inspector.visit("offset", offsetHolder.getReadableOffset()); + inspector.visit("offset", offset); inspector.visit("extractionFn", extractionFn); } } diff --git a/processing/src/main/java/io/druid/segment/data/ArrayBasedIndexedInts.java b/processing/src/main/java/io/druid/segment/data/ArrayBasedIndexedInts.java index f78ba2fca54..4c3302b1c9f 100644 --- a/processing/src/main/java/io/druid/segment/data/ArrayBasedIndexedInts.java +++ b/processing/src/main/java/io/druid/segment/data/ArrayBasedIndexedInts.java @@ -82,12 +82,6 @@ public final class ArrayBasedIndexedInts implements IndexedInts return IntIterators.wrap(expansion, 0, size); } - @Override - public void fill(int index, int[] toFill) - { - throw new UnsupportedOperationException("fill not supported"); - } - @Override public void close() throws IOException { diff --git a/processing/src/main/java/io/druid/segment/data/BitmapCompressedIndexedInts.java b/processing/src/main/java/io/druid/segment/data/BitmapCompressedIndexedInts.java index 36da8097e57..fabbbfcd82c 100644 --- a/processing/src/main/java/io/druid/segment/data/BitmapCompressedIndexedInts.java +++ b/processing/src/main/java/io/druid/segment/data/BitmapCompressedIndexedInts.java @@ -88,12 +88,6 @@ public class BitmapCompressedIndexedInts implements IndexedInts, Comparable 0) { - if (bufferNum != currIndex) { - loadBuffer(bufferNum); - } - - buffer.mark(); - buffer.position(buffer.position() + bufferIndex); - final int numToGet = Math.min(buffer.remaining(), leftToFill); - buffer.get(toFill, toFill.length - leftToFill, numToGet); - buffer.reset(); - leftToFill -= numToGet; - ++bufferNum; - bufferIndex = 0; - } - } - protected void loadBuffer(int bufferNum) { CloseQuietly.close(holder); diff --git a/processing/src/main/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplier.java index b18318c0c7f..c9a3c82d6f8 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplier.java @@ -380,12 +380,6 @@ public class CompressedVSizeIntsIndexedSupplier implements 
WritableSupplier extends Iterable, HotLoopCallee { Class getClazz(); diff --git a/processing/src/main/java/io/druid/segment/data/IndexedDoubles.java b/processing/src/main/java/io/druid/segment/data/IndexedDoubles.java index a5109ff05a8..84fbb8b21de 100644 --- a/processing/src/main/java/io/druid/segment/data/IndexedDoubles.java +++ b/processing/src/main/java/io/druid/segment/data/IndexedDoubles.java @@ -19,6 +19,11 @@ package io.druid.segment.data; +import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; +import io.druid.segment.DoubleColumnSelector; +import io.druid.segment.LongColumnSelector; +import io.druid.segment.historical.HistoricalFloatColumnSelector; + import java.io.Closeable; public interface IndexedDoubles extends Closeable @@ -29,5 +34,68 @@ public interface IndexedDoubles extends Closeable @Override void close(); + + default DoubleColumnSelector makeDoubleColumnSelector(ReadableOffset offset) + { + return new DoubleColumnSelector() + { + @Override + public double getDouble() + { + return IndexedDoubles.this.get(offset.getOffset()); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("indexed", IndexedDoubles.this); + inspector.visit("offset", offset); + } + }; + } + + default HistoricalFloatColumnSelector makeFloatColumnSelector(ReadableOffset offset) + { + return new HistoricalFloatColumnSelector() + { + @Override + public float get(int offset) + { + return (float) IndexedDoubles.this.get(offset); + } + + @Override + public float getFloat() + { + return (float) IndexedDoubles.this.get(offset.getOffset()); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("indexed", IndexedDoubles.this); + inspector.visit("offset", offset); + } + }; + } + + default LongColumnSelector makeLongColumnSelector(ReadableOffset offset) + { + return new LongColumnSelector() + { + @Override + public long getLong() + { + return (long) IndexedDoubles.this.get(offset.getOffset()); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("indexed", IndexedDoubles.this); + inspector.visit("offset", offset); + } + }; + } } diff --git a/processing/src/main/java/io/druid/segment/data/IndexedFloats.java b/processing/src/main/java/io/druid/segment/data/IndexedFloats.java index 8a6f3f51651..82b6f7a6468 100644 --- a/processing/src/main/java/io/druid/segment/data/IndexedFloats.java +++ b/processing/src/main/java/io/druid/segment/data/IndexedFloats.java @@ -19,6 +19,11 @@ package io.druid.segment.data; +import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; +import io.druid.segment.DoubleColumnSelector; +import io.druid.segment.LongColumnSelector; +import io.druid.segment.historical.HistoricalFloatColumnSelector; + import java.io.Closeable; /** @@ -32,4 +37,67 @@ public interface IndexedFloats extends Closeable @Override void close(); + + default HistoricalFloatColumnSelector makeFloatColumnSelector(ReadableOffset offset) + { + return new HistoricalFloatColumnSelector() + { + @Override + public float getFloat() + { + return IndexedFloats.this.get(offset.getOffset()); + } + + @Override + public float get(int offset) + { + return IndexedFloats.this.get(offset); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("indexed", IndexedFloats.this); + inspector.visit("offset", offset); + } + }; + } + + default DoubleColumnSelector makeDoubleColumnSelector(ReadableOffset offset) + { + 
return new DoubleColumnSelector() + { + @Override + public double getDouble() + { + return IndexedFloats.this.get(offset.getOffset()); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("indexed", IndexedFloats.this); + inspector.visit("offset", offset); + } + }; + } + + default LongColumnSelector makeLongColumnSelector(ReadableOffset offset) + { + return new LongColumnSelector() + { + @Override + public long getLong() + { + return (long) IndexedFloats.this.get(offset.getOffset()); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("indexed", IndexedFloats.this); + inspector.visit("offset", offset); + } + }; + } } diff --git a/processing/src/main/java/io/druid/segment/data/IndexedInts.java b/processing/src/main/java/io/druid/segment/data/IndexedInts.java index 63af05f0169..2fc65b24e0c 100644 --- a/processing/src/main/java/io/druid/segment/data/IndexedInts.java +++ b/processing/src/main/java/io/druid/segment/data/IndexedInts.java @@ -34,5 +34,4 @@ public interface IndexedInts extends IntIterable, Closeable, HotLoopCallee int size(); @CalledFromHotLoop int get(int index); - void fill(int index, int[] toFill); } diff --git a/processing/src/main/java/io/druid/segment/data/IndexedLongs.java b/processing/src/main/java/io/druid/segment/data/IndexedLongs.java index c9b1bc67815..be40e1df5fa 100644 --- a/processing/src/main/java/io/druid/segment/data/IndexedLongs.java +++ b/processing/src/main/java/io/druid/segment/data/IndexedLongs.java @@ -19,6 +19,11 @@ package io.druid.segment.data; +import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; +import io.druid.segment.DoubleColumnSelector; +import io.druid.segment.LongColumnSelector; +import io.druid.segment.historical.HistoricalFloatColumnSelector; + import java.io.Closeable; /** @@ -32,4 +37,67 @@ public interface IndexedLongs extends Closeable @Override void close(); + + default LongColumnSelector makeLongColumnSelector(ReadableOffset offset) + { + return new LongColumnSelector() + { + @Override + public long getLong() + { + return IndexedLongs.this.get(offset.getOffset()); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("indexed", IndexedLongs.this); + inspector.visit("offset", offset); + } + }; + } + + default HistoricalFloatColumnSelector makeFloatColumnSelector(ReadableOffset offset) + { + return new HistoricalFloatColumnSelector() + { + @Override + public float getFloat() + { + return (float) IndexedLongs.this.get(offset.getOffset()); + } + + @Override + public float get(int offset) + { + return (float) IndexedLongs.this.get(offset); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("indexed", IndexedLongs.this); + inspector.visit("offset", offset); + } + }; + } + + default DoubleColumnSelector makeDoubleColumnSelector(ReadableOffset offset) + { + return new DoubleColumnSelector() + { + @Override + public double getDouble() + { + return (double) IndexedLongs.this.get(offset.getOffset()); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("indexed", IndexedLongs.this); + inspector.visit("offset", offset); + } + }; + } } diff --git a/processing/src/main/java/io/druid/segment/data/IntersectingOffset.java b/processing/src/main/java/io/druid/segment/data/IntersectingOffset.java deleted file mode 100644 index a284db4345b..00000000000 --- 
a/processing/src/main/java/io/druid/segment/data/IntersectingOffset.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.segment.data; - -import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; - -/** - */ -public class IntersectingOffset extends Offset -{ - private final Offset lhs; - private final Offset rhs; - - public IntersectingOffset(Offset lhs, Offset rhs) - { - this.lhs = lhs; - this.rhs = rhs; - - findIntersection(); - } - - @Override - public int getOffset() - { - return lhs.getOffset(); - } - - @Override - public void increment() - { - lhs.increment(); - rhs.increment(); - - findIntersection(); - } - - private void findIntersection() - { - if (!(lhs.withinBounds() && rhs.withinBounds())) { - return; - } - - int lhsOffset = lhs.getOffset(); - int rhsOffset = rhs.getOffset(); - - while (lhsOffset != rhsOffset) { - while (lhsOffset < rhsOffset) { - lhs.increment(); - if (!lhs.withinBounds()) { - return; - } - - lhsOffset = lhs.getOffset(); - } - - while (rhsOffset < lhsOffset) { - rhs.increment(); - if (!rhs.withinBounds()) { - return; - } - - rhsOffset = rhs.getOffset(); - } - } - } - - @Override - public boolean withinBounds() - { - return lhs.withinBounds() && rhs.withinBounds(); - } - - @Override - public Offset clone() - { - final Offset lhsClone = lhs.clone(); - final Offset rhsClone = rhs.clone(); - return new IntersectingOffset(lhsClone, rhsClone); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("lhs", lhs); - inspector.visit("rhs", rhs); - } -} diff --git a/processing/src/main/java/io/druid/segment/data/ObjectStrategy.java b/processing/src/main/java/io/druid/segment/data/ObjectStrategy.java index a0ab34ba6bd..dfcb60ed443 100644 --- a/processing/src/main/java/io/druid/segment/data/ObjectStrategy.java +++ b/processing/src/main/java/io/druid/segment/data/ObjectStrategy.java @@ -19,9 +19,12 @@ package io.druid.segment.data; +import io.druid.guice.annotations.ExtensionPoint; + import java.nio.ByteBuffer; import java.util.Comparator; +@ExtensionPoint public interface ObjectStrategy extends Comparator { public Class getClazz(); diff --git a/processing/src/main/java/io/druid/segment/data/Offset.java b/processing/src/main/java/io/druid/segment/data/Offset.java index 7eff391e65d..dacf0b7cfb4 100644 --- a/processing/src/main/java/io/druid/segment/data/Offset.java +++ b/processing/src/main/java/io/druid/segment/data/Offset.java @@ -25,11 +25,18 @@ import io.druid.query.monomorphicprocessing.CalledFromHotLoop; /** * The "mutable" version of a ReadableOffset. 
Introduces "increment()" and "withinBounds()" methods, which are * very similar to "next()" and "hasNext()" on the Iterator interface except increment() does not return a value. + * + * This class is not thread-safe, all it's methods, including {@link #reset()} and {@link #clone()}, must be called + * from a single thread. * * Annotated with {@link SubclassesMustBePublic} because Offset occurrences are replaced with a subclass in {@link * io.druid.query.topn.Historical1SimpleDoubleAggPooledTopNScannerPrototype} and {@link * io.druid.query.topn.HistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopNScannerPrototype} during * specialization, and specialized version of those prototypes must be able to any subclass of Offset. + * + * This interface is the core "pointer" interface that is used to create {@link io.druid.segment.ColumnValueSelector}s + * over historical segments. It's counterpart for incremental index is {@link + * io.druid.segment.incremental.TimeAndDimsHolder}. */ @SubclassesMustBePublic public abstract class Offset implements ReadableOffset, Cloneable @@ -40,6 +47,18 @@ public abstract class Offset implements ReadableOffset, Cloneable @CalledFromHotLoop public abstract boolean withinBounds(); + /** + * Resets the Offset to the position it was created or cloned with. + */ + public abstract void reset(); + + /** + * Returns the same offset ("this") or a readable "view" of this offset, which always returns the same value from + * {@link #getOffset()}, as this offset. This method is useful for "unwrapping" such offsets as {@link + * io.druid.segment.FilteredOffset} and reduce reference indirection, when only {@link ReadableOffset} API is needed. + */ + public abstract ReadableOffset getBaseReadableOffset(); + @Override public Offset clone() { diff --git a/processing/src/main/java/io/druid/segment/data/RangeIndexedInts.java b/processing/src/main/java/io/druid/segment/data/RangeIndexedInts.java index 0f426f32ec7..e5e85a70787 100644 --- a/processing/src/main/java/io/druid/segment/data/RangeIndexedInts.java +++ b/processing/src/main/java/io/druid/segment/data/RangeIndexedInts.java @@ -72,12 +72,6 @@ public class RangeIndexedInts implements IndexedInts return index; } - @Override - public void fill(int index, int[] toFill) - { - throw new UnsupportedOperationException("fill"); - } - @Override public IntIterator iterator() { diff --git a/processing/src/main/java/io/druid/segment/data/SingleIndexedInt.java b/processing/src/main/java/io/druid/segment/data/SingleIndexedInt.java index 0d7eaac4b80..d3d8427f1ab 100644 --- a/processing/src/main/java/io/druid/segment/data/SingleIndexedInt.java +++ b/processing/src/main/java/io/druid/segment/data/SingleIndexedInt.java @@ -55,12 +55,6 @@ public final class SingleIndexedInt implements IndexedInts return IntIterators.singleton(value); } - @Override - public void fill(int index, int[] toFill) - { - throw new UnsupportedOperationException("fill not supported"); - } - @Override public void close() throws IOException { diff --git a/processing/src/main/java/io/druid/segment/data/UnioningOffset.java b/processing/src/main/java/io/druid/segment/data/UnioningOffset.java deleted file mode 100644 index 61572b0a64e..00000000000 --- a/processing/src/main/java/io/druid/segment/data/UnioningOffset.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.segment.data; - -import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; - -/** - */ -public class UnioningOffset extends Offset -{ - private final Offset[] offsets = new Offset[2]; - private final int[] offsetVals = new int[2]; - - private int nextOffsetIndex; - - public UnioningOffset(Offset lhs, Offset rhs) - { - if (lhs.withinBounds()) { - offsets[0] = lhs; - } - - if (rhs.withinBounds()) { - if (offsets[0] == null) { - offsets[0] = rhs; - } else { - offsets[1] = rhs; - } - } - - if (offsets[0] != null) { - offsetVals[0] = offsets[0].getOffset(); - if (offsets[1] != null) { - offsetVals[1] = offsets[1].getOffset(); - } - } - figureOutNextValue(); - } - - private UnioningOffset( - Offset[] offsets, - int[] offsetVals, - int nextOffsetIndex - ) - { - System.arraycopy(offsets, 0, this.offsets, 0, 2); - System.arraycopy(offsetVals, 0, this.offsetVals, 0, 2); - this.nextOffsetIndex = nextOffsetIndex; - } - - private void figureOutNextValue() - { - if (offsets[0] != null) { - if (offsets[1] != null) { - int lhs = offsetVals[0]; - int rhs = offsetVals[1]; - - if (lhs < rhs) { - nextOffsetIndex = 0; - } else if (lhs == rhs) { - nextOffsetIndex = 0; - rollIndexForward(1); - } else { - nextOffsetIndex = 1; - } - } else { - nextOffsetIndex = 0; - } - } - } - - private void rollIndexForward(int i) - { - offsets[i].increment(); - - if (!offsets[i].withinBounds()) { - offsets[i] = null; - if (i == 0) { - offsets[0] = offsets[1]; - offsetVals[0] = offsetVals[1]; - } - } else { - offsetVals[i] = offsets[i].getOffset(); - } - } - - @Override - public int getOffset() - { - return offsetVals[nextOffsetIndex]; - } - - @Override - public void increment() - { - rollIndexForward(nextOffsetIndex); - figureOutNextValue(); - } - - @Override - public boolean withinBounds() - { - return offsets[0] != null; - } - - @Override - public Offset clone() - { - Offset[] newOffsets = new Offset[2]; - int[] newOffsetValues = new int[2]; - - for (int i = 0; i < newOffsets.length; ++i) { - newOffsets[i] = offsets[i] == null ? 
null : offsets[i].clone(); - newOffsetValues[i] = this.offsetVals[i]; - } - - return new UnioningOffset(newOffsets, newOffsetValues, nextOffsetIndex); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("lhs", offsets[0]); - inspector.visit("rhs", offsets[1]); - } -} diff --git a/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java b/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java index cd3411f92ca..46d37cd72c1 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java @@ -210,12 +210,6 @@ public class VSizeIndexedInts implements IndexedInts, Comparable implements Iterable, { row = formatRow(row); if (row.getTimestampFromEpoch() < minTimestamp) { - throw new IAE("Cannot add row[%s] because it is below the minTimestamp[%s]", row, new DateTime(minTimestamp)); + throw new IAE("Cannot add row[%s] because it is below the minTimestamp[%s]", row, DateTimes.utc(minTimestamp)); } final List rowDimensions = row.getDimensions(); @@ -682,17 +683,20 @@ public abstract class IncrementalIndex implements Iterable, public Interval getInterval() { - return new Interval(minTimestamp, isEmpty() ? minTimestamp : gran.increment(new DateTime(getMaxTimeMillis())).getMillis()); + DateTime min = DateTimes.utc(minTimestamp); + return new Interval(min, isEmpty() ? min : gran.increment(DateTimes.utc(getMaxTimeMillis()))); } + @Nullable public DateTime getMinTime() { - return isEmpty() ? null : new DateTime(getMinTimeMillis()); + return isEmpty() ? null : DateTimes.utc(getMinTimeMillis()); } + @Nullable public DateTime getMaxTime() { - return isEmpty() ? null : new DateTime(getMaxTimeMillis()); + return isEmpty() ? null : DateTimes.utc(getMaxTimeMillis()); } public Integer getDimensionIndex(String dimension) @@ -1010,7 +1014,7 @@ public abstract class IncrementalIndex implements Iterable, public String toString() { return "TimeAndDims{" + - "timestamp=" + new DateTime(timestamp) + + "timestamp=" + DateTimes.utc(timestamp) + ", dims=" + Lists.transform( Arrays.asList(dims), new Function() { diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexAdapter.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexAdapter.java index 067d9fa06f6..f0de26bf4e0 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexAdapter.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexAdapter.java @@ -291,12 +291,6 @@ public class IncrementalIndexAdapter implements IndexableAdapter return IntIteratorUtils.fromRoaringBitmapIntIterator(bitmapIndex.iterator()); } - @Override - public void fill(int index, int[] toFill) - { - throw new UnsupportedOperationException("fill not supported"); - } - @Override public void close() throws IOException { diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexColumnSelectorFactory.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexColumnSelectorFactory.java new file mode 100644 index 00000000000..6433b277412 --- /dev/null +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexColumnSelectorFactory.java @@ -0,0 +1,341 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.segment.incremental; + +import io.druid.query.dimension.DimensionSpec; +import io.druid.query.extraction.ExtractionFn; +import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; +import io.druid.segment.ColumnSelectorFactory; +import io.druid.segment.DimensionIndexer; +import io.druid.segment.DimensionSelector; +import io.druid.segment.DimensionSelectorUtils; +import io.druid.segment.DoubleColumnSelector; +import io.druid.segment.DoubleWrappingDimensionSelector; +import io.druid.segment.FloatColumnSelector; +import io.druid.segment.FloatWrappingDimensionSelector; +import io.druid.segment.LongColumnSelector; +import io.druid.segment.LongWrappingDimensionSelector; +import io.druid.segment.ObjectColumnSelector; +import io.druid.segment.SingleScanTimeDimSelector; +import io.druid.segment.VirtualColumns; +import io.druid.segment.ZeroDoubleColumnSelector; +import io.druid.segment.ZeroFloatColumnSelector; +import io.druid.segment.ZeroLongColumnSelector; +import io.druid.segment.column.Column; +import io.druid.segment.column.ColumnCapabilities; +import io.druid.segment.column.ValueType; + +import javax.annotation.Nullable; + +/** + * The basic implementation of {@link ColumnSelectorFactory} over an {@link IncrementalIndex}. Its counterpart for + * historical segments is {@link io.druid.segment.QueryableIndexColumnSelectorFactory}.
+ */ +class IncrementalIndexColumnSelectorFactory implements ColumnSelectorFactory +{ + private final IncrementalIndex index; + private final VirtualColumns virtualColumns; + private final boolean descending; + private final TimeAndDimsHolder timeAndDimsHolder; + + IncrementalIndexColumnSelectorFactory( + IncrementalIndex index, + VirtualColumns virtualColumns, + boolean descending, + TimeAndDimsHolder timeAndDimsHolder + ) + { + this.index = index; + this.virtualColumns = virtualColumns; + this.descending = descending; + this.timeAndDimsHolder = timeAndDimsHolder; + } + + @Override + public DimensionSelector makeDimensionSelector(DimensionSpec dimensionSpec) + { + if (virtualColumns.exists(dimensionSpec.getDimension())) { + return virtualColumns.makeDimensionSelector(dimensionSpec, this); + } + + return dimensionSpec.decorate(makeDimensionSelectorUndecorated(dimensionSpec)); + } + + private DimensionSelector makeDimensionSelectorUndecorated(DimensionSpec dimensionSpec) + { + final String dimension = dimensionSpec.getDimension(); + final ExtractionFn extractionFn = dimensionSpec.getExtractionFn(); + + if (dimension.equals(Column.TIME_COLUMN_NAME)) { + return new SingleScanTimeDimSelector( + makeLongColumnSelector(dimension), + extractionFn, + descending + ); + } + + final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(dimensionSpec.getDimension()); + if (dimensionDesc == null) { + // not a dimension, column may be a metric + ColumnCapabilities capabilities = getColumnCapabilities(dimension); + if (capabilities == null) { + return DimensionSelectorUtils.constantSelector(null, extractionFn); + } + if (capabilities.getType() == ValueType.LONG) { + return new LongWrappingDimensionSelector(makeLongColumnSelector(dimension), extractionFn); + } + if (capabilities.getType() == ValueType.FLOAT) { + return new FloatWrappingDimensionSelector(makeFloatColumnSelector(dimension), extractionFn); + } + if (capabilities.getType() == ValueType.DOUBLE) { + return new DoubleWrappingDimensionSelector(makeDoubleColumnSelector(dimension), extractionFn); + } + + // if we can't wrap the base column, just return a column of all nulls + return DimensionSelectorUtils.constantSelector(null, extractionFn); + } else { + final DimensionIndexer indexer = dimensionDesc.getIndexer(); + return indexer.makeDimensionSelector(dimensionSpec, timeAndDimsHolder, dimensionDesc); + } + } + + @Override + public FloatColumnSelector makeFloatColumnSelector(String columnName) + { + if (virtualColumns.exists(columnName)) { + return virtualColumns.makeFloatColumnSelector(columnName, this); + } + + final Integer dimIndex = index.getDimensionIndex(columnName); + if (dimIndex != null) { + final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(columnName); + final DimensionIndexer indexer = dimensionDesc.getIndexer(); + return indexer.makeFloatColumnSelector(timeAndDimsHolder, dimensionDesc); + } + + final Integer metricIndexInt = index.getMetricIndex(columnName); + if (metricIndexInt == null) { + return ZeroFloatColumnSelector.instance(); + } + + final int metricIndex = metricIndexInt; + return new FloatColumnSelector() + { + @Override + public float getFloat() + { + return index.getMetricFloatValue(timeAndDimsHolder.getValue(), metricIndex); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("index", index); + } + }; + } + + @Override + public DoubleColumnSelector makeDoubleColumnSelector(String columnName) + { + if 
(virtualColumns.exists(columnName)) { + return virtualColumns.makeDoubleColumnSelector(columnName, this); + } + + final Integer dimIndex = index.getDimensionIndex(columnName); + if (dimIndex != null) { + final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(columnName); + final DimensionIndexer indexer = dimensionDesc.getIndexer(); + return indexer.makeDoubleColumnSelector(timeAndDimsHolder, dimensionDesc); + } + + final Integer metricIndexInt = index.getMetricIndex(columnName); + if (metricIndexInt == null) { + return ZeroDoubleColumnSelector.instance(); + } + + final int metricIndex = metricIndexInt; + return new DoubleColumnSelector() + { + @Override + public double getDouble() + { + return index.getMetricDoubleValue(timeAndDimsHolder.getValue(), metricIndex); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("index", index); + } + }; + } + + @Override + public LongColumnSelector makeLongColumnSelector(String columnName) + { + if (virtualColumns.exists(columnName)) { + return virtualColumns.makeLongColumnSelector(columnName, this); + } + + if (columnName.equals(Column.TIME_COLUMN_NAME)) { + class TimeLongColumnSelector implements LongColumnSelector + { + @Override + public long getLong() + { + return timeAndDimsHolder.getKey().getTimestamp(); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + // nothing to inspect + } + } + return new TimeLongColumnSelector(); + } + + final Integer dimIndex = index.getDimensionIndex(columnName); + if (dimIndex != null) { + final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(columnName); + final DimensionIndexer indexer = dimensionDesc.getIndexer(); + return indexer.makeLongColumnSelector(timeAndDimsHolder, dimensionDesc); + } + + final Integer metricIndexInt = index.getMetricIndex(columnName); + if (metricIndexInt == null) { + return ZeroLongColumnSelector.instance(); + } + + final int metricIndex = metricIndexInt; + + return new LongColumnSelector() + { + @Override + public long getLong() + { + return index.getMetricLongValue(timeAndDimsHolder.getValue(), metricIndex); + } + + @Override + public void inspectRuntimeShape(RuntimeShapeInspector inspector) + { + inspector.visit("index", index); + } + }; + } + + @Override + public ObjectColumnSelector makeObjectColumnSelector(String column) + { + if (virtualColumns.exists(column)) { + return virtualColumns.makeObjectColumnSelector(column, this); + } + + if (column.equals(Column.TIME_COLUMN_NAME)) { + return new ObjectColumnSelector() + { + @Override + public Class classOfObject() + { + return Long.class; + } + + @Override + public Long get() + { + return timeAndDimsHolder.getKey().getTimestamp(); + } + }; + } + + final Integer metricIndexInt = index.getMetricIndex(column); + if (metricIndexInt != null) { + final int metricIndex = metricIndexInt; + final Class classOfObject = index.getMetricClass(column); + return new ObjectColumnSelector() + { + @Override + public Class classOfObject() + { + return classOfObject; + } + + @Override + public Object get() + { + return index.getMetricObjectValue( + timeAndDimsHolder.getValue(), + metricIndex + ); + } + }; + } + + IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(column); + + if (dimensionDesc == null) { + return null; + } else { + + final int dimensionIndex = dimensionDesc.getIndex(); + final DimensionIndexer indexer = dimensionDesc.getIndexer(); + + return new ObjectColumnSelector() + { + @Override + public 
Class classOfObject() + { + return Object.class; + } + + @Override + public Object get() + { + IncrementalIndex.TimeAndDims key = timeAndDimsHolder.getKey(); + if (key == null) { + return null; + } + + Object[] dims = key.getDims(); + if (dimensionIndex >= dims.length) { + return null; + } + + return indexer.convertUnsortedEncodedKeyComponentToActualArrayOrList( + dims[dimensionIndex], DimensionIndexer.ARRAY + ); + } + }; + } + } + + @Nullable + @Override + public ColumnCapabilities getColumnCapabilities(String columnName) + { + if (virtualColumns.exists(columnName)) { + return virtualColumns.getColumnCapabilities(columnName); + } + + return index.getCapabilities(columnName); + } +} diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java index b747df94075..325ff4f8902 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java @@ -19,7 +19,6 @@ package io.druid.segment.incremental; -import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; @@ -28,34 +27,17 @@ import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.BaseQuery; import io.druid.query.QueryMetrics; -import io.druid.query.dimension.DimensionSpec; -import io.druid.query.extraction.ExtractionFn; import io.druid.query.filter.Filter; import io.druid.query.filter.ValueMatcher; -import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; import io.druid.segment.Capabilities; +import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.Cursor; -import io.druid.segment.DimensionHandler; import io.druid.segment.DimensionIndexer; -import io.druid.segment.DimensionSelector; -import io.druid.segment.DoubleColumnSelector; -import io.druid.segment.DoubleWrappingDimensionSelector; -import io.druid.segment.FloatColumnSelector; -import io.druid.segment.FloatWrappingDimensionSelector; -import io.druid.segment.LongColumnSelector; -import io.druid.segment.LongWrappingDimensionSelector; import io.druid.segment.Metadata; -import io.druid.segment.NullDimensionSelector; -import io.druid.segment.ObjectColumnSelector; -import io.druid.segment.SingleScanTimeDimSelector; import io.druid.segment.StorageAdapter; import io.druid.segment.VirtualColumns; -import io.druid.segment.ZeroDoubleColumnSelector; -import io.druid.segment.ZeroFloatColumnSelector; -import io.druid.segment.ZeroLongColumnSelector; import io.druid.segment.column.Column; import io.druid.segment.column.ColumnCapabilities; -import io.druid.segment.column.ValueType; import io.druid.segment.data.Indexed; import io.druid.segment.data.ListIndexed; import io.druid.segment.filter.BooleanValueMatcher; @@ -64,7 +46,6 @@ import org.joda.time.Interval; import javax.annotation.Nullable; import java.util.Iterator; -import java.util.Map; /** */ @@ -72,9 +53,7 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter { private final IncrementalIndex index; - public IncrementalIndexStorageAdapter( - IncrementalIndex index - ) + public IncrementalIndexStorageAdapter(IncrementalIndex index) { this.index = index; } @@ -94,7 +73,7 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter @Override public Indexed 
getAvailableDimensions() { - return new ListIndexed(index.getDimensionNames(), String.class); + return new ListIndexed<>(index.getDimensionNames(), String.class); } @Override @@ -137,6 +116,7 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter return index.getMaxTime(); } + @Nullable @Override public Comparable getMinValue(String column) { @@ -149,6 +129,7 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter return indexer.getMinValue(); } + @Nullable @Override public Comparable getMaxValue(String column) { @@ -173,12 +154,6 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter return index.getCapabilities(column); } - @Override - public Map getDimensionHandlers() - { - return index.getDimensionHandlers(); - } - @Override public String getColumnTypeName(String column) { @@ -206,511 +181,29 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter return Sequences.empty(); } - Interval actualIntervalTmp = interval; + final Interval dataInterval = new Interval(getMinTime(), gran.bucketEnd(getMaxTime())); - final Interval dataInterval = new Interval( - getMinTime().getMillis(), - gran.bucketEnd(getMaxTime()).getMillis() - ); - - if (!actualIntervalTmp.overlaps(dataInterval)) { + if (!interval.overlaps(dataInterval)) { return Sequences.empty(); } - if (actualIntervalTmp.getStart().isBefore(dataInterval.getStart())) { - actualIntervalTmp = actualIntervalTmp.withStart(dataInterval.getStart()); - } - if (actualIntervalTmp.getEnd().isAfter(dataInterval.getEnd())) { - actualIntervalTmp = actualIntervalTmp.withEnd(dataInterval.getEnd()); - } - - final Interval actualInterval = actualIntervalTmp; + final Interval actualInterval = interval.overlap(dataInterval); Iterable iterable = gran.getIterable(actualInterval); if (descending) { iterable = Lists.reverse(ImmutableList.copyOf(iterable)); } - return Sequences.map( - Sequences.simple(iterable), - new Function() - { - EntryHolder currEntry = new EntryHolder(); - - @Override - public Cursor apply(@Nullable final Interval interval) - { - final long timeStart = Math.max(interval.getStartMillis(), actualInterval.getStartMillis()); - - return new Cursor() - { - private final ValueMatcher filterMatcher = makeFilterMatcher(filter, this); - private final int maxRowIndex; - private Iterator baseIter; - private Iterable cursorIterable; - private boolean emptyRange; - final DateTime time; - int numAdvanced = -1; - boolean done; - - { - maxRowIndex = index.getLastRowIndex(); - cursorIterable = index.getFacts().timeRangeIterable( - descending, - timeStart, - Math.min(actualInterval.getEndMillis(), gran.increment(interval.getStart()).getMillis()) - ); - emptyRange = !cursorIterable.iterator().hasNext(); - time = gran.toDateTime(interval.getStartMillis()); - - reset(); - } - - @Override - public DateTime getTime() - { - return time; - } - - @Override - public void advance() - { - if (!baseIter.hasNext()) { - done = true; - return; - } - - while (baseIter.hasNext()) { - BaseQuery.checkInterrupted(); - - IncrementalIndex.TimeAndDims entry = baseIter.next(); - if (beyondMaxRowIndex(entry.getRowIndex())) { - continue; - } - - currEntry.set(entry); - - if (filterMatcher.matches()) { - return; - } - } - - done = true; - } - - @Override - public void advanceUninterruptibly() - { - if (!baseIter.hasNext()) { - done = true; - return; - } - - while (baseIter.hasNext()) { - if (Thread.currentThread().isInterrupted()) { - return; - } - - IncrementalIndex.TimeAndDims entry = baseIter.next(); - if 
(beyondMaxRowIndex(entry.getRowIndex())) { - continue; - } - - currEntry.set(entry); - - if (filterMatcher.matches()) { - return; - } - } - - done = true; - } - - @Override - public void advanceTo(int offset) - { - int count = 0; - while (count < offset && !isDone()) { - advance(); - count++; - } - } - - @Override - public boolean isDone() - { - return done; - } - - @Override - public boolean isDoneOrInterrupted() - { - return isDone() || Thread.currentThread().isInterrupted(); - } - - @Override - public void reset() - { - baseIter = cursorIterable.iterator(); - - if (numAdvanced == -1) { - numAdvanced = 0; - } else { - Iterators.advance(baseIter, numAdvanced); - } - - BaseQuery.checkInterrupted(); - - boolean foundMatched = false; - while (baseIter.hasNext()) { - IncrementalIndex.TimeAndDims entry = baseIter.next(); - if (beyondMaxRowIndex(entry.getRowIndex())) { - numAdvanced++; - continue; - } - currEntry.set(entry); - if (filterMatcher.matches()) { - foundMatched = true; - break; - } - - numAdvanced++; - } - - done = !foundMatched && (emptyRange || !baseIter.hasNext()); - } - - private boolean beyondMaxRowIndex(int rowIndex) - { - // ignore rows whose rowIndex is beyond the maxRowIndex - // rows are order by timestamp, not rowIndex, - // so we still need to go through all rows to skip rows added after cursor created - return rowIndex > maxRowIndex; - } - - @Override - public DimensionSelector makeDimensionSelector( - DimensionSpec dimensionSpec - ) - { - if (virtualColumns.exists(dimensionSpec.getDimension())) { - return virtualColumns.makeDimensionSelector(dimensionSpec, this); - } - - return dimensionSpec.decorate(makeDimensionSelectorUndecorated(dimensionSpec)); - } - - private DimensionSelector makeDimensionSelectorUndecorated( - DimensionSpec dimensionSpec - ) - { - final String dimension = dimensionSpec.getDimension(); - final ExtractionFn extractionFn = dimensionSpec.getExtractionFn(); - - if (dimension.equals(Column.TIME_COLUMN_NAME)) { - DimensionSelector selector = new SingleScanTimeDimSelector( - makeLongColumnSelector(dimension), - extractionFn, - descending - ); - return selector; - } - - final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(dimensionSpec.getDimension()); - if (dimensionDesc == null) { - // not a dimension, column may be a metric - ColumnCapabilities capabilities = getColumnCapabilities(dimension); - if (capabilities == null) { - return NullDimensionSelector.instance(); - } - if (capabilities.getType() == ValueType.LONG) { - return new LongWrappingDimensionSelector(makeLongColumnSelector(dimension), extractionFn); - } - if (capabilities.getType() == ValueType.FLOAT) { - return new FloatWrappingDimensionSelector(makeFloatColumnSelector(dimension), extractionFn); - } - if (capabilities.getType() == ValueType.DOUBLE) { - return new DoubleWrappingDimensionSelector(makeDoubleColumnSelector(dimension), extractionFn); - } - - // if we can't wrap the base column, just return a column of all nulls - return NullDimensionSelector.instance(); - } else { - final DimensionIndexer indexer = dimensionDesc.getIndexer(); - return indexer.makeDimensionSelector(dimensionSpec, currEntry, dimensionDesc); - } - } - - @Override - public FloatColumnSelector makeFloatColumnSelector(String columnName) - { - if (virtualColumns.exists(columnName)) { - return virtualColumns.makeFloatColumnSelector(columnName, this); - } - - final Integer dimIndex = index.getDimensionIndex(columnName); - if (dimIndex != null) { - final IncrementalIndex.DimensionDesc dimensionDesc = 
index.getDimension(columnName); - final DimensionIndexer indexer = dimensionDesc.getIndexer(); - return indexer.makeFloatColumnSelector( - currEntry, - dimensionDesc - ); - } - - final Integer metricIndexInt = index.getMetricIndex(columnName); - if (metricIndexInt == null) { - return ZeroFloatColumnSelector.instance(); - } - - final int metricIndex = metricIndexInt; - return new FloatColumnSelector() - { - @Override - public float getFloat() - { - return index.getMetricFloatValue(currEntry.getValue(), metricIndex); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("index", index); - } - }; - } - - @Override - public LongColumnSelector makeLongColumnSelector(String columnName) - { - if (virtualColumns.exists(columnName)) { - return virtualColumns.makeLongColumnSelector(columnName, this); - } - - if (columnName.equals(Column.TIME_COLUMN_NAME)) { - class TimeLongColumnSelector implements LongColumnSelector - { - @Override - public long getLong() - { - return currEntry.getKey().getTimestamp(); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - // nothing to inspect - } - } - return new TimeLongColumnSelector(); - } - - final Integer dimIndex = index.getDimensionIndex(columnName); - if (dimIndex != null) { - final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(columnName); - final DimensionIndexer indexer = dimensionDesc.getIndexer(); - return indexer.makeLongColumnSelector( - currEntry, - dimensionDesc - ); - } - - final Integer metricIndexInt = index.getMetricIndex(columnName); - if (metricIndexInt == null) { - return ZeroLongColumnSelector.instance(); - } - - final int metricIndex = metricIndexInt; - - return new LongColumnSelector() - { - @Override - public long getLong() - { - return index.getMetricLongValue( - currEntry.getValue(), - metricIndex - ); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("index", index); - } - }; - } - - @Override - public ObjectColumnSelector makeObjectColumnSelector(String column) - { - if (virtualColumns.exists(column)) { - return virtualColumns.makeObjectColumnSelector(column, this); - } - - if (column.equals(Column.TIME_COLUMN_NAME)) { - return new ObjectColumnSelector() - { - @Override - public Class classOfObject() - { - return Long.class; - } - - @Override - public Long get() - { - return currEntry.getKey().getTimestamp(); - } - }; - } - - final Integer metricIndexInt = index.getMetricIndex(column); - if (metricIndexInt != null) { - final int metricIndex = metricIndexInt; - final Class classOfObject = index.getMetricClass(column); - return new ObjectColumnSelector() - { - @Override - public Class classOfObject() - { - return classOfObject; - } - - @Override - public Object get() - { - return index.getMetricObjectValue( - currEntry.getValue(), - metricIndex - ); - } - }; - } - - IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(column); - - if (dimensionDesc == null) { - return null; - } else { - - final int dimensionIndex = dimensionDesc.getIndex(); - final DimensionIndexer indexer = dimensionDesc.getIndexer(); - - return new ObjectColumnSelector() - { - @Override - public Class classOfObject() - { - return Object.class; - } - - @Override - public Object get() - { - IncrementalIndex.TimeAndDims key = currEntry.getKey(); - if (key == null) { - return null; - } - - Object[] dims = key.getDims(); - if (dimensionIndex >= dims.length) { - return null; - } - - return 
indexer.convertUnsortedEncodedKeyComponentToActualArrayOrList( - dims[dimensionIndex], DimensionIndexer.ARRAY - ); - } - }; - } - } - - @Override - public DoubleColumnSelector makeDoubleColumnSelector(String columnName) - { - if (virtualColumns.exists(columnName)) { - return virtualColumns.makeDoubleColumnSelector(columnName, this); - } - - final Integer dimIndex = index.getDimensionIndex(columnName); - if (dimIndex != null) { - final IncrementalIndex.DimensionDesc dimensionDesc = index.getDimension(columnName); - final DimensionIndexer indexer = dimensionDesc.getIndexer(); - return indexer.makeDoubleColumnSelector( - currEntry, - dimensionDesc - ); - } - - final Integer metricIndexInt = index.getMetricIndex(columnName); - if (metricIndexInt == null) { - return ZeroDoubleColumnSelector.instance(); - } - - final int metricIndex = metricIndexInt; - return new DoubleColumnSelector() - { - @Override - public double getDouble() - { - return index.getMetricDoubleValue(currEntry.getValue(), metricIndex); - } - - @Override - public void inspectRuntimeShape(RuntimeShapeInspector inspector) - { - inspector.visit("index", index); - } - }; - } - - @Nullable - @Override - public ColumnCapabilities getColumnCapabilities(String columnName) - { - if (virtualColumns.exists(columnName)) { - return virtualColumns.getColumnCapabilities(columnName); - } - - return index.getCapabilities(columnName); - } - }; - } - } - ); + return Sequences + .simple(iterable) + .map(i -> new IncrementalIndexCursor(virtualColumns, descending, filter, i, actualInterval, gran)); } private ValueMatcher makeFilterMatcher(final Filter filter, final Cursor cursor) { return filter == null ? BooleanValueMatcher.of(true) - : filter.makeMatcher(cursor); - } - - public static class EntryHolder - { - IncrementalIndex.TimeAndDims currEntry = null; - - public IncrementalIndex.TimeAndDims get() - { - return currEntry; - } - - public void set(IncrementalIndex.TimeAndDims currEntry) - { - this.currEntry = currEntry; - } - - public IncrementalIndex.TimeAndDims getKey() - { - return currEntry; - } - - public int getValue() - { - return currEntry.getRowIndex(); - } + : filter.makeMatcher(cursor.getColumnSelectorFactory()); } @Override @@ -718,4 +211,172 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter { return index.getMetadata(); } + + private class IncrementalIndexCursor implements Cursor + { + private TimeAndDimsHolder currEntry; + private final ColumnSelectorFactory columnSelectorFactory; + private final ValueMatcher filterMatcher; + private final int maxRowIndex; + private Iterator baseIter; + private Iterable cursorIterable; + private boolean emptyRange; + private final DateTime time; + private int numAdvanced; + private boolean done; + + IncrementalIndexCursor( + VirtualColumns virtualColumns, + boolean descending, + Filter filter, + Interval interval, + Interval actualInterval, + Granularity gran + ) + { + currEntry = new TimeAndDimsHolder(); + columnSelectorFactory = new IncrementalIndexColumnSelectorFactory(index, virtualColumns, descending, currEntry); + filterMatcher = makeFilterMatcher(filter, this); + numAdvanced = -1; + maxRowIndex = index.getLastRowIndex(); + final long timeStart = Math.max(interval.getStartMillis(), actualInterval.getStartMillis()); + cursorIterable = index.getFacts().timeRangeIterable( + descending, + timeStart, + Math.min(actualInterval.getEndMillis(), gran.increment(interval.getStart()).getMillis()) + ); + emptyRange = !cursorIterable.iterator().hasNext(); + time = 
gran.toDateTime(interval.getStartMillis()); + + reset(); + } + + @Override + public ColumnSelectorFactory getColumnSelectorFactory() + { + return columnSelectorFactory; + } + + @Override + public DateTime getTime() + { + return time; + } + + @Override + public void advance() + { + if (!baseIter.hasNext()) { + done = true; + return; + } + + while (baseIter.hasNext()) { + BaseQuery.checkInterrupted(); + + IncrementalIndex.TimeAndDims entry = baseIter.next(); + if (beyondMaxRowIndex(entry.getRowIndex())) { + continue; + } + + currEntry.set(entry); + + if (filterMatcher.matches()) { + return; + } + } + + done = true; + } + + @Override + public void advanceUninterruptibly() + { + if (!baseIter.hasNext()) { + done = true; + return; + } + + while (baseIter.hasNext()) { + if (Thread.currentThread().isInterrupted()) { + return; + } + + IncrementalIndex.TimeAndDims entry = baseIter.next(); + if (beyondMaxRowIndex(entry.getRowIndex())) { + continue; + } + + currEntry.set(entry); + + if (filterMatcher.matches()) { + return; + } + } + + done = true; + } + + @Override + public void advanceTo(int offset) + { + int count = 0; + while (count < offset && !isDone()) { + advance(); + count++; + } + } + + @Override + public boolean isDone() + { + return done; + } + + @Override + public boolean isDoneOrInterrupted() + { + return isDone() || Thread.currentThread().isInterrupted(); + } + + @Override + public void reset() + { + baseIter = cursorIterable.iterator(); + + if (numAdvanced == -1) { + numAdvanced = 0; + } else { + Iterators.advance(baseIter, numAdvanced); + } + + BaseQuery.checkInterrupted(); + + boolean foundMatched = false; + while (baseIter.hasNext()) { + IncrementalIndex.TimeAndDims entry = baseIter.next(); + if (beyondMaxRowIndex(entry.getRowIndex())) { + numAdvanced++; + continue; + } + currEntry.set(entry); + if (filterMatcher.matches()) { + foundMatched = true; + break; + } + + numAdvanced++; + } + + done = !foundMatched && (emptyRange || !baseIter.hasNext()); + } + + private boolean beyondMaxRowIndex(int rowIndex) + { + // ignore rows whose rowIndex is beyond the maxRowIndex + // rows are order by timestamp, not rowIndex, + // so we still need to go through all rows to skip rows added after cursor created + return rowIndex > maxRowIndex; + } + } } diff --git a/processing/src/main/java/io/druid/segment/incremental/TimeAndDimsHolder.java b/processing/src/main/java/io/druid/segment/incremental/TimeAndDimsHolder.java new file mode 100644 index 00000000000..664885897f5 --- /dev/null +++ b/processing/src/main/java/io/druid/segment/incremental/TimeAndDimsHolder.java @@ -0,0 +1,56 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.segment.incremental; + +/** + * This class is the core "pointer" abstraction that is used to create {@link io.druid.segment.ColumnValueSelector}s + * over the incremental index. Its counterpart for historical segments is {@link io.druid.segment.data.Offset}. + */ +public class TimeAndDimsHolder +{ + IncrementalIndex.TimeAndDims currEntry = null; + + public IncrementalIndex.TimeAndDims get() + { + return currEntry; + } + + public void set(IncrementalIndex.TimeAndDims currEntry) + { + this.currEntry = currEntry; + } + + /** + * This method doesn't have well-defined semantics ("key" of what?) and should be removed in favor of {@link #get()}. + */ + public IncrementalIndex.TimeAndDims getKey() + { + return currEntry; + } + + /** + * This method doesn't have well-defined semantics ("value" of what?) and should be removed in favor of chaining + * get().getRowIndex(). + */ + public int getValue() + { + return currEntry.getRowIndex(); + } +} diff --git a/processing/src/main/java/io/druid/segment/serde/ComplexColumnSerializer.java b/processing/src/main/java/io/druid/segment/serde/ComplexColumnSerializer.java index 9b97ceaa902..a7687849100 100644 --- a/processing/src/main/java/io/druid/segment/serde/ComplexColumnSerializer.java +++ b/processing/src/main/java/io/druid/segment/serde/ComplexColumnSerializer.java @@ -19,6 +19,7 @@ package io.druid.segment.serde; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.FileSmoosher; import io.druid.segment.GenericColumnSerializer; @@ -46,6 +47,7 @@ public class ComplexColumnSerializer implements GenericColumnSerializer this.strategy = strategy; } + @PublicApi public static ComplexColumnSerializer create( IOPeon ioPeon, String filenameBase, diff --git a/processing/src/main/java/io/druid/segment/serde/ComplexMetricExtractor.java b/processing/src/main/java/io/druid/segment/serde/ComplexMetricExtractor.java index 243b0999fcb..37a8ea34248 100644 --- a/processing/src/main/java/io/druid/segment/serde/ComplexMetricExtractor.java +++ b/processing/src/main/java/io/druid/segment/serde/ComplexMetricExtractor.java @@ -20,9 +20,11 @@ package io.druid.segment.serde; import io.druid.data.input.InputRow; +import io.druid.guice.annotations.ExtensionPoint; /** */ +@ExtensionPoint public interface ComplexMetricExtractor { public Class extractedClass(); diff --git a/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java b/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java index 5a0d3418ac4..e0b2aaa662d 100644 --- a/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java +++ b/processing/src/main/java/io/druid/segment/serde/ComplexMetricSerde.java @@ -20,6 +20,7 @@ package io.druid.segment.serde; import com.google.common.base.Function; +import io.druid.guice.annotations.ExtensionPoint; import io.druid.segment.GenericColumnSerializer; import io.druid.segment.column.ColumnBuilder; import io.druid.segment.data.IOPeon; @@ -29,6 +30,7 @@ import java.nio.ByteBuffer; /** */ +@ExtensionPoint public abstract class ComplexMetricSerde { public abstract String getTypeName(); diff --git a/processing/src/main/java/io/druid/segment/serde/LargeColumnSupportedComplexColumnSerializer.java b/processing/src/main/java/io/druid/segment/serde/LargeColumnSupportedComplexColumnSerializer.java index 888912da9b7..f3c12d8a919 100644 --- a/processing/src/main/java/io/druid/segment/serde/LargeColumnSupportedComplexColumnSerializer.java +++
b/processing/src/main/java/io/druid/segment/serde/LargeColumnSupportedComplexColumnSerializer.java @@ -19,6 +19,7 @@ package io.druid.segment.serde; +import io.druid.guice.annotations.PublicApi; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.FileSmoosher; import io.druid.segment.GenericColumnSerializer; @@ -58,6 +59,7 @@ public class LargeColumnSupportedComplexColumnSerializer implements GenericColum this.columnSize = columnSize; } + @PublicApi public static LargeColumnSupportedComplexColumnSerializer create( IOPeon ioPeon, String filenameBase, diff --git a/processing/src/test/java/io/druid/collections/CombiningIterableTest.java b/processing/src/test/java/io/druid/collections/CombiningIterableTest.java index 02e88dc77f7..b60efee729a 100644 --- a/processing/src/test/java/io/druid/collections/CombiningIterableTest.java +++ b/processing/src/test/java/io/druid/collections/CombiningIterableTest.java @@ -19,9 +19,9 @@ package io.druid.collections; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.query.Result; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; @@ -38,12 +38,12 @@ public class CombiningIterableTest public void testMerge() { List> resultsBefore = Arrays.asList( - new Result(new DateTime("2011-01-01"), 1L), - new Result(new DateTime("2011-01-01"), 2L) + new Result(DateTimes.of("2011-01-01"), 1L), + new Result(DateTimes.of("2011-01-01"), 2L) ); Iterable> expectedResults = Arrays.>asList( - new Result(new DateTime("2011-01-01"), 3L) + new Result(DateTimes.of("2011-01-01"), 3L) ); Iterable> resultsAfter = CombiningIterable.create( diff --git a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java index b7466914745..166381ce95b 100644 --- a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java +++ b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java @@ -25,6 +25,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.DurationGranularity; import io.druid.java.util.common.granularity.Granularities; @@ -56,7 +58,7 @@ public class QueryGranularityTest @Test public void testIterableNone() throws Exception { - final Iterator iterator = Granularities.NONE.getIterable(new Interval(0, 1000)).iterator(); + final Iterator iterator = Granularities.NONE.getIterable(Intervals.utc(0, 1000)).iterator(); int count = 0; while (iterator.hasNext()) { Assert.assertEquals(count, iterator.next().getStartMillis()); @@ -67,252 +69,246 @@ public class QueryGranularityTest @Test public void testIterableMinuteSimple() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:38:00.000Z"), - new DateTime("2011-01-01T09:39:00.000Z"), - new DateTime("2011-01-01T09:40:00.000Z") + DateTimes.of("2011-01-01T09:38:00.000Z"), + DateTimes.of("2011-01-01T09:39:00.000Z"), + DateTimes.of("2011-01-01T09:40:00.000Z") ), - Granularities.MINUTE.getIterable(new Interval(baseTime.getMillis(), 
baseTime.plus(Minutes.THREE).getMillis())) + Granularities.MINUTE.getIterable(new Interval(baseTime, baseTime.plus(Minutes.THREE))) ); } @Test public void testIterableMinuteComplex() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:38:00.000Z"), - new DateTime("2011-01-01T09:39:00.000Z"), - new DateTime("2011-01-01T09:40:00.000Z"), - new DateTime("2011-01-01T09:41:00.000Z") + DateTimes.of("2011-01-01T09:38:00.000Z"), + DateTimes.of("2011-01-01T09:39:00.000Z"), + DateTimes.of("2011-01-01T09:40:00.000Z"), + DateTimes.of("2011-01-01T09:41:00.000Z") ), - Granularities.MINUTE.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Minutes.THREE).getMillis())) + Granularities.MINUTE.getIterable(new Interval(baseTime, baseTime.plus(Minutes.THREE))) ); } @Test public void testIterable15MinuteSimple() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:30:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:30:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:30:00.000Z"), - new DateTime("2011-01-01T09:45:00.000Z"), - new DateTime("2011-01-01T10:00:00.000Z") + DateTimes.of("2011-01-01T09:30:00.000Z"), + DateTimes.of("2011-01-01T09:45:00.000Z"), + DateTimes.of("2011-01-01T10:00:00.000Z") ), - Granularities.FIFTEEN_MINUTE.getIterable( - new Interval( - baseTime.getMillis(), baseTime.plus(Minutes.minutes(45)).getMillis() - )) + Granularities.FIFTEEN_MINUTE.getIterable(new Interval(baseTime, baseTime.plus(Minutes.minutes(45)))) ); } @Test public void testIterable15MinuteComplex() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:30:00.000Z"), - new DateTime("2011-01-01T09:45:00.000Z"), - new DateTime("2011-01-01T10:00:00.000Z"), - new DateTime("2011-01-01T10:15:00.000Z") + DateTimes.of("2011-01-01T09:30:00.000Z"), + DateTimes.of("2011-01-01T09:45:00.000Z"), + DateTimes.of("2011-01-01T10:00:00.000Z"), + DateTimes.of("2011-01-01T10:15:00.000Z") ), - Granularities.FIFTEEN_MINUTE.getIterable(new Interval( - baseTime.getMillis(), - baseTime.plus(Minutes.minutes(45)).getMillis() - )) + Granularities.FIFTEEN_MINUTE.getIterable(new Interval(baseTime, baseTime.plus(Minutes.minutes(45)))) ); } @Test public void testIterableHourSimple() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:00:00.000Z"), - new DateTime("2011-01-01T10:00:00.000Z"), - new DateTime("2011-01-01T11:00:00.000Z") - ), Granularities.HOUR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Hours.hours(3)).getMillis())) + DateTimes.of("2011-01-01T09:00:00.000Z"), + DateTimes.of("2011-01-01T10:00:00.000Z"), + DateTimes.of("2011-01-01T11:00:00.000Z") + ), Granularities.HOUR.getIterable(new Interval(baseTime, baseTime.plus(Hours.hours(3)))) ); } @Test public void testIterableHourComplex() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T09:00:00.000Z"), - new 
DateTime("2011-01-01T10:00:00.000Z"), - new DateTime("2011-01-01T11:00:00.000Z"), - new DateTime("2011-01-01T12:00:00.000Z") - ), Granularities.HOUR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Hours.hours(3)).getMillis())) + DateTimes.of("2011-01-01T09:00:00.000Z"), + DateTimes.of("2011-01-01T10:00:00.000Z"), + DateTimes.of("2011-01-01T11:00:00.000Z"), + DateTimes.of("2011-01-01T12:00:00.000Z") + ), Granularities.HOUR.getIterable(new Interval(baseTime, baseTime.plus(Hours.hours(3)))) ); } @Test public void testIterableDaySimple() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-01-02T00:00:00.000Z"), - new DateTime("2011-01-03T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-02T00:00:00.000Z"), + DateTimes.of("2011-01-03T00:00:00.000Z") ), - Granularities.DAY.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.DAY.getIterable(new Interval(baseTime, baseTime.plus(Days.days(3)))) ); } @Test public void testIterableDayComplex() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-01-02T00:00:00.000Z"), - new DateTime("2011-01-03T00:00:00.000Z"), - new DateTime("2011-01-04T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-02T00:00:00.000Z"), + DateTimes.of("2011-01-03T00:00:00.000Z"), + DateTimes.of("2011-01-04T00:00:00.000Z") ), - Granularities.DAY.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.DAY.getIterable(new Interval(baseTime, baseTime.plus(Days.days(3)))) ); } @Test public void testIterableWeekSimple() { - final DateTime baseTime = new DateTime("2011-01-03T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-03T00:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-03T00:00:00.000Z"), - new DateTime("2011-01-10T00:00:00.000Z"), - new DateTime("2011-01-17T00:00:00.000Z") + DateTimes.of("2011-01-03T00:00:00.000Z"), + DateTimes.of("2011-01-10T00:00:00.000Z"), + DateTimes.of("2011-01-17T00:00:00.000Z") ), - Granularities.WEEK.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.THREE).getMillis())) + Granularities.WEEK.getIterable(new Interval(baseTime, baseTime.plus(Weeks.THREE))) ); } @Test public void testIterableWeekComplex() { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2010-12-27T00:00:00.000Z"), - new DateTime("2011-01-03T00:00:00.000Z"), - new DateTime("2011-01-10T00:00:00.000Z"), - new DateTime("2011-01-17T00:00:00.000Z") + DateTimes.of("2010-12-27T00:00:00.000Z"), + DateTimes.of("2011-01-03T00:00:00.000Z"), + DateTimes.of("2011-01-10T00:00:00.000Z"), + DateTimes.of("2011-01-17T00:00:00.000Z") ), - Granularities.WEEK.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.THREE).getMillis())) + Granularities.WEEK.getIterable(new Interval(baseTime, baseTime.plus(Weeks.THREE))) ); } @Test public void testIterableMonthSimple() { - final DateTime baseTime = new 
DateTime("2011-01-01T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-02-01T00:00:00.000Z"), - new DateTime("2011-03-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z") ), - Granularities.MONTH.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.THREE).getMillis())) + Granularities.MONTH.getIterable(new Interval(baseTime, baseTime.plus(Months.THREE))) ); } @Test public void testIterableMonthComplex() { - final DateTime baseTime = new DateTime("2011-01-01T09:38:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-02-01T00:00:00.000Z"), - new DateTime("2011-03-01T00:00:00.000Z"), - new DateTime("2011-04-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z") ), - Granularities.MONTH.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.THREE).getMillis())) + Granularities.MONTH.getIterable(new Interval(baseTime, baseTime.plus(Months.THREE))) ); } @Test public void testIterableQuarterSimple() { - final DateTime baseTime = new DateTime("2011-01-01T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-04-01T00:00:00.000Z"), - new DateTime("2011-07-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-07-01T00:00:00.000Z") ), - Granularities.QUARTER.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.NINE).getMillis())) + Granularities.QUARTER.getIterable(new Interval(baseTime, baseTime.plus(Months.NINE))) ); } @Test public void testIterableQuarterComplex() { - final DateTime baseTime = new DateTime("2011-01-01T09:38:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2011-04-01T00:00:00.000Z"), - new DateTime("2011-07-01T00:00:00.000Z"), - new DateTime("2011-10-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-07-01T00:00:00.000Z"), + DateTimes.of("2011-10-01T00:00:00.000Z") ), - Granularities.QUARTER.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.NINE).getMillis())) + Granularities.QUARTER.getIterable(new Interval(baseTime, baseTime.plus(Months.NINE))) ); } @Test public void testIterableYearSimple() { - final DateTime baseTime = new DateTime("2011-01-01T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2012-01-01T00:00:00.000Z"), - new DateTime("2013-01-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2012-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z") ), - Granularities.YEAR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Years.THREE).getMillis())) + Granularities.YEAR.getIterable(new Interval(baseTime, 
baseTime.plus(Years.THREE))) ); } @Test public void testIterableYearComplex() { - final DateTime baseTime = new DateTime("2011-01-01T09:38:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:00.000Z"); assertSameInterval( Lists.newArrayList( - new DateTime("2011-01-01T00:00:00.000Z"), - new DateTime("2012-01-01T00:00:00.000Z"), - new DateTime("2013-01-01T00:00:00.000Z"), - new DateTime("2014-01-01T00:00:00.000Z") + DateTimes.of("2011-01-01T00:00:00.000Z"), + DateTimes.of("2012-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), + DateTimes.of("2014-01-01T00:00:00.000Z") ), - Granularities.YEAR.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Years.THREE).getMillis())) + Granularities.YEAR.getIterable(new Interval(baseTime, baseTime.plus(Years.THREE))) ); } @@ -328,7 +324,7 @@ public class QueryGranularityTest new DateTime("2012-11-06T00:00:00.000-08:00", tz) ), new PeriodGranularity(new Period("P1D"), null, tz) - .getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + .getIterable(new Interval(baseTime, baseTime.plus(Days.days(3)))) ); assertSameInterval( @@ -340,7 +336,7 @@ public class QueryGranularityTest new DateTime("2012-11-04T03:00:00.000-08:00", tz) ), new PeriodGranularity(new Period("PT1H"), null, tz) - .getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Hours.hours(5)).getMillis())) + .getIterable(new Interval(baseTime, baseTime.plus(Hours.hours(5)))) ); final PeriodGranularity hour = new PeriodGranularity(new Period("PT1H"), null, tz); @@ -353,11 +349,11 @@ public class QueryGranularityTest new DateTime("2012-11-04T03:00:00.000-08:00", tz) ), Lists.newArrayList( - hour.bucketStart(new DateTime("2012-11-04T00:30:00-07:00")), - hour.bucketStart(new DateTime("2012-11-04T01:30:00-07:00")), - hour.bucketStart(new DateTime("2012-11-04T01:30:00-08:00")), - hour.bucketStart(new DateTime("2012-11-04T02:30:00-08:00")), - hour.bucketStart(new DateTime("2012-11-04T03:30:00-08:00")) + hour.bucketStart(DateTimes.of("2012-11-04T00:30:00-07:00")), + hour.bucketStart(DateTimes.of("2012-11-04T01:30:00-07:00")), + hour.bucketStart(DateTimes.of("2012-11-04T01:30:00-08:00")), + hour.bucketStart(DateTimes.of("2012-11-04T02:30:00-08:00")), + hour.bucketStart(DateTimes.of("2012-11-04T03:30:00-08:00")) ) ); } @@ -375,7 +371,7 @@ public class QueryGranularityTest new DateTime("2013-02-01T00:00:00.000-08:00", tz) ), new PeriodGranularity(new Period("P1M"), null, tz) - .getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Months.months(3)).getMillis())) + .getIterable(new Interval(baseTime, baseTime.plus(Months.months(3)))) ); } @@ -392,7 +388,7 @@ public class QueryGranularityTest new DateTime("2012-11-19T00:00:00.000-08:00", tz) ), new PeriodGranularity(new Period("P1W"), null, tz) - .getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.weeks(3)).getMillis())) + .getIterable(new Interval(baseTime, baseTime.plus(Weeks.weeks(3)))) ); assertSameInterval( @@ -402,7 +398,7 @@ public class QueryGranularityTest new DateTime("2012-11-17T10:00:00.000-08:00", tz) ), new PeriodGranularity(new Period("P1W"), baseTime, tz) - .getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Weeks.weeks(3)).getMillis())) + .getIterable(new Interval(baseTime, baseTime.plus(Weeks.weeks(3)))) ); } @@ -410,7 +406,7 @@ public class QueryGranularityTest public void testPeriodTruncateDays() throws Exception { final DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles"); - final DateTime origin = new 
DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); PeriodGranularity periodOrigin = new PeriodGranularity( new Period("P2D"), origin, @@ -423,9 +419,9 @@ public class QueryGranularityTest new DateTime("2012-01-04T05:00:00.000-08:00", tz) ), Lists.newArrayList( - periodOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-04T07:20:04.123-08:00")) + periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-04T07:20:04.123-08:00")) ) ); @@ -442,9 +438,9 @@ public class QueryGranularityTest new DateTime("2012-01-03T00:00:00.000-08:00", tz) ), Lists.newArrayList( - periodNoOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - periodNoOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - periodNoOrigin.bucketStart(new DateTime("2012-01-04T07:20:04.123-08:00")) + periodNoOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-04T07:20:04.123-08:00")) ) ); @@ -453,18 +449,18 @@ public class QueryGranularityTest @Test public void testPeriodTruncateMinutes() throws Exception { - final DateTime origin = new DateTime("2012-01-02T00:05:00.000Z"); + final DateTime origin = DateTimes.of("2012-01-02T00:05:00.000Z"); PeriodGranularity periodOrigin = new PeriodGranularity(new Period("PT15M"), origin, null); assertSameDateTime( Lists.newArrayList( - new DateTime("2012-01-01T04:50:00.000Z"), - new DateTime("2012-01-02T07:05:00.000Z"), - new DateTime("2012-01-04T00:20:00.000Z") + DateTimes.of("2012-01-01T04:50:00.000Z"), + DateTimes.of("2012-01-02T07:05:00.000Z"), + DateTimes.of("2012-01-04T00:20:00.000Z") ), Lists.newArrayList( - periodOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123Z")), - periodOrigin.bucketStart(new DateTime("2012-01-02T07:08:04.123Z")), - periodOrigin.bucketStart(new DateTime("2012-01-04T00:20:04.123Z")) + periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123Z")), + periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:08:04.123Z")), + periodOrigin.bucketStart(DateTimes.of("2012-01-04T00:20:04.123Z")) ) ); @@ -472,14 +468,14 @@ public class QueryGranularityTest PeriodGranularity periodNoOrigin = new PeriodGranularity(new Period("PT15M"), null, null); assertSameDateTime( Lists.newArrayList( - new DateTime("2012-01-01T05:00:00.000Z"), - new DateTime("2012-01-02T07:00:00.000Z"), - new DateTime("2012-01-04T00:15:00.000Z") + DateTimes.of("2012-01-01T05:00:00.000Z"), + DateTimes.of("2012-01-02T07:00:00.000Z"), + DateTimes.of("2012-01-04T00:15:00.000Z") ), Lists.newArrayList( - periodNoOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123Z")), - periodNoOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123Z")), - periodNoOrigin.bucketStart(new DateTime("2012-01-04T00:20:04.123Z")) + periodNoOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123Z")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123Z")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-04T00:20:04.123Z")) ) ); @@ -490,7 +486,7 @@ public class QueryGranularityTest { { final DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles"); - final DateTime origin = new 
DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); PeriodGranularity periodOrigin = new PeriodGranularity( new Period("P1M2D"), origin, @@ -504,10 +500,10 @@ public class QueryGranularityTest new DateTime("2012-02-04T05:00:00.000-08:00", tz) ), Lists.newArrayList( - periodOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-03-01T07:20:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-02-04T05:00:00.000-08:00")) + periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-03-01T07:20:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-02-04T05:00:00.000-08:00")) ) ); @@ -524,10 +520,10 @@ public class QueryGranularityTest new DateTime("2012-02-16T00:00:00.000-08:00", tz) ), Lists.newArrayList( - periodNoOrigin.bucketStart(new DateTime("1970-01-01T05:02:04.123-08:00")), - periodNoOrigin.bucketStart(new DateTime("2012-01-01T05:02:04.123-08:00")), - periodNoOrigin.bucketStart(new DateTime("2012-01-15T07:01:04.123-08:00")), - periodNoOrigin.bucketStart(new DateTime("2012-02-16T00:00:00.000-08:00")) + periodNoOrigin.bucketStart(DateTimes.of("1970-01-01T05:02:04.123-08:00")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-01T05:02:04.123-08:00")), + periodNoOrigin.bucketStart(DateTimes.of("2012-01-15T07:01:04.123-08:00")), + periodNoOrigin.bucketStart(DateTimes.of("2012-02-16T00:00:00.000-08:00")) ) ); @@ -535,7 +531,7 @@ public class QueryGranularityTest { final DateTimeZone tz = DateTimeZone.forID("America/Los_Angeles"); - final DateTime origin = new DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); PeriodGranularity periodOrigin = new PeriodGranularity( new Period("PT12H5M"), origin, @@ -549,10 +545,10 @@ public class QueryGranularityTest new DateTime("2012-02-03T22:25:00.000-08:00", tz) ), Lists.newArrayList( - periodOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-03T00:20:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-02-03T22:25:00.000-08:00")) + periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-03T00:20:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-02-03T22:25:00.000-08:00")) ) ); } @@ -562,7 +558,7 @@ public class QueryGranularityTest public void testCompoundPeriodMillisTruncate() throws Exception { { - final DateTime origin = new DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); PeriodGranularity periodOrigin = new PeriodGranularity( new Period("PT12H5M"), origin, @@ -570,16 +566,16 @@ public class QueryGranularityTest ); assertSameDateTime( Lists.newArrayList( - new DateTime("2012-01-01T04:50:00.000-08:00"), - new DateTime("2012-01-02T05:00:00.000-08:00"), - new DateTime("2012-01-02T17:05:00.000-08:00"), - new DateTime("2012-02-03T22:25:00.000-08:00") + DateTimes.of("2012-01-01T04:50:00.000-08:00"), + DateTimes.of("2012-01-02T05:00:00.000-08:00"), + 
DateTimes.of("2012-01-02T17:05:00.000-08:00"), + DateTimes.of("2012-02-03T22:25:00.000-08:00") ), Lists.newArrayList( - periodOrigin.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-01-03T00:20:04.123-08:00")), - periodOrigin.bucketStart(new DateTime("2012-02-03T22:25:00.000-08:00")) + periodOrigin.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-01-03T00:20:04.123-08:00")), + periodOrigin.bucketStart(DateTimes.of("2012-02-03T22:25:00.000-08:00")) ) ); } @@ -589,23 +585,23 @@ public class QueryGranularityTest public void testDurationTruncate() throws Exception { { - final DateTime origin = new DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); Granularity gran = new DurationGranularity( new Period("PT12H5M").toStandardDuration().getMillis(), origin ); assertSameDateTime( Lists.newArrayList( - new DateTime("2012-01-01T04:50:00.000-08:00"), - new DateTime("2012-01-02T05:00:00.000-08:00"), - new DateTime("2012-01-02T17:05:00.000-08:00"), - new DateTime("2012-02-03T22:25:00.000-08:00") + DateTimes.of("2012-01-01T04:50:00.000-08:00"), + DateTimes.of("2012-01-02T05:00:00.000-08:00"), + DateTimes.of("2012-01-02T17:05:00.000-08:00"), + DateTimes.of("2012-02-03T22:25:00.000-08:00") ), Lists.newArrayList( - gran.bucketStart(new DateTime("2012-01-01T05:00:04.123-08:00")), - gran.bucketStart(new DateTime("2012-01-02T07:00:04.123-08:00")), - gran.bucketStart(new DateTime("2012-01-03T00:20:04.123-08:00")), - gran.bucketStart(new DateTime("2012-02-03T22:25:00.000-08:00")) + gran.bucketStart(DateTimes.of("2012-01-01T05:00:04.123-08:00")), + gran.bucketStart(DateTimes.of("2012-01-02T07:00:04.123-08:00")), + gran.bucketStart(DateTimes.of("2012-01-03T00:20:04.123-08:00")), + gran.bucketStart(DateTimes.of("2012-02-03T22:25:00.000-08:00")) ) ); } @@ -614,15 +610,15 @@ public class QueryGranularityTest @Test public void testDurationToDateTime() throws Exception { - final DateTime origin = new DateTime("2012-01-02T05:00:00.000-08:00"); + final DateTime origin = DateTimes.of("2012-01-02T05:00:00.000-08:00"); Granularity gran = new DurationGranularity( new Period("PT12H5M").toStandardDuration().getMillis(), origin ); Assert.assertEquals( - new DateTime("2012-01-01T05:00:04.123-08:00"), - gran.toDateTime(new DateTime("2012-01-01T05:00:04.123-08:00").getMillis()) + DateTimes.of("2012-01-01T05:00:04.123-08:00"), + gran.toDateTime(DateTimes.of("2012-01-01T05:00:04.123-08:00").getMillis()) ); } @@ -630,22 +626,22 @@ public class QueryGranularityTest @Test public void testIterableAllSimple() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T00:00:00.000Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T00:00:00.000Z"); assertSameInterval( Lists.newArrayList(baseTime), - Granularities.ALL.getIterable(new Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.ALL.getIterable(new Interval(baseTime, baseTime.plus(Days.days(3)))) ); } @Test public void testIterableAllComplex() throws Exception { - final DateTime baseTime = new DateTime("2011-01-01T09:38:02.992Z"); + final DateTime baseTime = DateTimes.of("2011-01-01T09:38:02.992Z"); assertSameInterval( Lists.newArrayList(baseTime), - Granularities.ALL.getIterable(new 
Interval(baseTime.getMillis(), baseTime.plus(Days.days(3)).getMillis())) + Granularities.ALL.getIterable(new Interval(baseTime, baseTime.plus(Days.days(3)))) ); } @@ -669,13 +665,13 @@ public class QueryGranularityTest gran = mapper.readValue(json, Granularity.class); Assert.assertEquals(new PeriodGranularity( new Period("P1D"), - new DateTime(0L), + DateTimes.EPOCH, DateTimeZone.forID("America/Los_Angeles") ), gran); PeriodGranularity expected = new PeriodGranularity( new Period("P1D"), - new DateTime("2012-01-01"), + DateTimes.of("2012-01-01"), DateTimeZone.forID("America/Los_Angeles") ); diff --git a/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java b/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java index d2f1edaaed6..2d750de3318 100644 --- a/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java +++ b/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java @@ -20,6 +20,7 @@ package io.druid.jackson; import com.fasterxml.jackson.databind.ObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.junit.Assert; @@ -34,7 +35,7 @@ public class DefaultObjectMapperTest @Test public void testDateTime() throws Exception { - final DateTime time = new DateTime(); + final DateTime time = DateTimes.nowUtc(); Assert.assertEquals(StringUtils.format("\"%s\"", time), mapper.writeValueAsString(time)); } diff --git a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java index e17b20369f4..fb0b18a80c2 100644 --- a/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java +++ b/processing/src/test/java/io/druid/query/MultiValuedDimensionTest.java @@ -31,6 +31,7 @@ import io.druid.data.input.impl.CSVParseSpec; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -61,7 +62,6 @@ import io.druid.segment.Segment; import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndex; import org.apache.commons.io.FileUtils; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -313,7 +313,7 @@ public class MultiValuedDimensionTest Sequence> result = runner.run(QueryPlus.wrap(query), context); List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( diff --git a/processing/src/test/java/io/druid/query/QueryContextsTest.java b/processing/src/test/java/io/druid/query/QueryContextsTest.java index 81f23374a21..32050ec9261 100644 --- a/processing/src/test/java/io/druid/query/QueryContextsTest.java +++ b/processing/src/test/java/io/druid/query/QueryContextsTest.java @@ -21,8 +21,8 @@ package io.druid.query; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.query.spec.MultipleIntervalSegmentSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -36,7 +36,7 @@ public class QueryContextsTest { final Query query = new 
TestQuery( new TableDataSource("test"), - new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("0/100"))), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))), false, new HashMap() ); @@ -48,7 +48,7 @@ public class QueryContextsTest { Query query = new TestQuery( new TableDataSource("test"), - new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("0/100"))), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))), false, new HashMap() ); @@ -63,7 +63,7 @@ public class QueryContextsTest { Query query = new TestQuery( new TableDataSource("test"), - new MultipleIntervalSegmentSpec(ImmutableList.of(new Interval("0/100"))), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("0/100"))), false, ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 1000) ); diff --git a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java index 0196205bb4b..f9fff791b1a 100644 --- a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java @@ -27,6 +27,8 @@ import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import com.metamx.emitter.core.NoopEmitter; import com.metamx.emitter.service.ServiceEmitter; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.UOE; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -138,7 +140,7 @@ public class QueryRunnerTestHelper "ntimestamps", Arrays.asList("__time"), "function aggregate(current, t) { if (t > " + - new DateTime("2011-04-01T12:00:00Z").getMillis() + + DateTimes.of("2011-04-01T12:00:00Z").getMillis() + ") { return current + 1; } else { return current; } }", JS_RESET_0, JS_COMBINE_A_PLUS_B, @@ -156,6 +158,12 @@ public class QueryRunnerTestHelper "uniques", "quality_uniques" ); + public static final HyperUniquesAggregatorFactory qualityUniquesRounded = new HyperUniquesAggregatorFactory( + "uniques", + "quality_uniques", + false, + true + ); public static final CardinalityAggregatorFactory qualityCardinality = new CardinalityAggregatorFactory( "cardinality", Arrays.asList(new DefaultDimensionSpec("quality", "quality")), @@ -238,22 +246,22 @@ public class QueryRunnerTestHelper expectedFullOnIndexValuesDesc = list.toArray(new String[list.size()]); } - public static final DateTime earliest = new DateTime("2011-01-12"); - public static final DateTime last = new DateTime("2011-04-15"); + public static final DateTime earliest = DateTimes.of("2011-01-12"); + public static final DateTime last = DateTimes.of("2011-04-15"); - public static final DateTime skippedDay = new DateTime("2011-01-21T00:00:00.000Z"); + public static final DateTime skippedDay = DateTimes.of("2011-01-21T00:00:00.000Z"); public static final QuerySegmentSpec firstToThird = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z")) + Arrays.asList(Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z")) ); public static final QuerySegmentSpec secondOnly = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("2011-04-02T00:00:00.000Z/P1D")) + Arrays.asList(Intervals.of("2011-04-02T00:00:00.000Z/P1D")) ); public static final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(new 
Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); public static final QuerySegmentSpec emptyInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("2020-04-02T00:00:00.000Z/P1D")) + Arrays.asList(Intervals.of("2020-04-02T00:00:00.000Z/P1D")) ); public static Iterable transformToConstructionFeeder(Iterable in) diff --git a/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java b/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java index c85ed44f1f9..d819dde055b 100644 --- a/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java +++ b/processing/src/test/java/io/druid/query/ResultGranularTimestampComparatorTest.java @@ -19,6 +19,7 @@ package io.druid.query; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import org.joda.time.DateTime; @@ -48,7 +49,7 @@ public class ResultGranularTimestampComparatorTest this.descending = descending; } - private final DateTime time = new DateTime("2011-11-11"); + private final DateTime time = DateTimes.of("2011-11-11"); @Test public void testCompareAll() diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index e046b60d664..b9c40aeea71 100644 --- a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -23,6 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -32,8 +34,6 @@ import io.druid.query.timeseries.TimeseriesQueryQueryToolChest; import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.segment.SegmentMissingException; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -98,12 +98,7 @@ public class RetryQueryRunnerTest public Sequence> run(QueryPlus queryPlus, Map context) { ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 1 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1) ); return Sequences.empty(); } @@ -158,12 +153,7 @@ public class RetryQueryRunnerTest { if ((int) context.get("count") == 0) { ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 1 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1) ); context.put("count", 1); return Sequences.empty(); @@ -171,7 +161,7 @@ public class RetryQueryRunnerTest return Sequences.simple( Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeseriesResultValue( Maps.newHashMap() ) @@ -217,12 +207,7 @@ public class RetryQueryRunnerTest { if ((int) context.get("count") < 3) { ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 1 - ) + new 
SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1) ); context.put("count", (int) context.get("count") + 1); return Sequences.empty(); @@ -230,7 +215,7 @@ public class RetryQueryRunnerTest return Sequences.simple( Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeseriesResultValue( Maps.newHashMap() ) @@ -274,12 +259,7 @@ public class RetryQueryRunnerTest ) { ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 1 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1) ); return Sequences.empty(); } @@ -321,26 +301,16 @@ public class RetryQueryRunnerTest if ((int) context.get("count") == 0) { // assume 2 missing segments at first run ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 1 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 1) ); ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 2 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 2) ); context.put("count", 1); return Sequences.simple( Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeseriesResultValue( Maps.newHashMap() ) @@ -352,18 +322,13 @@ public class RetryQueryRunnerTest Assert.assertTrue("Should retry with 2 missing segments", ((MultipleSpecificSegmentSpec) ((BaseQuery) query).getQuerySegmentSpec()).getDescriptors().size() == 2); // assume only left 1 missing at first retry ((List) context.get(Result.MISSING_SEGMENTS_KEY)).add( - new SegmentDescriptor( - new Interval( - 178888, - 1999999 - ), "test", 2 - ) + new SegmentDescriptor(Intervals.utc(178888, 1999999), "test", 2) ); context.put("count", 2); return Sequences.simple( Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeseriesResultValue( Maps.newHashMap() ) @@ -378,7 +343,7 @@ public class RetryQueryRunnerTest return Sequences.simple( Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeseriesResultValue( Maps.newHashMap() ) diff --git a/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java b/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java index 71871864778..71b30f92789 100644 --- a/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java +++ b/processing/src/test/java/io/druid/query/SchemaEvolutionTest.java @@ -31,6 +31,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.guava.Sequence; @@ -50,7 +51,6 @@ import io.druid.segment.IndexBuilder; import io.druid.segment.QueryableIndex; import io.druid.segment.QueryableIndexSegment; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -73,7 +73,7 @@ public class SchemaEvolutionTest public static List> timeseriesResult(final Map map) { - return ImmutableList.of(new Result<>(new DateTime("2000"), new TimeseriesResultValue((Map) map))); + return ImmutableList.of(new Result<>(DateTimes.of("2000"), new TimeseriesResultValue((Map) map))); } public static List 
inputRowsWithDimensions(final List dimensions) diff --git a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java index f0b6c490f0a..8b5675b646f 100644 --- a/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java +++ b/processing/src/test/java/io/druid/query/TimewarpOperatorTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregatorFactory; @@ -44,32 +45,26 @@ public class TimewarpOperatorTest public static final ImmutableMap CONTEXT = ImmutableMap.of(); TimewarpOperator> testOperator = new TimewarpOperator<>( - new Interval(new DateTime("2014-01-01"), new DateTime("2014-01-15")), + new Interval(DateTimes.of("2014-01-01"), DateTimes.of("2014-01-15")), new Period("P1W"), - new DateTime("2014-01-06") // align on Monday + DateTimes.of("2014-01-06") // align on Monday ); @Test public void testComputeOffset() throws Exception { { - final DateTime t = new DateTime("2014-01-23"); - final DateTime tOffset = new DateTime("2014-01-09"); + final DateTime t = DateTimes.of("2014-01-23"); + final DateTime tOffset = DateTimes.of("2014-01-09"); - Assert.assertEquals( - new DateTime(tOffset), - t.plus(testOperator.computeOffset(t.getMillis())) - ); + Assert.assertEquals(tOffset, t.plus(testOperator.computeOffset(t.getMillis()))); } { - final DateTime t = new DateTime("2014-08-02"); - final DateTime tOffset = new DateTime("2014-01-11"); + final DateTime t = DateTimes.of("2014-08-02"); + final DateTime tOffset = DateTimes.of("2014-01-11"); - Assert.assertEquals( - new DateTime(tOffset), - t.plus(testOperator.computeOffset(t.getMillis())) - ); + Assert.assertEquals(tOffset, t.plus(testOperator.computeOffset(t.getMillis()))); } } @@ -88,11 +83,11 @@ public class TimewarpOperatorTest return Sequences.simple( ImmutableList.of( new Result<>( - new DateTime(new DateTime("2014-01-09")), + DateTimes.of("2014-01-09"), new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( - new DateTime(new DateTime("2014-01-11")), + DateTimes.of("2014-01-11"), new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ), new Result<>( @@ -103,7 +98,7 @@ public class TimewarpOperatorTest ); } }, - new DateTime("2014-08-02").getMillis() + DateTimes.of("2014-08-02").getMillis() ); final Query> query = @@ -116,15 +111,15 @@ public class TimewarpOperatorTest Assert.assertEquals( Lists.newArrayList( new Result<>( - new DateTime("2014-07-31"), + DateTimes.of("2014-07-31"), new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( - new DateTime("2014-08-02"), + DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ), new Result<>( - new DateTime("2014-08-02"), + DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 5)) ) ), @@ -136,9 +131,9 @@ public class TimewarpOperatorTest TimewarpOperator> timeBoundaryOperator = new TimewarpOperator<>( - new Interval(new DateTime("2014-01-01"), new DateTime("2014-01-15")), + new Interval(DateTimes.of("2014-01-01"), DateTimes.of("2014-01-15")), new Period("P1W"), - new DateTime("2014-01-06") // align on Monday + DateTimes.of("2014-01-06") // align on Monday ); QueryRunner> timeBoundaryRunner = 
timeBoundaryOperator.postProcess( @@ -153,11 +148,11 @@ public class TimewarpOperatorTest return Sequences.simple( ImmutableList.of( new Result<>( - new DateTime("2014-01-12"), + DateTimes.of("2014-01-12"), new TimeBoundaryResultValue( ImmutableMap.of( "maxTime", - new DateTime("2014-01-12") + DateTimes.of("2014-01-12") ) ) ) @@ -165,7 +160,7 @@ public class TimewarpOperatorTest ); } }, - new DateTime("2014-08-02").getMillis() + DateTimes.of("2014-08-02").getMillis() ); final Query> timeBoundaryQuery = @@ -176,8 +171,8 @@ public class TimewarpOperatorTest Assert.assertEquals( Lists.newArrayList( new Result<>( - new DateTime("2014-08-02"), - new TimeBoundaryResultValue(ImmutableMap.of("maxTime", new DateTime("2014-08-02"))) + DateTimes.of("2014-08-02"), + new TimeBoundaryResultValue(ImmutableMap.of("maxTime", DateTimes.of("2014-08-02"))) ) ), Sequences.toList( @@ -215,7 +210,7 @@ public class TimewarpOperatorTest ); } }, - new DateTime("2014-08-02").getMillis() + DateTimes.of("2014-08-02").getMillis() ); final Query> query = @@ -228,11 +223,11 @@ public class TimewarpOperatorTest Assert.assertEquals( Lists.newArrayList( new Result<>( - new DateTime("2014-08-02"), + DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 2)) ), new Result<>( - new DateTime("2014-08-02"), + DateTimes.of("2014-08-02"), new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ) ), diff --git a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java index 0f53090c482..362f14285a2 100644 --- a/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java @@ -148,12 +148,6 @@ public class CardinalityAggregatorTest return IntIterators.asIntIterator(Iterators.forArray(column.get(p))); } - @Override - public void fill(int index, int[] toFill) - { - throw new UnsupportedOperationException("fill not supported"); - } - @Override public void close() throws IOException { @@ -292,6 +286,7 @@ public class CardinalityAggregatorTest List> dimInfoList; List selectorList; CardinalityAggregatorFactory rowAggregatorFactory; + CardinalityAggregatorFactory rowAggregatorFactoryRounded; CardinalityAggregatorFactory valueAggregatorFactory; final TestDimensionSelector dim1; final TestDimensionSelector dim2; @@ -341,6 +336,17 @@ public class CardinalityAggregatorTest true ); + rowAggregatorFactoryRounded = new CardinalityAggregatorFactory( + "billy", + null, + Lists.newArrayList( + dimSpec1, + dimSpec2 + ), + true, + true + ); + valueAggregatorFactory = new CardinalityAggregatorFactory( "billy", Lists.newArrayList( @@ -409,6 +415,7 @@ public class CardinalityAggregatorTest aggregate(selectorList, agg); } Assert.assertEquals(9.0, (Double) rowAggregatorFactory.finalizeComputation(agg.get()), 0.05); + Assert.assertEquals(9L, rowAggregatorFactoryRounded.finalizeComputation(agg.get())); } @Test @@ -424,6 +431,7 @@ public class CardinalityAggregatorTest aggregate(selectorList, agg); } Assert.assertEquals(7.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get()), 0.05); + Assert.assertEquals(7L, rowAggregatorFactoryRounded.finalizeComputation(agg.get())); } @Test @@ -445,6 +453,7 @@ public class CardinalityAggregatorTest bufferAggregate(selectorList, agg, buf, pos); } Assert.assertEquals(9.0, (Double) 
rowAggregatorFactory.finalizeComputation(agg.get(buf, pos)), 0.05); + Assert.assertEquals(9L, rowAggregatorFactoryRounded.finalizeComputation(agg.get(buf, pos))); } @Test @@ -466,6 +475,7 @@ public class CardinalityAggregatorTest bufferAggregate(selectorList, agg, buf, pos); } Assert.assertEquals(7.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos)), 0.05); + Assert.assertEquals(7L, rowAggregatorFactoryRounded.finalizeComputation(agg.get(buf, pos))); } @Test @@ -612,11 +622,13 @@ public class CardinalityAggregatorTest { CardinalityAggregatorFactory factory = new CardinalityAggregatorFactory( "billy", + null, ImmutableList.of( new DefaultDimensionSpec("b", "b"), new DefaultDimensionSpec("a", "a"), new DefaultDimensionSpec("c", "c") ), + true, true ); ObjectMapper objectMapper = new DefaultObjectMapper(); @@ -625,7 +637,13 @@ public class CardinalityAggregatorTest objectMapper.readValue(objectMapper.writeValueAsString(factory), AggregatorFactory.class) ); - String fieldNamesOnly = "{\"type\":\"cardinality\",\"name\":\"billy\",\"fields\":[\"b\",\"a\",\"c\"],\"byRow\":true}"; + String fieldNamesOnly = "{" + + "\"type\":\"cardinality\"," + + "\"name\":\"billy\"," + + "\"fields\":[\"b\",\"a\",\"c\"]," + + "\"byRow\":true," + + "\"round\":true" + + "}"; Assert.assertEquals( factory, objectMapper.readValue(fieldNamesOnly, AggregatorFactory.class) diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java index c16b51a03ec..b543f73e9cf 100644 --- a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniqueFinalizingPostAggregatorTest.java @@ -23,9 +23,13 @@ import com.google.common.collect.ImmutableMap; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import io.druid.hll.HyperLogLogCollector; +import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; +import io.druid.query.dimension.DefaultDimensionSpec; +import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Test; +import java.util.Collections; import java.util.Random; /** @@ -52,4 +56,35 @@ public class HyperUniqueFinalizingPostAggregatorTest Assert.assertTrue(cardinality == 99.37233005831612); } + + @Test + public void testComputeRounded() throws Exception + { + Random random = new Random(0L); + HyperUniqueFinalizingPostAggregator postAggregator = new HyperUniqueFinalizingPostAggregator( + "uniques", "uniques" + ).decorate( + ImmutableMap.of( + "uniques", + new CardinalityAggregatorFactory( + "uniques", + null, + Collections.singletonList(DefaultDimensionSpec.of("dummy")), + false, + true + ) + ) + ); + HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector(); + + for (int i = 0; i < 100; ++i) { + byte[] hashedVal = fn.hashLong(random.nextLong()).asBytes(); + collector.add(hashedVal); + } + + Object cardinality = postAggregator.compute(ImmutableMap.of("uniques", collector)); + + Assert.assertThat(cardinality, CoreMatchers.instanceOf(Long.class)); + Assert.assertEquals(99L, cardinality); + } } diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java index 
b9f7be621ec..7a54e87a525 100644 --- a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java @@ -19,11 +19,14 @@ package io.druid.query.aggregation.hyperloglog; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import io.druid.hll.HLLCV0; import io.druid.hll.HyperLogLogCollector; import io.druid.java.util.common.StringUtils; +import io.druid.query.aggregation.AggregatorFactory; +import io.druid.segment.TestHelper; import org.junit.Assert; import org.junit.Test; @@ -83,8 +86,8 @@ public class HyperUniquesAggregatorFactoryTest } Assert.assertEquals( - Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality()), - comparator.compare(collector1, collector2) + Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality()), + comparator.compare(collector1, collector2) ); } @@ -102,8 +105,8 @@ public class HyperUniquesAggregatorFactoryTest } Assert.assertEquals( - Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality()), - comparator.compare(collector1, collector2) + Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality()), + comparator.compare(collector1, collector2) ); } @@ -121,8 +124,8 @@ public class HyperUniquesAggregatorFactoryTest } Assert.assertEquals( - Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality()), - comparator.compare(collector1, collector2) + Double.compare(collector1.estimateCardinality(), collector2.estimateCardinality()), + comparator.compare(collector1, collector2) ); } } @@ -150,20 +153,42 @@ public class HyperUniquesAggregatorFactoryTest } // when - final int orderedByCardinality = Double.compare(leftCollector.estimateCardinality(), - rightCollector.estimateCardinality()); + final int orderedByCardinality = Double.compare( + leftCollector.estimateCardinality(), + rightCollector.estimateCardinality() + ); final int orderedByComparator = comparator.compare(leftCollector, rightCollector); // then, assert hyperloglog comparator behaves consistently with estimated cardinalities Assert.assertEquals( - StringUtils.format("orderedByComparator=%d, orderedByCardinality=%d,\n" + - "Left={cardinality=%f, hll=%s},\n" + - "Right={cardinality=%f, hll=%s},\n", orderedByComparator, orderedByCardinality, - leftCollector.estimateCardinality(), leftCollector, - rightCollector.estimateCardinality(), rightCollector), - orderedByCardinality, - orderedByComparator + StringUtils.format("orderedByComparator=%d, orderedByCardinality=%d,\n" + + "Left={cardinality=%f, hll=%s},\n" + + "Right={cardinality=%f, hll=%s},\n", orderedByComparator, orderedByCardinality, + leftCollector.estimateCardinality(), leftCollector, + rightCollector.estimateCardinality(), rightCollector + ), + orderedByCardinality, + orderedByComparator ); } } + + @Test + public void testSerde() throws Exception + { + final HyperUniquesAggregatorFactory factory = new HyperUniquesAggregatorFactory( + "foo", + "bar", + true, + true + ); + + final ObjectMapper jsonMapper = TestHelper.getJsonMapper(); + final AggregatorFactory factory2 = jsonMapper.readValue( + jsonMapper.writeValueAsString(factory), + AggregatorFactory.class + ); + + Assert.assertEquals(factory, factory2); + } } diff --git 
a/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java b/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java index 12dbfad4cfe..858bb3568ca 100644 --- a/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java @@ -19,7 +19,6 @@ package io.druid.query.datasourcemetadata; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -27,7 +26,10 @@ import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.query.DefaultGenericQueryMetricsFactory; import io.druid.query.Druids; import io.druid.query.GenericQueryMetricsFactory; @@ -124,7 +126,7 @@ public class DataSourceMetadataQueryTest ), new IncrementalIndexSegment(rtIndex, "test"), null ); - DateTime timestamp = new DateTime(System.currentTimeMillis()); + DateTime timestamp = DateTimes.nowUtc(); rtIndex.add( new MapBasedInputRow( timestamp.getMillis(), @@ -154,50 +156,50 @@ public class DataSourceMetadataQueryTest DataSourceQueryQueryToolChest toolChest = new DataSourceQueryQueryToolChest(queryMetricsFactory); List segments = toolChest .filterSegments( - null, - Arrays.asList( - new LogicalSegment() - { - @Override - public Interval getInterval() - { - return new Interval("2012-01-01/P1D"); - } - }, - new LogicalSegment() - { - @Override - public Interval getInterval() - { - return new Interval("2012-01-01T01/PT1H"); - } - }, - new LogicalSegment() - { - @Override - public Interval getInterval() - { - return new Interval("2013-01-01/P1D"); - } - }, - new LogicalSegment() - { - @Override - public Interval getInterval() - { - return new Interval("2013-01-01T01/PT1H"); - } - }, - new LogicalSegment() - { - @Override - public Interval getInterval() - { - return new Interval("2013-01-01T02/PT1H"); - } - } - ) - ); + null, + Arrays.asList( + new LogicalSegment() + { + @Override + public Interval getInterval() + { + return Intervals.of("2012-01-01/P1D"); + } + }, + new LogicalSegment() + { + @Override + public Interval getInterval() + { + return Intervals.of("2012-01-01T01/PT1H"); + } + }, + new LogicalSegment() + { + @Override + public Interval getInterval() + { + return Intervals.of("2013-01-01/P1D"); + } + }, + new LogicalSegment() + { + @Override + public Interval getInterval() + { + return Intervals.of("2013-01-01T01/PT1H"); + } + }, + new LogicalSegment() + { + @Override + public Interval getInterval() + { + return Intervals.of("2013-01-01T02/PT1H"); + } + } + ) + ); Assert.assertEquals(segments.size(), 2); // should only have the latest segments. 
@@ -207,7 +209,7 @@ public class DataSourceMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-01-01/P1D"); + return Intervals.of("2013-01-01/P1D"); } }, new LogicalSegment() @@ -215,7 +217,7 @@ public class DataSourceMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-01-01T02/PT1H"); + return Intervals.of("2013-01-01T02/PT1H"); } } ); @@ -228,12 +230,10 @@ public class DataSourceMetadataQueryTest @Test public void testResultSerialization() { - final DataSourceMetadataResultValue resultValue = new DataSourceMetadataResultValue(new DateTime("2000-01-01T00Z")); + final DataSourceMetadataResultValue resultValue = new DataSourceMetadataResultValue(DateTimes.of("2000-01-01T00Z")); final Map resultValueMap = new DefaultObjectMapper().convertValue( resultValue, - new TypeReference>() - { - } + JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); Assert.assertEquals( ImmutableMap.of("maxIngestedEventTime", "2000-01-01T00:00:00.000Z"), @@ -252,7 +252,7 @@ public class DataSourceMetadataQueryTest resultValueMap, DataSourceMetadataResultValue.class ); - Assert.assertEquals(new DateTime("2000"), resultValue.getMaxIngestedEventTime()); + Assert.assertEquals(DateTimes.of("2000"), resultValue.getMaxIngestedEventTime()); } } diff --git a/processing/src/test/java/io/druid/query/expression/ExprMacroTest.java b/processing/src/test/java/io/druid/query/expression/ExprMacroTest.java index 5c09723c8a9..fb811984298 100644 --- a/processing/src/test/java/io/druid/query/expression/ExprMacroTest.java +++ b/processing/src/test/java/io/druid/query/expression/ExprMacroTest.java @@ -20,9 +20,9 @@ package io.druid.query.expression; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; import io.druid.math.expr.Expr; import io.druid.math.expr.Parser; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -32,7 +32,7 @@ public class ExprMacroTest { private static final Expr.ObjectBinding BINDINGS = Parser.withMap( ImmutableMap.builder() - .put("t", new DateTime("2000-02-03T04:05:06").getMillis()) + .put("t", DateTimes.of("2000-02-03T04:05:06").getMillis()) .put("tstr", "2000-02-03T04:05:06") .put("tstr_sql", "2000-02-03 04:05:06") .put("x", "foo") @@ -82,28 +82,28 @@ public class ExprMacroTest @Test public void testTimestampCeil() { - assertExpr("timestamp_ceil(t, 'P1M')", new DateTime("2000-03-01").getMillis()); - assertExpr("timestamp_ceil(t, 'P1D','','America/Los_Angeles')", new DateTime("2000-02-03T08").getMillis()); - assertExpr("timestamp_ceil(t, 'P1D','',CityOfAngels)", new DateTime("2000-02-03T08").getMillis()); - assertExpr("timestamp_ceil(t, 'P1D','1970-01-01T01','Etc/UTC')", new DateTime("2000-02-04T01").getMillis()); + assertExpr("timestamp_ceil(t, 'P1M')", DateTimes.of("2000-03-01").getMillis()); + assertExpr("timestamp_ceil(t, 'P1D','','America/Los_Angeles')", DateTimes.of("2000-02-03T08").getMillis()); + assertExpr("timestamp_ceil(t, 'P1D','',CityOfAngels)", DateTimes.of("2000-02-03T08").getMillis()); + assertExpr("timestamp_ceil(t, 'P1D','1970-01-01T01','Etc/UTC')", DateTimes.of("2000-02-04T01").getMillis()); } @Test public void testTimestampFloor() { - assertExpr("timestamp_floor(t, 'P1M')", new DateTime("2000-02-01").getMillis()); - assertExpr("timestamp_floor(t, 'P1D','','America/Los_Angeles')", new DateTime("2000-02-02T08").getMillis()); - assertExpr("timestamp_floor(t, 'P1D','',CityOfAngels)", new DateTime("2000-02-02T08").getMillis()); - 
assertExpr("timestamp_floor(t, 'P1D','1970-01-01T01','Etc/UTC')", new DateTime("2000-02-03T01").getMillis()); + assertExpr("timestamp_floor(t, 'P1M')", DateTimes.of("2000-02-01").getMillis()); + assertExpr("timestamp_floor(t, 'P1D','','America/Los_Angeles')", DateTimes.of("2000-02-02T08").getMillis()); + assertExpr("timestamp_floor(t, 'P1D','',CityOfAngels)", DateTimes.of("2000-02-02T08").getMillis()); + assertExpr("timestamp_floor(t, 'P1D','1970-01-01T01','Etc/UTC')", DateTimes.of("2000-02-03T01").getMillis()); } @Test public void testTimestampShift() { - assertExpr("timestamp_shift(t, 'P1D', 2)", new DateTime("2000-02-05T04:05:06").getMillis()); - assertExpr("timestamp_shift(t, 'P1D', 2, 'America/Los_Angeles')", new DateTime("2000-02-05T04:05:06").getMillis()); - assertExpr("timestamp_shift(t, 'P1D', 2, CityOfAngels)", new DateTime("2000-02-05T04:05:06").getMillis()); - assertExpr("timestamp_shift(t, 'P1D', 2, '-08:00')", new DateTime("2000-02-05T04:05:06").getMillis()); + assertExpr("timestamp_shift(t, 'P1D', 2)", DateTimes.of("2000-02-05T04:05:06").getMillis()); + assertExpr("timestamp_shift(t, 'P1D', 2, 'America/Los_Angeles')", DateTimes.of("2000-02-05T04:05:06").getMillis()); + assertExpr("timestamp_shift(t, 'P1D', 2, CityOfAngels)", DateTimes.of("2000-02-05T04:05:06").getMillis()); + assertExpr("timestamp_shift(t, 'P1D', 2, '-08:00')", DateTimes.of("2000-02-05T04:05:06").getMillis()); } @Test @@ -118,12 +118,12 @@ public class ExprMacroTest @Test public void testTimestampParse() { - assertExpr("timestamp_parse(tstr)", new DateTime("2000-02-03T04:05:06").getMillis()); + assertExpr("timestamp_parse(tstr)", DateTimes.of("2000-02-03T04:05:06").getMillis()); assertExpr("timestamp_parse(tstr_sql)", null); - assertExpr("timestamp_parse(tstr_sql,'yyyy-MM-dd HH:mm:ss')", new DateTime("2000-02-03T04:05:06").getMillis()); + assertExpr("timestamp_parse(tstr_sql,'yyyy-MM-dd HH:mm:ss')", DateTimes.of("2000-02-03T04:05:06").getMillis()); assertExpr( "timestamp_parse(tstr_sql,'yyyy-MM-dd HH:mm:ss','America/Los_Angeles')", - new DateTime("2000-02-03T04:05:06-08:00").getMillis() + DateTimes.of("2000-02-03T04:05:06-08:00").getMillis() ); } diff --git a/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java index b2bfad510c0..dc4494dadbd 100644 --- a/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/extraction/JavaScriptExtractionFnTest.java @@ -24,8 +24,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.js.JavaScriptConfig; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -77,7 +77,7 @@ public class JavaScriptExtractionFnTest public void testTimeExample() throws Exception { String utcHour = "function(t) {\nreturn 'Second ' + Math.floor((t % 60000) / 1000);\n}"; - final long millis = new DateTime("2015-01-02T13:00:59.999Z").getMillis(); + final long millis = DateTimes.of("2015-01-02T13:00:59.999Z").getMillis(); Assert.assertEquals("Second 59", new JavaScriptExtractionFn(utcHour, false, JavaScriptConfig.getEnabledInstance()).apply(millis)); } diff --git a/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java 
b/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java index 2a608b6923c..83b416f486f 100644 --- a/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/extraction/TimeFormatExtractionFnTest.java @@ -21,9 +21,9 @@ package io.druid.query.extraction; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; -import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Assert; import org.junit.Test; @@ -34,12 +34,12 @@ public class TimeFormatExtractionFnTest { private static final long[] timestamps = { - new DateTime("2015-01-01T23:00:00Z").getMillis(), - new DateTime("2015-01-02T23:00:00Z").getMillis(), - new DateTime("2015-03-03T23:00:00Z").getMillis(), - new DateTime("2015-03-04T23:00:00Z").getMillis(), - new DateTime("2015-05-02T23:00:00Z").getMillis(), - new DateTime("2015-12-21T23:00:00Z").getMillis() + DateTimes.of("2015-01-01T23:00:00Z").getMillis(), + DateTimes.of("2015-01-02T23:00:00Z").getMillis(), + DateTimes.of("2015-03-03T23:00:00Z").getMillis(), + DateTimes.of("2015-03-04T23:00:00Z").getMillis(), + DateTimes.of("2015-05-02T23:00:00Z").getMillis(), + DateTimes.of("2015-12-21T23:00:00Z").getMillis() }; @Test diff --git a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java index d335e770f27..30bfe9bcb28 100644 --- a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java +++ b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java @@ -23,12 +23,12 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableRangeSet; import com.google.common.collect.Range; import com.google.common.collect.RangeSet; +import io.druid.java.util.common.Intervals; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.IdentityExtractionFn; import io.druid.query.ordering.StringComparators; import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.segment.column.Column; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -66,8 +66,8 @@ public class GetDimensionRangeSetTest private final DimFilter interval1 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -75,8 +75,8 @@ public class GetDimensionRangeSetTest private final DimFilter interval2 = new IntervalDimFilter( "dim1", Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); diff --git a/processing/src/test/java/io/druid/query/filter/IntervalDimFilterTest.java b/processing/src/test/java/io/druid/query/filter/IntervalDimFilterTest.java index ad9aec4cb30..06538e40daa 100644 --- 
a/processing/src/test/java/io/druid/query/filter/IntervalDimFilterTest.java +++ b/processing/src/test/java/io/druid/query/filter/IntervalDimFilterTest.java @@ -24,9 +24,9 @@ import com.google.inject.Injector; import com.google.inject.Key; import io.druid.guice.GuiceInjectors; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.Intervals; import io.druid.query.extraction.RegexDimExtractionFn; import io.druid.segment.column.Column; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -51,8 +51,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -63,8 +63,8 @@ public class IntervalDimFilterTest intervalFilter = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), new RegexDimExtractionFn(".*", false, null) ); @@ -80,8 +80,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter1 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -89,8 +89,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter2 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -100,16 +100,16 @@ public class IntervalDimFilterTest DimFilter intervalFilter3 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), regexFn ); DimFilter intervalFilter4 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), regexFn ); @@ -124,8 +124,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter1 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + 
Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -133,8 +133,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter2 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), regexFn ); @@ -142,8 +142,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter3 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1977-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1977-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -154,9 +154,9 @@ public class IntervalDimFilterTest DimFilter intervalFilter4 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1977-01-01T00:00:00.004Z"), - Interval.parse("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1977-01-01T00:00:00.004Z"), + Intervals.of("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -166,8 +166,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter5 = new IntervalDimFilter( "__thyme", Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -182,8 +182,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter1 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -191,8 +191,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter2 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), regexFn ); @@ -200,8 +200,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter3 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1977-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1977-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -212,9 +212,9 @@ public class IntervalDimFilterTest DimFilter intervalFilter4 = new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1977-01-01T00:00:00.004Z"), - 
Interval.parse("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1977-01-01T00:00:00.004Z"), + Intervals.of("1976-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); @@ -223,8 +223,8 @@ public class IntervalDimFilterTest DimFilter intervalFilter5 = new IntervalDimFilter( "__thyme", Arrays.asList( - Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), - Interval.parse("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") + Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.004Z"), + Intervals.of("1975-01-01T00:00:00.001Z/1980-01-01T00:00:00.004Z") ), null ); diff --git a/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java b/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java index ca14d9dd5a2..951c97d09d4 100644 --- a/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java +++ b/processing/src/test/java/io/druid/query/groupby/DefaultGroupByQueryMetricsTest.java @@ -22,6 +22,7 @@ package io.druid.query.groupby; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.metamx.emitter.service.ServiceEmitter; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.query.CachingEmitter; import io.druid.query.DefaultQueryMetricsTest; @@ -99,7 +100,7 @@ public class DefaultGroupByQueryMetricsTest Assert.assertEquals("", actualEvent.get("service")); Assert.assertEquals(QueryRunnerTestHelper.dataSource, actualEvent.get(DruidMetrics.DATASOURCE)); Assert.assertEquals(query.getType(), actualEvent.get(DruidMetrics.TYPE)); - Interval expectedInterval = new Interval("2011-04-02/2011-04-04"); + Interval expectedInterval = Intervals.of("2011-04-02/2011-04-04"); Assert.assertEquals(Collections.singletonList(expectedInterval.toString()), actualEvent.get(DruidMetrics.INTERVAL)); Assert.assertEquals("true", actualEvent.get("hasFilters")); Assert.assertEquals(expectedInterval.toDuration().toString(), actualEvent.get("duration")); diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java index 9611b0cff1e..1f3bcdf7946 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByMultiSegmentTest.java @@ -40,6 +40,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.LongDimensionSchema; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -76,7 +77,6 @@ import io.druid.segment.column.ColumnConfig; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -311,7 +311,9 @@ public class GroupByMultiSegmentTest ), (QueryToolChest) toolChest ); - QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval(0, 1000000))); + QuerySegmentSpec intervalSpec = new 
MultipleIntervalSegmentSpec( + Collections.singletonList(Intervals.utc(0, 1000000)) + ); GroupByQuery query = GroupByQuery .builder() diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java index 8bc0140e215..eff05ca2033 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryConfigTest.java @@ -21,9 +21,9 @@ package io.druid.query.groupby; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.segment.TestHelper; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -65,7 +65,7 @@ public class GroupByQueryConfigTest final GroupByQueryConfig config2 = config.withOverrides( GroupByQuery.builder() .setDataSource("test") - .setInterval(new Interval("2000/P1D")) + .setInterval(Intervals.of("2000/P1D")) .setGranularity(Granularities.ALL) .build() ); @@ -87,7 +87,7 @@ public class GroupByQueryConfigTest final GroupByQueryConfig config2 = config.withOverrides( GroupByQuery.builder() .setDataSource("test") - .setInterval(new Interval("2000/P1D")) + .setInterval(Intervals.of("2000/P1D")) .setGranularity(Granularities.ALL) .setContext( ImmutableMap.of( diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index ddef2a474c7..439dc6fc34c 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -36,8 +36,10 @@ import io.druid.collections.DefaultBlockingPool; import io.druid.collections.NonBlockingPool; import io.druid.collections.StupidPool; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.DurationGranularity; import io.druid.java.util.common.granularity.Granularities; @@ -92,6 +94,7 @@ import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.JavaScriptExtractionFn; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.extraction.RegexDimExtractionFn; +import io.druid.query.extraction.StringFormatExtractionFn; import io.druid.query.extraction.StrlenExtractionFn; import io.druid.query.extraction.TimeFormatExtractionFn; import io.druid.query.filter.AndDimFilter; @@ -127,7 +130,6 @@ import io.druid.segment.column.ValueType; import io.druid.segment.virtual.ExpressionVirtualColumn; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; -import org.joda.time.Interval; import org.joda.time.Period; import org.junit.Assert; import org.junit.Ignore; @@ -142,6 +144,7 @@ import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; @@ -452,6 +455,36 @@ public class GroupByQueryRunnerTest TestHelper.assertExpectedObjects(expectedResults, results, ""); } + @Test + public void testGroupByOnMissingColumn() + { + GroupByQuery query = GroupByQuery + 
.builder() + .setDataSource(QueryRunnerTestHelper.dataSource) + .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDimensions( + Lists.newArrayList( + new DefaultDimensionSpec("nonexistent0", "alias0"), + new ExtractionDimensionSpec("nonexistent1", "alias1", new StringFormatExtractionFn("foo")) + ) + ) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setGranularity(QueryRunnerTestHelper.allGran) + .build(); + + List expectedResults = Collections.singletonList( + GroupByQueryRunnerTestHelper.createExpectedRow( + "2011-04-01", + "alias0", null, + "alias1", "foo", + "rows", 26L + ) + ); + + Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + TestHelper.assertExpectedObjects(expectedResults, results, ""); + } + @Test public void testGroupByWithStringPostAggregator() { @@ -2492,10 +2525,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), @@ -2803,10 +2836,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), @@ -3610,10 +3643,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), @@ -3828,7 +3861,7 @@ public class GroupByQueryRunnerTest new SelectorDimFilter("idx", "217", null) ) ), - new SelectorDimFilter("__time", String.valueOf(new DateTime("2011-04-01").getMillis()), null) + new SelectorDimFilter("__time", String.valueOf(DateTimes.of("2011-04-01").getMillis()), null) ) ) ); @@ -3943,10 +3976,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new 
MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), @@ -4052,10 +4085,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return new MergeSequence( queryPlus.getQuery().getResultOrdering(), @@ -4278,8 +4311,8 @@ public class GroupByQueryRunnerTest .setQuerySegmentSpec( new MultipleIntervalSegmentSpec( ImmutableList.of( - new Interval("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), - new Interval("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) ) @@ -4351,8 +4384,8 @@ public class GroupByQueryRunnerTest .setQuerySegmentSpec( new MultipleIntervalSegmentSpec( ImmutableList.of( - new Interval("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), - new Interval("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) ) @@ -4425,7 +4458,7 @@ public class GroupByQueryRunnerTest .setQuerySegmentSpec( new MultipleIntervalSegmentSpec( ImmutableList.of( - new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z") + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) ) @@ -6601,7 +6634,7 @@ public class GroupByQueryRunnerTest { int segmentCount = 32; Result singleSegmentResult = new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow( @@ -6613,7 +6646,7 @@ public class GroupByQueryRunnerTest "idx", 4420L ) - ), "testSegment", new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") + ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); List bySegmentResults = Lists.newArrayList(); @@ -6663,7 +6696,7 @@ public class GroupByQueryRunnerTest { int segmentCount = 32; Result singleSegmentResult = new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow( @@ -6675,7 +6708,7 @@ public class GroupByQueryRunnerTest "idx", 4420L ) - ), "testSegment", new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") + ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); List bySegmentResults = Lists.newArrayList(); @@ -6737,7 +6770,7 @@ public class GroupByQueryRunnerTest { int segmentCount = 32; Result singleSegmentResult = new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow( @@ -6749,7 +6782,7 @@ public class 
GroupByQueryRunnerTest "idx", 4420L ) - ), "testSegment", new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") + ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); List bySegmentResults = Lists.newArrayList(); @@ -7153,7 +7186,7 @@ public class GroupByQueryRunnerTest { int segmentCount = 32; Result singleSegmentResult = new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow( @@ -7165,7 +7198,7 @@ public class GroupByQueryRunnerTest "idx", 4420L ) - ), "testSegment", new Interval("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") + ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); List bySegmentResults = Lists.newArrayList(); @@ -8721,10 +8754,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( @@ -8805,10 +8838,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( @@ -8898,10 +8931,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( @@ -8995,10 +9028,10 @@ public class GroupByQueryRunnerTest { // simulate two daily segments final QueryPlus queryPlus1 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-02/2011-04-03"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-02/2011-04-03"))) ); final QueryPlus queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-04-03/2011-04-04"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-04-03/2011-04-04"))) ); return factory.getToolchest().mergeResults( @@ -9176,8 +9209,8 @@ public class GroupByQueryRunnerTest .setQuerySegmentSpec( new MultipleIntervalSegmentSpec( ImmutableList.of( - new 
Interval("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), - new Interval("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-01T23:58:00.000Z"), + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) ) diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java index f8f98e1280a..111d09e5759 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTestHelper.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.FinalizeResultsQueryRunner; @@ -34,6 +35,7 @@ import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryToolChest; import io.druid.segment.column.Column; import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; import java.util.Arrays; import java.util.List; @@ -58,7 +60,7 @@ public class GroupByQueryRunnerTestHelper public static Row createExpectedRow(final String timestamp, Object... vals) { - return createExpectedRow(new DateTime(timestamp), vals); + return createExpectedRow(DateTimes.of(timestamp), vals); } public static Row createExpectedRow(final DateTime timestamp, Object... vals) @@ -70,8 +72,7 @@ public class GroupByQueryRunnerTestHelper theVals.put(vals[i].toString(), vals[i + 1]); } - DateTime ts = new DateTime(timestamp); - return new MapBasedRow(ts, theVals); + return new MapBasedRow(timestamp, theVals); } public static List createExpectedRows(String[] columnNames, Object[]... 
values) @@ -88,7 +89,7 @@ public class GroupByQueryRunnerTestHelper theVals.put(columnNames[i], value[i]); } } - expected.add(new MapBasedRow(new DateTime(value[timeIndex]), theVals)); + expected.add(new MapBasedRow(new DateTime(value[timeIndex], ISOChronology.getInstanceUTC()), theVals)); } return expected; } diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java index 21e956594c8..bb7edc21069 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; @@ -154,8 +155,8 @@ public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest .descending(descending) .build(); - DateTime expectedEarliest = new DateTime("1970-01-01"); - DateTime expectedLast = new DateTime("2011-04-15"); + DateTime expectedEarliest = DateTimes.of("1970-01-01"); + DateTime expectedLast = DateTimes.of("2011-04-15"); Iterable> results = Sequences.toList( runner.run(QueryPlus.wrap(query), CONTEXT), diff --git a/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java b/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java index 03cdcd9cfae..2337d338379 100644 --- a/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java +++ b/processing/src/test/java/io/druid/query/groupby/orderby/DefaultLimitSpecTest.java @@ -26,6 +26,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.aggregation.AggregatorFactory; @@ -39,7 +40,6 @@ import io.druid.query.dimension.DimensionSpec; import io.druid.query.expression.TestExprMacroTable; import io.druid.query.ordering.StringComparators; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; @@ -278,7 +278,6 @@ public class DefaultLimitSpecTest theVals.put(vals[i].toString(), vals[i + 1]); } - DateTime ts = new DateTime(timestamp); - return new MapBasedRow(ts, theVals); + return new MapBasedRow(DateTimes.of(timestamp), theVals); } } diff --git a/processing/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java b/processing/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java index f3d1abd2e52..2e74bbf2385 100644 --- a/processing/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/lookup/RegisteredLookupExtractionFnTest.java @@ -19,10 +19,10 @@ package io.druid.query.lookup; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.jackson.JacksonUtils; 
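The JacksonUtils import added just above serves the same purpose as in DataSourceMetadataQueryTest earlier in this patch: anonymous new TypeReference instances at each call site give way to one shared constant. A minimal sketch of such a constant, assuming the obvious shape rather than quoting the project's actual source:

// Illustrative sketch: a shared TypeReference constant in the spirit of
// io.druid.java.util.common.jackson.JacksonUtils; the field shown is an assumption.
import com.fasterxml.jackson.core.type.TypeReference;
import java.util.Map;

final class JacksonUtilsSketch
{
  private JacksonUtilsSketch() {}

  // One immutable TypeReference shared by every readValue()/convertValue() call site,
  // instead of instantiating a fresh anonymous subclass each time.
  public static final TypeReference<Map<String, Object>> TYPE_REFERENCE_MAP_STRING_OBJECT =
      new TypeReference<Map<String, Object>>() {};
}

Call sites then read, for example, mapper.readValue(json, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT), as in the hunk that follows.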
import io.druid.query.extraction.MapLookupExtractor; import org.easymock.EasyMock; import org.junit.Assert; @@ -123,11 +123,8 @@ public class RegisteredLookupExtractionFnTest ); EasyMock.verify(manager); - final TypeReference> typeReference = new TypeReference>() - { - }; - final Map result = mapper.readValue(mapper.writeValueAsString(fn), typeReference); - Assert.assertEquals(mapper.convertValue(fn, typeReference), result); + final Map result = mapper.readValue(mapper.writeValueAsString(fn), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT); + Assert.assertEquals(mapper.convertValue(fn, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT), result); Assert.assertEquals(LOOKUP_NAME, result.get("lookup")); Assert.assertEquals(true, result.get("retainMissingValue")); Assert.assertEquals(true, result.get("injective")); diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java index 18ad069f0bc..76347a1fb87 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.query.CacheStrategy; import io.druid.query.TableDataSource; import io.druid.query.aggregation.AggregatorFactory; @@ -37,7 +38,6 @@ import io.druid.query.metadata.metadata.SegmentAnalysis; import io.druid.query.metadata.metadata.SegmentMetadataQuery; import io.druid.query.spec.QuerySegmentSpecs; import io.druid.segment.column.ValueType; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -69,9 +69,7 @@ public class SegmentMetadataQueryQueryToolChestTest SegmentAnalysis result = new SegmentAnalysis( "testSegment", - ImmutableList.of( - new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z") - ), + ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), ImmutableMap.of( "placement", new ColumnAnalysis( diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java index 899f65ad2b5..deaebcc734e 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryTest.java @@ -26,7 +26,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; import io.druid.data.input.impl.TimestampSpec; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.granularity.Granularities; @@ -170,9 +170,7 @@ public class SegmentMetadataQueryTest expectedSegmentAnalysis1 = new SegmentAnalysis( id1, - ImmutableList.of( - new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z") - ), + ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), ImmutableMap.of( "__time", new ColumnAnalysis( @@ -213,9 +211,7 @@ public class SegmentMetadataQueryTest ); expectedSegmentAnalysis2 = new SegmentAnalysis( 
id2, - ImmutableList.of( - new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z") - ), + ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), ImmutableMap.of( "__time", new ColumnAnalysis( @@ -878,7 +874,10 @@ public class SegmentMetadataQueryTest Query query = MAPPER.readValue(queryStr, Query.class); Assert.assertTrue(query instanceof SegmentMetadataQuery); Assert.assertEquals("test_ds", Iterables.getOnlyElement(query.getDataSource().getNames())); - Assert.assertEquals(new Interval("2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z"), query.getIntervals().get(0)); + Assert.assertEquals( + Intervals.of("2013-12-04T00:00:00.000Z/2013-12-05T00:00:00.000Z"), + query.getIntervals().get(0) + ); Assert.assertEquals(expectedAnalysisTypes, ((SegmentMetadataQuery) query).getAnalysisTypes()); // test serialize and deserialize @@ -895,7 +894,7 @@ public class SegmentMetadataQueryTest Query query = MAPPER.readValue(queryStr, Query.class); Assert.assertTrue(query instanceof SegmentMetadataQuery); Assert.assertEquals("test_ds", Iterables.getOnlyElement(query.getDataSource().getNames())); - Assert.assertEquals(new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT), query.getIntervals().get(0)); + Assert.assertEquals(Intervals.ETERNITY, query.getIntervals().get(0)); Assert.assertTrue(((SegmentMetadataQuery) query).isUsingDefaultInterval()); // test serialize and deserialize @@ -910,14 +909,9 @@ public class SegmentMetadataQueryTest .toInclude(new ListColumnIncluderator(Arrays.asList("placement"))) .merge(true) .build(); - - Interval expectedInterval = new Interval( - JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT - ); - /* No interval specified, should use default interval */ Assert.assertTrue(testQuery.isUsingDefaultInterval()); - Assert.assertEquals(testQuery.getIntervals().get(0), expectedInterval); + Assert.assertEquals(Intervals.ETERNITY, testQuery.getIntervals().get(0)); Assert.assertEquals(testQuery.getIntervals().size(), 1); List testSegments = Arrays.asList( @@ -926,7 +920,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2012-01-01/P1D"); + return Intervals.of("2012-01-01/P1D"); } }, new LogicalSegment() @@ -934,7 +928,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2012-01-01T01/PT1H"); + return Intervals.of("2012-01-01T01/PT1H"); } }, new LogicalSegment() @@ -942,7 +936,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-01-05/P1D"); + return Intervals.of("2013-01-05/P1D"); } }, new LogicalSegment() @@ -950,7 +944,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-05-20/P1D"); + return Intervals.of("2013-05-20/P1D"); } }, new LogicalSegment() @@ -958,7 +952,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2014-01-05/P1D"); + return Intervals.of("2014-01-05/P1D"); } }, new LogicalSegment() @@ -966,7 +960,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2014-02-05/P1D"); + return Intervals.of("2014-02-05/P1D"); } }, new LogicalSegment() @@ -974,7 +968,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-19T01/PT1H"); + return Intervals.of("2015-01-19T01/PT1H"); } }, new LogicalSegment() @@ -982,7 +976,7 @@ public class 
SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-20T02/PT1H"); + return Intervals.of("2015-01-20T02/PT1H"); } } ); @@ -1001,7 +995,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-19T01/PT1H"); + return Intervals.of("2015-01-19T01/PT1H"); } }, new LogicalSegment() @@ -1009,7 +1003,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-20T02/PT1H"); + return Intervals.of("2015-01-20T02/PT1H"); } } ); @@ -1034,7 +1028,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2013-05-20/P1D"); + return Intervals.of("2013-05-20/P1D"); } }, new LogicalSegment() @@ -1042,7 +1036,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2014-01-05/P1D"); + return Intervals.of("2014-01-05/P1D"); } }, new LogicalSegment() @@ -1050,7 +1044,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2014-02-05/P1D"); + return Intervals.of("2014-02-05/P1D"); } }, new LogicalSegment() @@ -1058,7 +1052,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-19T01/PT1H"); + return Intervals.of("2015-01-19T01/PT1H"); } }, new LogicalSegment() @@ -1066,7 +1060,7 @@ public class SegmentMetadataQueryTest @Override public Interval getInterval() { - return new Interval("2015-01-20T02/PT1H"); + return Intervals.of("2015-01-20T02/PT1H"); } } ); diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java index d2811e0bbd3..e6106c44fdb 100644 --- a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java +++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataUnionQueryTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryPlus; @@ -38,7 +39,6 @@ import io.druid.segment.QueryableIndexSegment; import io.druid.segment.TestHelper; import io.druid.segment.TestIndex; import io.druid.segment.column.ValueType; -import org.joda.time.Interval; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -98,7 +98,7 @@ public class SegmentMetadataUnionQueryTest { SegmentAnalysis expected = new SegmentAnalysis( QueryRunnerTestHelper.segmentId, - Lists.newArrayList(new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), + Lists.newArrayList(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), ImmutableMap.of( "placement", new ColumnAnalysis( diff --git a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java index 4c7d112b314..5ef096773bd 100644 --- a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java @@ -20,6 +20,7 @@ package io.druid.query.search; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.DateTimes; import 
io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import io.druid.query.ordering.StringComparators; @@ -39,7 +40,7 @@ import java.util.List; */ public class SearchBinaryFnTest { - private final DateTime currTime = new DateTime(); + private final DateTime currTime = DateTimes.nowUtc(); private void assertSearchMergeResult(SearchResultValue o1, SearchResultValue o2) { diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java index 4db4c1a54dd..988eeb516b6 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java @@ -22,6 +22,8 @@ package io.druid.query.search; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.CacheStrategy; import io.druid.query.Druids; @@ -31,8 +33,6 @@ import io.druid.query.search.search.FragmentSearchQuerySpec; import io.druid.query.search.search.SearchHit; import io.druid.query.search.search.SearchQuery; import io.druid.query.spec.MultipleIntervalSegmentSpec; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -49,13 +49,7 @@ public class SearchQueryQueryToolChestTest null, Granularities.ALL, 1, - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), ImmutableList.of(Druids.DIMENSION_IDENTITY.apply("dim1")), new FragmentSearchQuerySpec(ImmutableList.of("a", "b")), null, @@ -64,11 +58,8 @@ public class SearchQueryQueryToolChestTest ); final Result result = new Result<>( - new DateTime(123L), new SearchResultValue( - ImmutableList.of( - new SearchHit("dim1", "a") - ) - ) + DateTimes.utc(123L), + new SearchResultValue(ImmutableList.of(new SearchHit("dim1", "a"))) ); Object preparedValue = strategy.prepareForCache().apply( diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java index 1cd5b15374b..95691ceaaa7 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java @@ -23,6 +23,8 @@ import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.logger.Logger; @@ -60,8 +62,6 @@ import io.druid.segment.column.Column; import io.druid.segment.column.ValueType; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -171,10 +171,10 @@ public class SearchQueryRunnerTest ) { final QueryPlus> queryPlus1 
= queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-01-12/2011-02-28"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-01-12/2011-02-28"))) ); final QueryPlus> queryPlus2 = queryPlus.withQuerySegmentSpec( - new MultipleIntervalSegmentSpec(Lists.newArrayList(new Interval("2011-03-01/2011-04-15"))) + new MultipleIntervalSegmentSpec(Lists.newArrayList(Intervals.of("2011-03-01/2011-04-15"))) ); return Sequences.concat(runner.run(queryPlus1, responseContext), runner.run(queryPlus2, responseContext)); } @@ -746,7 +746,7 @@ public class SearchQueryRunnerTest IncrementalIndex index = new IncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis()) + .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) .build() ) .setMaxRowCount(10) @@ -820,7 +820,7 @@ public class SearchQueryRunnerTest ); List copy = Lists.newLinkedList(expectedResults); for (Result result : results) { - Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), result.getTimestamp()); + Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), result.getTimestamp()); Assert.assertTrue(result.getValue() instanceof Iterable); Iterable resultValues = result.getValue(); diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java index 231d9103c27..a5ea8127f11 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.io.CharSource; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -41,7 +42,6 @@ import io.druid.segment.QueryableIndex; import io.druid.segment.QueryableIndexSegment; import io.druid.segment.TestIndex; import io.druid.segment.incremental.IncrementalIndex; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -249,7 +249,7 @@ public class SearchQueryRunnerWithCaseTest ); for (Result result : results) { - Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), result.getTimestamp()); + Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), result.getTimestamp()); Assert.assertNotNull(result.getValue()); Iterable resultValues = result.getValue(); diff --git a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java index 9930097e2d1..f924626f23c 100644 --- a/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java +++ b/processing/src/test/java/io/druid/query/select/MultiSegmentSelectQueryTest.java @@ -26,6 +26,8 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.CharSource; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; 
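One more recurring substitution worth noting before the remaining hunks: in SegmentMetadataQueryTest above, new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT) becomes Intervals.ETERNITY. A sketch of such a constant follows; the bound values are assumptions standing in for JodaUtils.MIN_INSTANT and MAX_INSTANT, not values copied from the source:

// Illustrative sketch: an "eternity" interval constant in the spirit of Intervals.ETERNITY.
// MIN_INSTANT and MAX_INSTANT below are assumed stand-ins for JodaUtils' constants.
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

final class EternitySketch
{
  private EternitySketch() {}

  // Using half of Long's range on each side leaves headroom for Joda duration arithmetic.
  static final long MIN_INSTANT = Long.MIN_VALUE / 2;
  static final long MAX_INSTANT = Long.MAX_VALUE / 2;

  static final Interval ETERNITY =
      new Interval(MIN_INSTANT, MAX_INSTANT, ISOChronology.getInstanceUTC());
}

A single shared constant keeps the "default interval" assertions above independent of how each test would otherwise construct its bounds.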
@@ -51,7 +53,6 @@ import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.SingleElementPartitionChunk; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.AfterClass; import org.junit.Assert; @@ -153,7 +154,7 @@ public class MultiSegmentSelectQueryTest timeline.add(index2.getInterval(), "v2", new SingleElementPartitionChunk(segment_override)); segmentIdentifiers = Lists.newArrayList(); - for (TimelineObjectHolder holder : timeline.lookup(new Interval("2011-01-12/2011-01-14"))) { + for (TimelineObjectHolder holder : timeline.lookup(Intervals.of("2011-01-12/2011-01-14"))) { segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion())); } @@ -184,7 +185,7 @@ public class MultiSegmentSelectQueryTest private static IncrementalIndex newIndex(String minTimeStamp, int maxRowCount) { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime(minTimeStamp).getMillis()) + .withMinTimestamp(DateTimes.of(minTimeStamp).getMillis()) .withQueryGranularity(Granularities.HOUR) .withMetrics(TestIndex.METRIC_AGGS) .build(); diff --git a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java index aa7f5eafe29..bc8f0ef0c87 100644 --- a/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectBinaryFnTest.java @@ -23,10 +23,10 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; @@ -50,7 +50,7 @@ public class SelectBinaryFnTest SelectBinaryFn binaryFn = new SelectBinaryFn(Granularities.ALL, new PagingSpec(null, 5), false); Result res1 = new Result<>( - new DateTime("2013-01-01"), + DateTimes.of("2013-01-01"), new SelectResultValue( ImmutableMap.of(), Sets.newHashSet("first", "fourth"), @@ -61,7 +61,7 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), + DateTimes.of("2013-01-01T00"), "dim", "first" ) @@ -71,7 +71,7 @@ public class SelectBinaryFnTest 1, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T03"), + DateTimes.of("2013-01-01T03"), "dim", "fourth" ) @@ -81,7 +81,7 @@ public class SelectBinaryFnTest 2, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T05"), + DateTimes.of("2013-01-01T05"), "dim", "sixth" ) @@ -92,7 +92,7 @@ public class SelectBinaryFnTest Result res2 = new Result<>( - new DateTime("2013-01-01"), + DateTimes.of("2013-01-01"), new SelectResultValue( ImmutableMap.of(), Sets.newHashSet("second", "third"), @@ -103,7 +103,7 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), + DateTimes.of("2013-01-01T00"), "dim", "second" ) @@ -113,7 +113,7 @@ public class SelectBinaryFnTest 1, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T02"), + DateTimes.of("2013-01-01T02"), "dim", "third" ) @@ -123,7 +123,7 @@ public class SelectBinaryFnTest 2, ImmutableMap.of( EventHolder.timestampKey, - new 
DateTime("2013-01-01T04"), + DateTimes.of("2013-01-01T04"), "dim", "fifth" ) @@ -159,7 +159,7 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), "dim", "first" + DateTimes.of("2013-01-01T00"), "dim", "first" ) ), new EventHolder( @@ -167,7 +167,7 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), + DateTimes.of("2013-01-01T00"), "dim", "second" ) @@ -177,7 +177,7 @@ public class SelectBinaryFnTest 1, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T02"), + DateTimes.of("2013-01-01T02"), "dim", "third" ) @@ -187,7 +187,7 @@ public class SelectBinaryFnTest 1, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T03"), + DateTimes.of("2013-01-01T03"), "dim", "fourth" ) @@ -197,7 +197,7 @@ public class SelectBinaryFnTest 2, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T04"), + DateTimes.of("2013-01-01T04"), "dim", "fifth" ) @@ -216,7 +216,7 @@ public class SelectBinaryFnTest SelectBinaryFn binaryFn = new SelectBinaryFn(Granularities.ALL, new PagingSpec(null, 5), false); Result res1 = new Result<>( - new DateTime("2013-01-01"), + DateTimes.of("2013-01-01"), new SelectResultValue( ImmutableMap.of(), Sets.newHashSet("first", "second", "fourth"), @@ -227,14 +227,14 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), "dim", "first" + DateTimes.of("2013-01-01T00"), "dim", "first" ) )) ) ); Result res2 = new Result<>( - new DateTime("2013-01-01"), + DateTimes.of("2013-01-01"), new SelectResultValue( ImmutableMap.of(), Sets.newHashSet("third", "second", "fifth"), @@ -245,7 +245,7 @@ public class SelectBinaryFnTest 0, ImmutableMap.of( EventHolder.timestampKey, - new DateTime("2013-01-01T00"), + DateTimes.of("2013-01-01T00"), "dim", "second" ) diff --git a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java index 0fdeb9e9d65..3b598d4da2b 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java @@ -29,7 +29,9 @@ import com.google.common.collect.Maps; import com.google.common.collect.ObjectArrays; import com.google.common.collect.Sets; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequences; import io.druid.js.JavaScriptConfig; import io.druid.query.Druids; @@ -57,7 +59,7 @@ import io.druid.segment.column.Column; import io.druid.segment.column.ValueType; import io.druid.segment.virtual.ExpressionVirtualColumn; import org.joda.time.DateTime; -import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -108,9 +110,7 @@ public class SelectQueryRunnerTest "2011-01-13T00:00:00.000Z\tupfront\tpremium\t1600\t16000.0\t160000\tpreferred\tppreferred\t1564.617729\tvalue" }; - public static final QuerySegmentSpec I_0112_0114 = new LegacySegmentSpec( - new Interval("2011-01-12/2011-01-14") - ); + public static final QuerySegmentSpec I_0112_0114 = new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")); public static final String[] V_0112_0114 = ObjectArrays.concat(V_0112, V_0113, String.class); private 
static final boolean DEFAULT_FROM_NEXT = true; @@ -282,7 +282,7 @@ public class SelectQueryRunnerTest List> expectedResultsAsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2), Sets.newHashSet("mar", "qual", "place"), @@ -292,7 +292,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 0, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("mar", "spot") .put("qual", "automotive0") .put("place", "preferred") @@ -303,7 +303,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("mar", "spot") .put("qual", "business0") .put("place", "preferred") @@ -314,7 +314,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("mar", "spot") .put("qual", "entertainment0") .put("place", "preferred") @@ -328,7 +328,7 @@ public class SelectQueryRunnerTest List> expectedResultsDsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, -3), Sets.newHashSet("mar", "qual", "place"), @@ -338,7 +338,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-04-15T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-04-15T00:00:00.000Z")) .put("mar", "upfront") .put("qual", "premium0") .put("place", "preferred") @@ -349,7 +349,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-04-15T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-04-15T00:00:00.000Z")) .put("mar", "upfront") .put("qual", "mezzanine0") .put("place", "preferred") @@ -360,7 +360,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -3, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-04-15T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-04-15T00:00:00.000Z")) .put("mar", "total_market") .put("qual", "premium0") .put("place", "preferred") @@ -649,7 +649,7 @@ public class SelectQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(), Sets.newHashSet( @@ -737,7 +737,7 @@ public class SelectQueryRunnerTest List> expectedResultsAsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2), Sets.newHashSet("null_column", "floatIndex", "longTime"), @@ -747,7 +747,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 0, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, 
DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", 1294790400000L) .put("floatIndex", 100.0f) .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -758,7 +758,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", 1294790400000L) .put("floatIndex", 100.0f) .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -769,7 +769,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", 1294790400000L) .put("floatIndex", 100.0f) .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -783,7 +783,7 @@ public class SelectQueryRunnerTest List> expectedResultsDsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, -3), Sets.newHashSet("null_column", "floatIndex", "longTime"), @@ -793,7 +793,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", 1294876800000L) .put("floatIndex", 1564.6177f) .put(QueryRunnerTestHelper.indexMetric, 1564.6177f) @@ -804,7 +804,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", 1294876800000L) .put("floatIndex", 826.0602f) .put(QueryRunnerTestHelper.indexMetric, 826.0602f) @@ -815,7 +815,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -3, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", 1294876800000L) .put("floatIndex", 1689.0128f) .put(QueryRunnerTestHelper.indexMetric, 1689.0128f) @@ -855,7 +855,7 @@ public class SelectQueryRunnerTest List> expectedResultsAsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2), Sets.newHashSet("null_column", "floatIndex", "longTime"), @@ -865,7 +865,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 0, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", "super-1294790400000") .put("floatIndex", "super-100") .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -876,7 +876,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", "super-1294790400000") .put("floatIndex", "super-100") .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -887,7 +887,7 @@ public class 
SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, 2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-12T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", "super-1294790400000") .put("floatIndex", "super-100") .put(QueryRunnerTestHelper.indexMetric, 100.000000F) @@ -901,7 +901,7 @@ public class SelectQueryRunnerTest List> expectedResultsDsc = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, -3), Sets.newHashSet("null_column", "floatIndex", "longTime"), @@ -911,7 +911,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", "super-1294876800000") .put("floatIndex", "super-1564.617729") .put(QueryRunnerTestHelper.indexMetric, 1564.6177f) @@ -922,7 +922,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", "super-1294876800000") .put("floatIndex", "super-826.060182") .put(QueryRunnerTestHelper.indexMetric, 826.0602f) @@ -933,7 +933,7 @@ public class SelectQueryRunnerTest QueryRunnerTestHelper.segmentId, -3, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, new DateTime("2011-01-13T00:00:00.000Z")) + .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", "super-1294876800000") .put("floatIndex", "super-1689.012875") .put(QueryRunnerTestHelper.indexMetric, 1689.0128f) @@ -993,7 +993,7 @@ public class SelectQueryRunnerTest event.put( specs[0], specs.length == 1 || specs[1].equals("STRING") ? values[i] : - specs[1].equals("TIME") ? new DateTime(values[i]) : + specs[1].equals("TIME") ? DateTimes.of(values[i]) : specs[1].equals("FLOAT") ? Float.valueOf(values[i]) : specs[1].equals("DOUBLE") ? Double.valueOf(values[i]) : specs[1].equals("LONG") ? Long.valueOf(values[i]) : @@ -1042,7 +1042,7 @@ public class SelectQueryRunnerTest int lastOffset = holders.isEmpty() ? 
offset : holders.get(holders.size() - 1).getOffset(); expected.add( new Result( - new DateTime(group.get(0).get(EventHolder.timestampKey)), + new DateTime(group.get(0).get(EventHolder.timestampKey), ISOChronology.getInstanceUTC()), new SelectResultValue( ImmutableMap.of(QueryRunnerTestHelper.segmentId, lastOffset), Sets.newHashSet(dimensions), diff --git a/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java b/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java index 96bb12a3817..6f352392e8f 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQuerySpecTest.java @@ -22,11 +22,11 @@ package io.druid.query.select; import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.TableDataSource; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.spec.LegacySegmentSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -77,7 +77,7 @@ public class SelectQuerySpecTest SelectQuery query = new SelectQuery( new TableDataSource(QueryRunnerTestHelper.dataSource), - new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")), + new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")), true, null, QueryRunnerTestHelper.allGran, @@ -126,7 +126,7 @@ public class SelectQuerySpecTest SelectQuery queryWithNull = new SelectQuery( new TableDataSource(QueryRunnerTestHelper.dataSource), - new LegacySegmentSpec(new Interval("2011-01-12/2011-01-14")), + new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")), true, null, QueryRunnerTestHelper.allGran, diff --git a/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java b/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java index 5c92b5e5275..fb5082c22a4 100644 --- a/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java +++ b/processing/src/test/java/io/druid/query/spec/QuerySegmentSpecTest.java @@ -23,8 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.query.SegmentDescriptor; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -44,7 +44,7 @@ public class QuerySegmentSpecTest ); Assert.assertTrue(spec instanceof LegacySegmentSpec); Assert.assertEquals( - ImmutableList.of(new Interval("2011-10-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")), + ImmutableList.of(Intervals.of("2011-10-01/2011-10-10"), Intervals.of("2011-11-01/2011-11-10")), spec.getIntervals() ); } @@ -57,7 +57,7 @@ public class QuerySegmentSpecTest ); Assert.assertTrue(spec instanceof LegacySegmentSpec); Assert.assertEquals( - ImmutableList.of(new Interval("2011-09-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")), + ImmutableList.of(Intervals.of("2011-09-01/2011-10-10"), Intervals.of("2011-11-01/2011-11-10")), spec.getIntervals() ); } @@ -71,7 +71,7 @@ public class QuerySegmentSpecTest ); Assert.assertTrue(spec instanceof MultipleIntervalSegmentSpec); Assert.assertEquals( - ImmutableList.of(new Interval("2011-08-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")), + 
ImmutableList.of(Intervals.of("2011-08-01/2011-10-10"), Intervals.of("2011-11-01/2011-11-10")), spec.getIntervals() ); } @@ -106,14 +106,14 @@ public class QuerySegmentSpecTest ); Assert.assertTrue(spec instanceof MultipleSpecificSegmentSpec); Assert.assertEquals( - ImmutableList.of(new Interval("2011-07-01/2011-10-10"), new Interval("2011-11-01/2011-11-10")), + ImmutableList.of(Intervals.of("2011-07-01/2011-10-10"), Intervals.of("2011-11-01/2011-11-10")), spec.getIntervals() ); Assert.assertEquals( ImmutableList.of( - new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 0), - new SegmentDescriptor(new Interval("2011-07-01/2011-10-10"), "1", 1), - new SegmentDescriptor(new Interval("2011-11-01/2011-11-10"), "2", 10) + new SegmentDescriptor(Intervals.of("2011-07-01/2011-10-10"), "1", 0), + new SegmentDescriptor(Intervals.of("2011-07-01/2011-10-10"), "1", 1), + new SegmentDescriptor(Intervals.of("2011-11-01/2011-11-10"), "2", 10) ), ((MultipleSpecificSegmentSpec) spec).getDescriptors() ); diff --git a/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java b/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java index 76591d64405..f7f4dd0b12d 100644 --- a/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/spec/SpecificSegmentQueryRunnerTest.java @@ -25,6 +25,8 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; @@ -43,8 +45,6 @@ import io.druid.query.timeseries.TimeseriesQuery; import io.druid.query.timeseries.TimeseriesResultBuilder; import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.segment.SegmentMissingException; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -60,7 +60,7 @@ public class SpecificSegmentQueryRunnerTest { final ObjectMapper mapper = new DefaultObjectMapper(); SegmentDescriptor descriptor = new SegmentDescriptor( - new Interval("2012-01-01T00:00:00Z/P1D"), + Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0 ); @@ -100,7 +100,7 @@ public class SpecificSegmentQueryRunnerTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("foo") .granularity(Granularities.ALL) - .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D"))) + .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D"))) .aggregators( ImmutableList.of( new CountAggregatorFactory("rows") @@ -135,13 +135,13 @@ public class SpecificSegmentQueryRunnerTest { final ObjectMapper mapper = new DefaultObjectMapper(); SegmentDescriptor descriptor = new SegmentDescriptor( - new Interval("2012-01-01T00:00:00Z/P1D"), + Intervals.of("2012-01-01T00:00:00Z/P1D"), "version", 0 ); TimeseriesResultBuilder builder = new TimeseriesResultBuilder( - new DateTime("2012-01-01T00:00:00Z") + DateTimes.of("2012-01-01T00:00:00Z") ); CountAggregator rows = new CountAggregator(); rows.aggregate(); @@ -177,7 +177,7 @@ public class SpecificSegmentQueryRunnerTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("foo") .granularity(Granularities.ALL) - 
.intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D"))) + .intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D"))) .aggregators( ImmutableList.of( new CountAggregatorFactory("rows") diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java index 843083b8128..1742fa41107 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryQueryToolChestTest.java @@ -23,13 +23,14 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.query.CacheStrategy; import io.druid.query.Druids; import io.druid.query.Result; import io.druid.query.TableDataSource; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.timeline.LogicalSegment; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -89,25 +90,25 @@ public class TimeBoundaryQueryQueryToolChestTest List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( TIME_BOUNDARY_QUERY, Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")), - createLogicalSegment(new Interval("2013-01-02/P1D")), - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-02/P1D")), + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ) ); Assert.assertEquals(6, segments.size()); List expected = Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ); for (int i = 0; i < segments.size(); i++) { @@ -121,22 +122,22 @@ public class TimeBoundaryQueryQueryToolChestTest List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( MAXTIME_BOUNDARY_QUERY, Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")), - createLogicalSegment(new Interval("2013-01-02/P1D")), - createLogicalSegment(new 
Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-02/P1D")), + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ) ); Assert.assertEquals(3, segments.size()); List expected = Arrays.asList( - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ); for (int i = 0; i < segments.size(); i++) { @@ -150,22 +151,22 @@ public class TimeBoundaryQueryQueryToolChestTest List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( MINTIME_BOUNDARY_QUERY, Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")), - createLogicalSegment(new Interval("2013-01-02/P1D")), - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-02/P1D")), + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ) ); Assert.assertEquals(3, segments.size()); List expected = Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")) ); for (int i = 0; i < segments.size(); i++) { @@ -179,13 +180,13 @@ public class TimeBoundaryQueryQueryToolChestTest List segments = new TimeBoundaryQueryQueryToolChest().filterSegments( FILTERED_BOUNDARY_QUERY, Arrays.asList( - createLogicalSegment(new Interval("2013-01-01/P1D")), - createLogicalSegment(new Interval("2013-01-01T01/PT1H")), - createLogicalSegment(new Interval("2013-01-01T02/PT1H")), - createLogicalSegment(new Interval("2013-01-02/P1D")), - createLogicalSegment(new Interval("2013-01-03T01/PT1H")), - createLogicalSegment(new Interval("2013-01-03T02/PT1H")), - createLogicalSegment(new Interval("2013-01-03/P1D")) + createLogicalSegment(Intervals.of("2013-01-01/P1D")), + createLogicalSegment(Intervals.of("2013-01-01T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-01T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-02/P1D")), + createLogicalSegment(Intervals.of("2013-01-03T01/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03T02/PT1H")), + createLogicalSegment(Intervals.of("2013-01-03/P1D")) ) ); @@ -198,13 +199,7 @@ public class 
TimeBoundaryQueryQueryToolChestTest new TimeBoundaryQueryQueryToolChest().getCacheStrategy( new TimeBoundaryQuery( new TableDataSource("dummy"), - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, null, null @@ -212,10 +207,10 @@ public class TimeBoundaryQueryQueryToolChestTest ); final Result result = new Result<>( - new DateTime(123L), new TimeBoundaryResultValue( + DateTimes.utc(123L), new TimeBoundaryResultValue( ImmutableMap.of( - TimeBoundaryQuery.MIN_TIME, new DateTime(0L).toString(), - TimeBoundaryQuery.MAX_TIME, new DateTime("2015-01-01").toString() + TimeBoundaryQuery.MIN_TIME, DateTimes.EPOCH.toString(), + TimeBoundaryQuery.MAX_TIME, DateTimes.of("2015-01-01").toString() ) ) ); diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index 7e7273ab375..245ae21007f 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -24,6 +24,8 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.MapMaker; import com.google.common.io.CharSource; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -115,7 +117,7 @@ public class TimeBoundaryQueryRunnerTest private static IncrementalIndex newIndex(String minTimeStamp, int maxRowCount) { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime(minTimeStamp).getMillis()) + .withMinTimestamp(DateTimes.of(minTimeStamp).getMillis()) .withQueryGranularity(Granularities.HOUR) .withMetrics(TestIndex.METRIC_AGGS) .build(); @@ -157,7 +159,7 @@ public class TimeBoundaryQueryRunnerTest timeline.add(index1.getInterval(), "v1", new SingleElementPartitionChunk(segment1)); segmentIdentifiers = Lists.newArrayList(); - for (TimelineObjectHolder holder : timeline.lookup(new Interval("2011-01-12/2011-01-17"))) { + for (TimelineObjectHolder holder : timeline.lookup(Intervals.of("2011-01-12/2011-01-17"))) { segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion())); } @@ -186,8 +188,8 @@ public class TimeBoundaryQueryRunnerTest DateTime minTime = val.getMinTime(); DateTime maxTime = val.getMaxTime(); - Assert.assertEquals(new DateTime("2011-01-13T00:00:00.000Z"), minTime); - Assert.assertEquals(new DateTime("2011-01-16T00:00:00.000Z"), maxTime); + Assert.assertEquals(DateTimes.of("2011-01-13T00:00:00.000Z"), minTime); + Assert.assertEquals(DateTimes.of("2011-01-16T00:00:00.000Z"), maxTime); } @Test @@ -226,8 +228,8 @@ public class TimeBoundaryQueryRunnerTest DateTime minTime = val.getMinTime(); DateTime maxTime = val.getMaxTime(); - Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), minTime); - Assert.assertEquals(new DateTime("2011-04-15T00:00:00.000Z"), maxTime); + Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), minTime); + Assert.assertEquals(DateTimes.of("2011-04-15T00:00:00.000Z"), maxTime); } @Test @@ -249,7 +251,7 @@ public class TimeBoundaryQueryRunnerTest DateTime maxTime = val.getMaxTime(); 
Assert.assertNull(minTime); - Assert.assertEquals(new DateTime("2011-04-15T00:00:00.000Z"), maxTime); + Assert.assertEquals(DateTimes.of("2011-04-15T00:00:00.000Z"), maxTime); } @Test @@ -270,7 +272,7 @@ public class TimeBoundaryQueryRunnerTest DateTime minTime = val.getMinTime(); DateTime maxTime = val.getMaxTime(); - Assert.assertEquals(new DateTime("2011-01-12T00:00:00.000Z"), minTime); + Assert.assertEquals(DateTimes.of("2011-01-12T00:00:00.000Z"), minTime); Assert.assertNull(maxTime); } @@ -279,7 +281,7 @@ public class TimeBoundaryQueryRunnerTest { List> results = Arrays.asList( new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeBoundaryResultValue( ImmutableMap.of( "maxTime", "2012-01-01", @@ -288,7 +290,7 @@ public class TimeBoundaryQueryRunnerTest ) ), new Result<>( - new DateTime(), + DateTimes.nowUtc(), new TimeBoundaryResultValue( ImmutableMap.of( "maxTime", "2012-02-01", @@ -301,7 +303,7 @@ public class TimeBoundaryQueryRunnerTest TimeBoundaryQuery query = new TimeBoundaryQuery(new TableDataSource("test"), null, null, null, null); Iterable> actual = query.mergeResults(results); - Assert.assertTrue(actual.iterator().next().getValue().getMaxTime().equals(new DateTime("2012-02-01"))); + Assert.assertTrue(actual.iterator().next().getValue().getMaxTime().equals(DateTimes.of("2012-02-01"))); } @Test diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java index 80464169b61..0fbdb3d039b 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java @@ -22,6 +22,7 @@ package io.druid.query.timeseries; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -36,7 +37,6 @@ import io.druid.query.UnionQueryRunner; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -108,13 +108,13 @@ public class TimeSeriesUnionQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 52L, "idx", 26476L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 52L, "idx", 23308L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -157,25 +157,25 @@ public class TimeSeriesUnionQueryRunnerTest QueryToolChest toolChest = new TimeseriesQueryQueryToolChest(QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator()); final List> ds1 = Lists.newArrayList( new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue(ImmutableMap.of("rows", 1L, "idx", 2L)) ), new Result<>( - new DateTime("2011-04-03"), + DateTimes.of("2011-04-03"), new TimeseriesResultValue(ImmutableMap.of("rows", 3L, "idx", 4L)) ) ); final List> ds2 = Lists.newArrayList( new Result<>( - new DateTime("2011-04-01"), + 
DateTimes.of("2011-04-01"), new TimeseriesResultValue(ImmutableMap.of("rows", 5L, "idx", 6L)) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue(ImmutableMap.of("rows", 7L, "idx", 8L)) ), new Result<>( - new DateTime("2011-04-04"), + DateTimes.of("2011-04-04"), new TimeseriesResultValue(ImmutableMap.of("rows", 9L, "idx", 10L)) ) ); @@ -202,25 +202,25 @@ public class TimeSeriesUnionQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 5L, "idx", 6L) ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 8L, "idx", 10L) ) ), new Result<>( - new DateTime("2011-04-03"), + DateTimes.of("2011-04-03"), new TimeseriesResultValue( ImmutableMap.of("rows", 3L, "idx", 4L) ) ), new Result<>( - new DateTime("2011-04-04"), + DateTimes.of("2011-04-04"), new TimeseriesResultValue( ImmutableMap.of("rows", 9L, "idx", 10L) ) diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java index 7d70579b79d..0a4e5ae826a 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesBinaryFnTest.java @@ -20,6 +20,7 @@ package io.druid.query.timeseries; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; @@ -42,7 +43,7 @@ public class TimeseriesBinaryFnTest rowsCount, indexLongSum ); - final DateTime currTime = new DateTime(); + final DateTime currTime = DateTimes.nowUtc(); @Test public void testMerge() diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java index 38c315433e4..b23cb0fe28f 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChestTest.java @@ -22,6 +22,8 @@ package io.druid.query.timeseries; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.CacheStrategy; import io.druid.query.Druids; @@ -33,8 +35,6 @@ import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.spec.MultipleIntervalSegmentSpec; import io.druid.segment.TestHelper; import io.druid.segment.VirtualColumns; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -68,13 +68,7 @@ public class TimeseriesQueryQueryToolChestTest TOOL_CHEST.getCacheStrategy( new TimeseriesQuery( new TableDataSource("dummy"), - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), descending, VirtualColumns.EMPTY, null, @@ -90,7 +84,7 @@ public 
class TimeseriesQueryQueryToolChestTest final Result result = new Result<>( // test timestamps that result in integer size millis - new DateTime(123L), + DateTimes.utc(123L), new TimeseriesResultValue( ImmutableMap.of("metric1", 2, "metric0", 3) ) diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java index 526751ba929..8b218d29550 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerBonusTest.java @@ -23,6 +23,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -39,8 +41,6 @@ import io.druid.segment.IncrementalIndexSegment; import io.druid.segment.Segment; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -73,7 +73,7 @@ public class TimeseriesQueryRunnerBonusTest final IncrementalIndex oneRowIndex = new IncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime("2012-01-01T00:00:00Z").getMillis()) + .withMinTimestamp(DateTimes.of("2012-01-01T00:00:00Z").getMillis()) .build() ) .setMaxRowCount(1000) @@ -83,7 +83,7 @@ public class TimeseriesQueryRunnerBonusTest oneRowIndex.add( new MapBasedInputRow( - new DateTime("2012-01-01T00:00:00Z").getMillis(), + DateTimes.of("2012-01-01T00:00:00Z").getMillis(), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "x") ) @@ -93,12 +93,12 @@ public class TimeseriesQueryRunnerBonusTest Assert.assertEquals("index size", 1, oneRowIndex.size()); Assert.assertEquals("result size", 1, results.size()); - Assert.assertEquals("result timestamp", new DateTime("2012-01-01T00:00:00Z"), results.get(0).getTimestamp()); + Assert.assertEquals("result timestamp", DateTimes.of("2012-01-01T00:00:00Z"), results.get(0).getTimestamp()); Assert.assertEquals("result count metric", 1, (long) results.get(0).getValue().getLongMetric("rows")); oneRowIndex.add( new MapBasedInputRow( - new DateTime("2012-01-01T00:00:00Z").getMillis(), + DateTimes.of("2012-01-01T00:00:00Z").getMillis(), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "y") ) @@ -108,7 +108,7 @@ public class TimeseriesQueryRunnerBonusTest Assert.assertEquals("index size", 2, oneRowIndex.size()); Assert.assertEquals("result size", 1, results.size()); - Assert.assertEquals("result timestamp", new DateTime("2012-01-01T00:00:00Z"), results.get(0).getTimestamp()); + Assert.assertEquals("result timestamp", DateTimes.of("2012-01-01T00:00:00Z"), results.get(0).getTimestamp()); Assert.assertEquals("result count metric", 2, (long) results.get(0).getValue().getLongMetric("rows")); } @@ -129,7 +129,7 @@ public class TimeseriesQueryRunnerBonusTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("xxx") .granularity(Granularities.ALL) - .intervals(ImmutableList.of(new Interval("2012-01-01T00:00:00Z/P1D"))) + 
.intervals(ImmutableList.of(Intervals.of("2012-01-01T00:00:00Z/P1D"))) .aggregators( ImmutableList.of( new CountAggregatorFactory("rows") diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 2c98d74facf..8d7b57056fb 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -23,8 +23,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import io.druid.java.util.common.StringUtils; import com.google.common.primitives.Doubles; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; @@ -146,7 +148,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = ImmutableList.of( new Result<>( - new DateTime("2020-04-02"), + DateTimes.of("2020-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -289,8 +291,8 @@ public class TimeseriesQueryRunnerTest .descending(descending) .build(); - DateTime expectedEarliest = new DateTime("2011-01-12"); - DateTime expectedLast = new DateTime("2011-04-15"); + DateTime expectedEarliest = DateTimes.of("2011-01-12"); + DateTime expectedLast = DateTimes.of("2011-04-15"); Iterable> results = Sequences.toList( runner.run(QueryPlus.wrap(query), CONTEXT), @@ -392,13 +394,13 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -440,13 +442,13 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -517,9 +519,7 @@ public class TimeseriesQueryRunnerTest .granularity(new PeriodGranularity(new Period("P1M"), null, null)) .intervals( Collections.singletonList( - new Interval( - "2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z" - ) + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) .aggregators( @@ -537,7 +537,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults1 = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -555,9 +555,7 @@ public class TimeseriesQueryRunnerTest .granularity("DAY") .intervals( Collections.singletonList( - new Interval( - "2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z" - ) 
+ Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) .aggregators( @@ -574,7 +572,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults2 = Collections.singletonList( new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -603,9 +601,7 @@ public class TimeseriesQueryRunnerTest ) .intervals( Collections.singletonList( - new Interval( - "2011-01-12T00:00:00.000-08:00/2011-01-20T00:00:00.000-08:00" - ) + Intervals.of("2011-01-12T00:00:00.000-08:00/2011-01-20T00:00:00.000-08:00") ) ) .aggregators( @@ -651,9 +647,7 @@ public class TimeseriesQueryRunnerTest .granularity(Granularities.HOUR) .intervals( Collections.singletonList( - new Interval( - "2011-04-14T00:00:00.000Z/2011-05-01T00:00:00.000Z" - ) + Intervals.of("2011-04-14T00:00:00.000Z/2011-05-01T00:00:00.000Z") ) ) .aggregators( @@ -669,7 +663,9 @@ public class TimeseriesQueryRunnerTest .build(); List> lotsOfZeroes = Lists.newArrayList(); - final Iterable iterable = Granularities.HOUR.getIterable(new Interval(new DateTime("2011-04-14T01").getMillis(), new DateTime("2011-04-15").getMillis())); + final Iterable iterable = Granularities.HOUR.getIterable( + new Interval(DateTimes.of("2011-04-14T01"), DateTimes.of("2011-04-15")) + ); for (Interval interval : iterable) { lotsOfZeroes.add( new Result<>( @@ -685,7 +681,7 @@ public class TimeseriesQueryRunnerTest Iterables.concat( Collections.singletonList( new Result<>( - new DateTime("2011-04-14T00"), + DateTimes.of("2011-04-14T00"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 4907L) ) @@ -694,7 +690,7 @@ public class TimeseriesQueryRunnerTest lotsOfZeroes, Collections.singletonList( new Result<>( - new DateTime("2011-04-15T00"), + DateTimes.of("2011-04-15T00"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 4717L) ) @@ -719,17 +715,11 @@ public class TimeseriesQueryRunnerTest .granularity( new PeriodGranularity( new Period("PT1H"), - new DateTime(60000), + DateTimes.utc(60000), DateTimeZone.UTC ) ) - .intervals( - Collections.singletonList( - new Interval( - "2011-04-15T00:00:00.000Z/2012" - ) - ) - ) + .intervals(Collections.singletonList(Intervals.of("2011-04-15T00:00:00.000Z/2012"))) .aggregators( Arrays.asList( QueryRunnerTestHelper.rowsCount, @@ -744,7 +734,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults1 = Collections.singletonList( new Result<>( - new DateTime("2011-04-14T23:01Z"), + DateTimes.of("2011-04-14T23:01Z"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 4717L) ) @@ -767,9 +757,7 @@ public class TimeseriesQueryRunnerTest .granularity(new PeriodGranularity(new Period("P1M"), null, null)) .intervals( Collections.singletonList( - new Interval( - "2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z" - ) + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) .aggregators( @@ -787,7 +775,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults1 = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -805,9 +793,7 @@ public class TimeseriesQueryRunnerTest .granularity("DAY") .intervals( Collections.singletonList( - new Interval( - "2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z" - ) + Intervals.of("2011-04-02T00:00:00.000Z/2011-04-03T00:00:00.000Z") ) ) 
.aggregators( @@ -824,7 +810,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults2 = Collections.singletonList( new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -846,11 +832,7 @@ public class TimeseriesQueryRunnerTest .granularity(QueryRunnerTestHelper.dayGran) .intervals( new MultipleIntervalSegmentSpec( - Collections.singletonList( - new Interval( - "2015-01-01/2015-01-10" - ) - ) + Collections.singletonList(Intervals.of("2015-01-01/2015-01-10")) ) ) .aggregators( @@ -896,7 +878,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -907,7 +889,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -947,7 +929,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 11L, @@ -958,7 +940,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 11L, @@ -999,7 +981,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 9L, @@ -1010,7 +992,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 9L, @@ -1051,7 +1033,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1062,7 +1044,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1103,7 +1085,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1114,7 +1096,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1163,7 +1145,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1174,7 +1156,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1223,7 +1205,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 1L, @@ -1234,7 +1216,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new 
TimeseriesResultValue( ImmutableMap.of( "rows", 1L, @@ -1283,7 +1265,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 1L, @@ -1294,7 +1276,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 1L, @@ -1341,7 +1323,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1352,7 +1334,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1405,7 +1387,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1416,7 +1398,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1469,7 +1451,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1480,7 +1462,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 2L, @@ -1515,7 +1497,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1526,7 +1508,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1585,7 +1567,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1596,7 +1578,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1631,7 +1613,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1642,7 +1624,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 13L, @@ -1677,7 +1659,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1688,7 +1670,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1737,7 +1719,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = 
Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1748,7 +1730,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 0L, @@ -1786,11 +1768,7 @@ public class TimeseriesQueryRunnerTest Iterable> expectedResults = ImmutableList.of( new Result<>( - new DateTime( - QueryRunnerTestHelper.firstToThird.getIntervals() - .get(0) - .getStart() - ), + QueryRunnerTestHelper.firstToThird.getIntervals().get(0).getStart(), new TimeseriesResultValue( ImmutableMap.of( "index", 12459.361190795898d, @@ -1828,11 +1806,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = ImmutableList.of( new Result<>( - new DateTime( - QueryRunnerTestHelper.firstToThird.getIntervals() - .get(0) - .getStart() - ), + QueryRunnerTestHelper.firstToThird.getIntervals().get(0).getStart(), new TimeseriesResultValue( ImmutableMap.of( "index", 283.31103515625d, @@ -1871,7 +1845,7 @@ public class TimeseriesQueryRunnerTest // to select different value from the list of first and last dates List> expectedAscendingResults = ImmutableList.of( new Result<>( - new DateTime("2011-01-01"), + DateTimes.of("2011-01-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(100.000000).doubleValue(), @@ -1880,7 +1854,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01"), + DateTimes.of("2011-02-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(132.123776).doubleValue(), @@ -1889,7 +1863,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01"), + DateTimes.of("2011-03-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(153.059937).doubleValue(), @@ -1898,7 +1872,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(135.885094).doubleValue(), @@ -1910,7 +1884,7 @@ public class TimeseriesQueryRunnerTest List> expectedDescendingResults = ImmutableList.of( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(1234.247546).doubleValue(), @@ -1919,7 +1893,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01"), + DateTimes.of("2011-03-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(1004.940887).doubleValue(), @@ -1928,7 +1902,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01"), + DateTimes.of("2011-02-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(913.561076).doubleValue(), @@ -1937,7 +1911,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-01-01"), + DateTimes.of("2011-01-01"), new TimeseriesResultValue( ImmutableMap.of( "first", new Float(800.000000).doubleValue(), @@ -2191,7 +2165,7 @@ public class TimeseriesQueryRunnerTest ); List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "filteredAgg", 18L, @@ -2242,7 +2216,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( 
"filteredAgg", 0L, @@ -2293,7 +2267,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "filteredAgg", 26L, @@ -2345,7 +2319,7 @@ public class TimeseriesQueryRunnerTest ); List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "filteredAgg", 26L, @@ -2397,7 +2371,7 @@ public class TimeseriesQueryRunnerTest ); List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "filteredAgg", 26L, @@ -2432,7 +2406,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", @@ -2510,7 +2484,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 9L, @@ -2521,7 +2495,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 9L, @@ -2568,7 +2542,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of( "rows", 11L, @@ -2579,7 +2553,7 @@ public class TimeseriesQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of( "rows", 11L, diff --git a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java index edaa2cf6e8a..cd99923b4af 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java +++ b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnBenchmark.java @@ -23,6 +23,7 @@ import com.google.caliper.Param; import com.google.caliper.Runner; import com.google.caliper.SimpleBenchmark; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; @@ -82,7 +83,7 @@ public class TopNBinaryFnBenchmark extends SimpleBenchmark ) ); } - final DateTime currTime = new DateTime(); + final DateTime currTime = DateTimes.nowUtc(); List> list = new ArrayList<>(); for (int i = 0; i < threshold; i++) { Map res = new HashMap<>(); diff --git a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java index c45d2f62739..be0917b904c 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNBinaryFnTest.java @@ -22,6 +22,7 @@ package io.druid.query.topn; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Result; import io.druid.query.aggregation.AggregatorFactory; @@ 
-64,7 +65,7 @@ public class TopNBinaryFnTest final List postAggregators = Arrays.asList( addrowsindexconstant ); - private final DateTime currTime = new DateTime(); + private final DateTime currTime = DateTimes.nowUtc(); private void assertTopNMergeResult(Object o1, Object o2) { diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java index 43a8dace166..e1d570bf1dd 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryQueryToolChestTest.java @@ -23,6 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.query.CacheStrategy; @@ -45,8 +47,6 @@ import io.druid.segment.IncrementalIndexSegment; import io.druid.segment.TestHelper; import io.druid.segment.TestIndex; import io.druid.segment.VirtualColumns; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -69,13 +69,7 @@ public class TopNQueryQueryToolChestTest new DefaultDimensionSpec("test", "test"), new NumericTopNMetricSpec("metric1"), 3, - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, Granularities.ALL, ImmutableList.of(new CountAggregatorFactory("metric1")), @@ -86,7 +80,7 @@ public class TopNQueryQueryToolChestTest final Result result = new Result<>( // test timestamps that result in integer size millis - new DateTime(123L), + DateTimes.utc(123L), new TopNResultValue( Arrays.asList( ImmutableMap.of( @@ -121,13 +115,7 @@ public class TopNQueryQueryToolChestTest new DefaultDimensionSpec("test", "test"), new NumericTopNMetricSpec("post"), 3, - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, Granularities.ALL, ImmutableList.of(new CountAggregatorFactory("metric1")), @@ -141,13 +129,7 @@ public class TopNQueryQueryToolChestTest new DefaultDimensionSpec("test", "test"), new NumericTopNMetricSpec("post"), 3, - new MultipleIntervalSegmentSpec( - ImmutableList.of( - new Interval( - "2015-01-01/2015-01-02" - ) - ) - ), + new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2015-01-01/2015-01-02"))), null, Granularities.ALL, ImmutableList.of(new CountAggregatorFactory("metric1")), diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index 112522f8895..998a4334919 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -31,8 +31,10 @@ import com.google.common.collect.Sets; import com.google.common.primitives.Doubles; import com.google.common.primitives.Longs; import io.druid.collections.StupidPool; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import 
io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -78,6 +80,7 @@ import io.druid.query.extraction.ExtractionFn; import io.druid.query.extraction.JavaScriptExtractionFn; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.extraction.RegexDimExtractionFn; +import io.druid.query.extraction.StringFormatExtractionFn; import io.druid.query.extraction.StrlenExtractionFn; import io.druid.query.extraction.TimeFormatExtractionFn; import io.druid.query.filter.AndDimFilter; @@ -92,8 +95,6 @@ import io.druid.segment.TestHelper; import io.druid.segment.column.Column; import io.druid.segment.column.ValueType; import io.druid.segment.virtual.ExpressionVirtualColumn; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -298,7 +299,7 @@ public class TopNQueryRunnerTest List> expectedResults = ImmutableList.of( new Result<>( - new DateTime("2020-04-02T00:00:00.000Z"), + DateTimes.of("2020-04-02T00:00:00.000Z"), new TopNResultValue(ImmutableList.of()) ) ); @@ -331,7 +332,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -377,6 +378,61 @@ public class TopNQueryRunnerTest ); } + @Test + public void testTopNOnMissingColumn() + { + TopNQuery query = new TopNQueryBuilder() + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(new DefaultDimensionSpec("nonexistentColumn", "alias")) + .metric("rows") + .threshold(4) + .intervals(QueryRunnerTestHelper.fullOnInterval) + .aggregators(Collections.singletonList(new CountAggregatorFactory("rows"))) + .build(); + + final HashMap resultMap = new HashMap<>(); + resultMap.put("alias", null); + resultMap.put("rows", 1209L); + + List> expectedResults = Collections.singletonList( + new Result<>( + DateTimes.of("2011-01-12T00:00:00.000Z"), + new TopNResultValue(Collections.>singletonList(resultMap)) + ) + ); + assertExpectedResults(expectedResults, query); + } + + @Test + public void testTopNOnMissingColumnWithExtractionFn() + { + TopNQuery query = new TopNQueryBuilder() + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(new ExtractionDimensionSpec("nonexistentColumn", "alias", new StringFormatExtractionFn("theValue"))) + .metric("rows") + .threshold(4) + .intervals(QueryRunnerTestHelper.fullOnInterval) + .aggregators(Collections.singletonList(new CountAggregatorFactory("rows"))) + .build(); + + List> expectedResults = Collections.singletonList( + new Result<>( + DateTimes.of("2011-01-12T00:00:00.000Z"), + new TopNResultValue( + Collections.>singletonList( + ImmutableMap.builder() + .put("alias", "theValue") + .put("rows", 1209L) + .build() + ) + ) + ) + ); + assertExpectedResults(expectedResults, query); + } + @Test public void testFullOnTopNOverPostAggs() { @@ -403,7 +459,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -475,7 +531,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( 
new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -538,7 +594,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -592,7 +648,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -637,7 +693,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -687,7 +743,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -712,6 +768,56 @@ public class TopNQueryRunnerTest assertExpectedResults(expectedResults, query); } + @Test + public void testTopNOverHyperUniqueExpressionRounded() + { + TopNQuery query = new TopNQueryBuilder() + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(QueryRunnerTestHelper.marketDimension) + .metric(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric) + .threshold(3) + .intervals(QueryRunnerTestHelper.fullOnInterval) + .aggregators( + Arrays.asList(QueryRunnerTestHelper.qualityUniquesRounded) + ) + .postAggregators( + Collections.singletonList(new ExpressionPostAggregator( + QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + "uniques + 1", + null, + TestExprMacroTable.INSTANCE + )) + ) + .build(); + + List> expectedResults = Arrays.asList( + new Result<>( + DateTimes.of("2011-01-12T00:00:00.000Z"), + new TopNResultValue( + Arrays.>asList( + ImmutableMap.builder() + .put("market", "spot") + .put(QueryRunnerTestHelper.uniqueMetric, 9L) + .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, 10L) + .build(), + ImmutableMap.builder() + .put("market", "total_market") + .put(QueryRunnerTestHelper.uniqueMetric, 2L) + .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, 3L) + .build(), + ImmutableMap.builder() + .put("market", "upfront") + .put(QueryRunnerTestHelper.uniqueMetric, 2L) + .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, 3L) + .build() + ) + ) + ) + ); + assertExpectedResults(expectedResults, query); + } + @Test public void testTopNOverFirstLastAggregator() { @@ -732,7 +838,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -754,7 +860,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -776,7 +882,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -798,7 +904,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new 
DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -844,7 +950,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -866,7 +972,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -888,7 +994,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -910,7 +1016,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -957,7 +1063,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -979,7 +1085,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1001,7 +1107,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1023,7 +1129,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1068,7 +1174,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-01T00:00:00.000Z"), + DateTimes.of("2011-01-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1090,7 +1196,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-02-01T00:00:00.000Z"), + DateTimes.of("2011-02-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1112,7 +1218,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-03-01T00:00:00.000Z"), + DateTimes.of("2011-03-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1134,7 +1240,7 @@ public class TopNQueryRunnerTest ) ), new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1182,7 +1288,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1287,7 +1393,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1335,7 +1441,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + 
DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1383,7 +1489,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1431,7 +1537,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1472,7 +1578,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1506,7 +1612,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1549,7 +1655,7 @@ public class TopNQueryRunnerTest .threshold(4) .intervals( new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("2011-04-01T00:00:00.000Z/2011-04-02T00:00:00.000Z")) + Arrays.asList(Intervals.of("2011-04-01T00:00:00.000Z/2011-04-02T00:00:00.000Z")) ) ) .aggregators(commonAggregators) @@ -1558,7 +1664,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1606,7 +1712,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1648,7 +1754,7 @@ public class TopNQueryRunnerTest assertExpectedResults( Lists.>newArrayList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue(Lists.>newArrayList()) ) ), query @@ -1685,7 +1791,7 @@ public class TopNQueryRunnerTest assertExpectedResults( Lists.>newArrayList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue(Lists.>newArrayList()) ) ), query @@ -1777,7 +1883,7 @@ public class TopNQueryRunnerTest final ArrayList> expectedResults = Lists.newArrayList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1818,7 +1924,7 @@ public class TopNQueryRunnerTest final ArrayList> expectedResults = Lists.newArrayList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1866,7 +1972,7 @@ public class TopNQueryRunnerTest final ArrayList> expectedResults = Lists.newArrayList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -1920,7 +2026,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Collections.>singletonList( QueryRunnerTestHelper.orderedMap( @@ -1954,7 +2060,7 @@ public class TopNQueryRunnerTest List> 
expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Collections.>singletonList( QueryRunnerTestHelper.orderedMap( @@ -1988,7 +2094,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Collections.>singletonList( QueryRunnerTestHelper.orderedMap( @@ -2021,7 +2127,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2066,7 +2172,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2101,7 +2207,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2141,7 +2247,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2181,7 +2287,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2221,7 +2327,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2270,7 +2376,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2319,7 +2425,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2384,7 +2490,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2435,7 +2541,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2486,7 +2592,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2550,7 +2656,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2615,7 +2721,7 @@ public class TopNQueryRunnerTest List> 
expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2682,7 +2788,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2748,7 +2854,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2814,7 +2920,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2881,7 +2987,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2934,7 +3040,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -2987,7 +3093,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3040,7 +3146,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3110,7 +3216,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3157,7 +3263,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3203,7 +3309,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3277,7 +3383,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3365,7 +3471,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3414,7 +3520,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3468,7 +3574,7 @@ public class TopNQueryRunnerTest List> expectedResults = Collections.singletonList( new Result<>( - new 
DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( withDuplicateResults( Arrays.>asList( @@ -3525,7 +3631,7 @@ public class TopNQueryRunnerTest List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( withDuplicateResults( Collections.singletonList( @@ -3575,7 +3681,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -3693,16 +3799,16 @@ public class TopNQueryRunnerTest List> expectedResults = Collections.singletonList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass( Collections.singletonList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), topNResult ) ), QueryRunnerTestHelper.segmentId, - new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z") + Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z") ) ) ); @@ -3733,7 +3839,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3797,7 +3903,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -3853,7 +3959,7 @@ public class TopNQueryRunnerTest map.put("minIndex", 59.02102279663086D); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map @@ -3901,7 +4007,7 @@ public class TopNQueryRunnerTest map.put("minIndex", 59.02102279663086D); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map @@ -3932,7 +4038,7 @@ public class TopNQueryRunnerTest map.put("uniques", QueryRunnerTestHelper.UNIQUES_9); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map, @@ -3970,7 +4076,7 @@ public class TopNQueryRunnerTest map.put("uniques", QueryRunnerTestHelper.UNIQUES_9); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map @@ -3997,7 +4103,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.asList( ImmutableMap.of( @@ -4030,7 +4136,7 @@ public class TopNQueryRunnerTest .build(); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-02T00:00:00.000Z"), + DateTimes.of("2011-04-02T00:00:00.000Z"), new TopNResultValue( withDuplicateResults( Arrays.asList( @@ -4069,7 +4175,7 @@ public class TopNQueryRunnerTest .build(); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-02T00:00:00.000Z"), + DateTimes.of("2011-04-02T00:00:00.000Z"), 
new TopNResultValue( withDuplicateResults( Arrays.asList( @@ -4120,7 +4226,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -4185,7 +4291,7 @@ public class TopNQueryRunnerTest map.put("minIndex", 59.02102279663086D); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map @@ -4250,7 +4356,7 @@ public class TopNQueryRunnerTest map.put("minIndex", 59.02102279663086D); List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.asList( map @@ -4290,7 +4396,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4365,7 +4471,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4437,7 +4543,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4509,7 +4615,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4582,7 +4688,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4652,7 +4758,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2011-04-01T00:00:00.000Z"), + DateTimes.of("2011-04-01T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.of( @@ -4712,7 +4818,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4784,7 +4890,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4856,7 +4962,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -4928,7 +5034,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5000,7 +5106,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + 
DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5075,7 +5181,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5163,7 +5269,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( expectedMap @@ -5202,7 +5308,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5280,7 +5386,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5349,7 +5455,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -5476,7 +5582,7 @@ public class TopNQueryRunnerTest rows.sort((r1, r2) -> ((Comparable) r2.get(metric)).compareTo(r1.get(metric))); List> expectedResults = Collections.singletonList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue(rows) ) ); @@ -5513,7 +5619,7 @@ public class TopNQueryRunnerTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue(Arrays.asList()) ) ); diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTestHelper.java index 35d541486c1..533c6709e37 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTestHelper.java @@ -22,8 +22,8 @@ package io.druid.query.topn; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.druid.java.util.common.DateTimes; import io.druid.query.Result; -import org.joda.time.DateTime; import java.util.List; import java.util.Map; @@ -43,6 +43,6 @@ public class TopNQueryRunnerTestHelper } expected.add(theVals); } - return new Result(new DateTime(date), new TopNResultValue(expected)); + return new Result(DateTimes.of(date), new TopNResultValue(expected)); } } diff --git a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java index 6758d2d9ecc..4d1804a3568 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNUnionQueryTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.collections.StupidPool; +import io.druid.java.util.common.DateTimes; import io.druid.query.QueryPlus; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; @@ -34,7 +35,6 @@ import 
io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.PostAggregator; import io.druid.segment.TestHelper; -import org.joda.time.DateTime; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -131,7 +131,7 @@ public class TopNUnionQueryTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() diff --git a/processing/src/test/java/io/druid/segment/AppendTest.java b/processing/src/test/java/io/druid/segment/AppendTest.java index b1365002263..73ff071087b 100644 --- a/processing/src/test/java/io/druid/segment/AppendTest.java +++ b/processing/src/test/java/io/druid/segment/AppendTest.java @@ -22,6 +22,8 @@ package io.druid.segment; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -54,8 +56,6 @@ import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.query.topn.TopNQuery; import io.druid.query.topn.TopNQueryBuilder; import io.druid.query.topn.TopNResultValue; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; @@ -102,7 +102,7 @@ public class AppendTest final List commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques); final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); private Segment segment; @@ -121,8 +121,8 @@ public class AppendTest new Pair("append.json.2", METRIC_AGGS) ), Arrays.asList( - new Interval("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"), - new Interval("2011-01-14T22:00:00.000Z/2011-01-16T00:00:00.000Z") + Intervals.of("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"), + Intervals.of("2011-01-14T22:00:00.000Z/2011-01-16T00:00:00.000Z") ) ); segment = new QueryableIndexSegment(null, appendedIndex); @@ -136,8 +136,8 @@ public class AppendTest new Pair("append.json.4", METRIC_AGGS) ), Arrays.asList( - new Interval("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"), - new Interval("2011-01-13T00:00:00.000Z/2011-01-14T00:00:00.000Z") + Intervals.of("2011-01-12T00:00:00.000Z/2011-01-16T00:00:00.000Z"), + Intervals.of("2011-01-13T00:00:00.000Z/2011-01-14T00:00:00.000Z") ) ); segment2 = new QueryableIndexSegment(null, append2); @@ -153,9 +153,9 @@ public class AppendTest new Pair("append.json.7", METRIC_AGGS) ), Arrays.asList( - new Interval("2011-01-12T00:00:00.000Z/2011-01-22T00:00:00.000Z"), - new Interval("2011-01-13T00:00:00.000Z/2011-01-16T00:00:00.000Z"), - new Interval("2011-01-18T00:00:00.000Z/2011-01-21T00:00:00.000Z") + Intervals.of("2011-01-12T00:00:00.000Z/2011-01-22T00:00:00.000Z"), + Intervals.of("2011-01-13T00:00:00.000Z/2011-01-16T00:00:00.000Z"), + Intervals.of("2011-01-18T00:00:00.000Z/2011-01-21T00:00:00.000Z") ) ); segment3 = new QueryableIndexSegment(null, append3); @@ -166,13 +166,13 @@ public class AppendTest { List> expectedResults = 
Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-15T02:00:00.000Z") + DateTimes.of("2011-01-15T02:00:00.000Z") ) ) ) @@ -191,13 +191,13 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-15T00:00:00.000Z") + DateTimes.of("2011-01-15T00:00:00.000Z") ) ) ) @@ -216,7 +216,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 8L) @@ -241,7 +241,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 7L) @@ -266,7 +266,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 5L) @@ -291,7 +291,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 4L) @@ -316,7 +316,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -362,7 +362,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -408,7 +408,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -436,7 +436,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Lists.>newArrayList() ) @@ -454,7 +454,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -477,7 +477,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -499,7 +499,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + 
DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementDimension, "mezzanine"), @@ -520,7 +520,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -542,7 +542,7 @@ public class AppendTest { List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 5L) diff --git a/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java b/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java new file mode 100644 index 00000000000..344c4938c8f --- /dev/null +++ b/processing/src/test/java/io/druid/segment/ConstantDimensionSelectorTest.java @@ -0,0 +1,97 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.segment; + +import io.druid.query.extraction.StringFormatExtractionFn; +import io.druid.query.extraction.SubstringDimExtractionFn; +import io.druid.segment.data.IndexedInts; +import org.junit.Assert; +import org.junit.Test; + +import java.util.Iterator; + +public class ConstantDimensionSelectorTest +{ + private final DimensionSelector NULL_SELECTOR = DimensionSelectorUtils.constantSelector(null); + private final DimensionSelector CONST_SELECTOR = DimensionSelectorUtils.constantSelector("billy"); + private final DimensionSelector NULL_EXTRACTION_SELECTOR = DimensionSelectorUtils.constantSelector( + null, + new StringFormatExtractionFn("billy") + ); + private final DimensionSelector CONST_EXTRACTION_SELECTOR = DimensionSelectorUtils.constantSelector( + "billybilly", + new SubstringDimExtractionFn(0, 5) + ); + + @Test + public void testGetRow() throws Exception + { + IndexedInts row = NULL_SELECTOR.getRow(); + Assert.assertEquals(1, row.size()); + Assert.assertEquals(0, row.get(0)); + + Iterator iter = row.iterator(); + Assert.assertEquals(true, iter.hasNext()); + Assert.assertEquals(0, iter.next().intValue()); + Assert.assertEquals(false, iter.hasNext()); + } + + @Test + public void testGetValueCardinality() throws Exception + { + Assert.assertEquals(1, NULL_SELECTOR.getValueCardinality()); + Assert.assertEquals(1, CONST_SELECTOR.getValueCardinality()); + Assert.assertEquals(1, NULL_EXTRACTION_SELECTOR.getValueCardinality()); + Assert.assertEquals(1, CONST_EXTRACTION_SELECTOR.getValueCardinality()); + } + + @Test + public void testLookupName() throws Exception + { + Assert.assertEquals(null, NULL_SELECTOR.lookupName(0)); + Assert.assertEquals("billy", CONST_SELECTOR.lookupName(0)); + Assert.assertEquals("billy", NULL_EXTRACTION_SELECTOR.lookupName(0)); + Assert.assertEquals("billy", CONST_EXTRACTION_SELECTOR.lookupName(0)); + } + + @Test + public void testLookupId() throws Exception + { + Assert.assertEquals(0, NULL_SELECTOR.idLookup().lookupId(null)); + Assert.assertEquals(0, NULL_SELECTOR.idLookup().lookupId("")); + Assert.assertEquals(-1, NULL_SELECTOR.idLookup().lookupId("billy")); + Assert.assertEquals(-1, NULL_SELECTOR.idLookup().lookupId("bob")); + + Assert.assertEquals(-1, CONST_SELECTOR.idLookup().lookupId(null)); + Assert.assertEquals(-1, CONST_SELECTOR.idLookup().lookupId("")); + Assert.assertEquals(0, CONST_SELECTOR.idLookup().lookupId("billy")); + Assert.assertEquals(-1, CONST_SELECTOR.idLookup().lookupId("bob")); + + Assert.assertEquals(-1, NULL_EXTRACTION_SELECTOR.idLookup().lookupId(null)); + Assert.assertEquals(-1, NULL_EXTRACTION_SELECTOR.idLookup().lookupId("")); + Assert.assertEquals(0, NULL_EXTRACTION_SELECTOR.idLookup().lookupId("billy")); + Assert.assertEquals(-1, NULL_EXTRACTION_SELECTOR.idLookup().lookupId("bob")); + + Assert.assertEquals(-1, CONST_EXTRACTION_SELECTOR.idLookup().lookupId(null)); + Assert.assertEquals(-1, CONST_EXTRACTION_SELECTOR.idLookup().lookupId("")); + Assert.assertEquals(0, CONST_EXTRACTION_SELECTOR.idLookup().lookupId("billy")); + Assert.assertEquals(-1, CONST_EXTRACTION_SELECTOR.idLookup().lookupId("bob")); + } +} diff --git a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java index 47c5d888b1c..99654c6ff18 100644 --- a/processing/src/test/java/io/druid/segment/EmptyIndexTest.java +++ b/processing/src/test/java/io/druid/segment/EmptyIndexTest.java @@ -22,12 +22,12 @@ package io.druid.segment; import com.google.common.collect.Iterables; import 
com.google.common.collect.Lists; import io.druid.collections.bitmap.ConciseBitmapFactory; +import io.druid.java.util.common.Intervals; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.column.Column; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexAdapter; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -53,7 +53,7 @@ public class EmptyIndexTest .buildOnheap(); IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter( - new Interval("2012-08-01/P3D"), + Intervals.of("2012-08-01/P3D"), emptyIndex, new ConciseBitmapFactory() ); @@ -69,7 +69,7 @@ public class EmptyIndexTest Assert.assertEquals("getDimensionNames", 0, Iterables.size(emptyQueryableIndex.getAvailableDimensions())); Assert.assertEquals("getMetricNames", 0, Iterables.size(emptyQueryableIndex.getColumnNames())); - Assert.assertEquals("getDataInterval", new Interval("2012-08-01/P3D"), emptyQueryableIndex.getDataInterval()); + Assert.assertEquals("getDataInterval", Intervals.of("2012-08-01/P3D"), emptyQueryableIndex.getDataInterval()); Assert.assertEquals( "getReadOnlyTimestamps", 0, diff --git a/processing/src/test/java/io/druid/segment/IndexIOTest.java b/processing/src/test/java/io/druid/segment/IndexIOTest.java index ac8289cd7fe..d4470ee9dcf 100644 --- a/processing/src/test/java/io/druid/segment/IndexIOTest.java +++ b/processing/src/test/java/io/druid/segment/IndexIOTest.java @@ -29,6 +29,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.UOE; import io.druid.query.aggregation.Aggregator; import io.druid.query.aggregation.CountAggregatorFactory; @@ -62,7 +63,7 @@ import java.util.Map; @RunWith(Parameterized.class) public class IndexIOTest { - private static Interval DEFAULT_INTERVAL = Interval.parse("1970-01-01/2000-01-01"); + private static Interval DEFAULT_INTERVAL = Intervals.of("1970-01-01/2000-01-01"); private static final IndexSpec INDEX_SPEC = IndexMergerTestBase.makeIndexSpec( new ConciseBitmapSerdeFactory(), CompressedObjectStrategy.CompressionStrategy.LZ4, diff --git a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java index c6142afb3bb..f5196d062a5 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerTestBase.java @@ -36,6 +36,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.FloatDimensionSchema; import io.druid.data.input.impl.LongDimensionSchema; import io.druid.data.input.impl.StringDimensionSchema; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; @@ -56,7 +57,6 @@ import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.incremental.IndexSizeExceededException; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; @@ -1746,7 +1746,7 @@ public class IndexMergerTestBase )); closer.closeLater(index2); - Interval interval 
= new Interval(0, new DateTime().getMillis()); + Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc()); RoaringBitmapFactory factory = new RoaringBitmapFactory(); ArrayList toMerge = Lists.newArrayList( new IncrementalIndexAdapter(interval, index1, factory), @@ -1797,7 +1797,7 @@ public class IndexMergerTestBase ); closer.closeLater(index2); - Interval interval = new Interval(0, new DateTime().getMillis()); + Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc()); RoaringBitmapFactory factory = new RoaringBitmapFactory(); ArrayList toMerge = Lists.newArrayList( new IncrementalIndexAdapter(interval, index1, factory), @@ -1867,7 +1867,7 @@ public class IndexMergerTestBase ); - Interval interval = new Interval(0, new DateTime().getMillis()); + Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc()); RoaringBitmapFactory factory = new RoaringBitmapFactory(); ArrayList toMerge = Lists.newArrayList( new IncrementalIndexAdapter(interval, index1, factory), @@ -1927,7 +1927,7 @@ public class IndexMergerTestBase closer.closeLater(index5); - Interval interval = new Interval(0, new DateTime().getMillis()); + Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc()); RoaringBitmapFactory factory = new RoaringBitmapFactory(); ArrayList toMerge = Lists.newArrayList( new IncrementalIndexAdapter(interval, index1, factory), @@ -1976,7 +1976,7 @@ public class IndexMergerTestBase closer.closeLater(index5); - Interval interval = new Interval(0, new DateTime().getMillis()); + Interval interval = new Interval(DateTimes.EPOCH, DateTimes.nowUtc()); RoaringBitmapFactory factory = new RoaringBitmapFactory(); ArrayList toMerge = Lists.newArrayList( new IncrementalIndexAdapter(interval, index2, factory) diff --git a/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java b/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java index 7091817e617..f116a2c022a 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerV9CompatibilityTest.java @@ -23,7 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.io.ByteSource; import com.google.common.io.Files; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.JodaUtils; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.query.aggregation.AggregatorFactory; diff --git a/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java b/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java index e4218468ed3..9c17dc9078d 100644 --- a/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java +++ b/processing/src/test/java/io/druid/segment/IndexMergerV9WithSpatialIndexTest.java @@ -28,6 +28,8 @@ import io.druid.collections.spatial.search.RectangularBound; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.SpatialDimensionSchema; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Druids; import io.druid.query.FinalizeResultsQueryRunner; @@ -47,7 +49,6 @@ import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.segment.incremental.IncrementalIndex; import 
io.druid.segment.incremental.IncrementalIndexSchema; import org.apache.commons.io.FileUtils; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Test; import org.junit.runner.RunWith; @@ -69,7 +70,7 @@ public class IndexMergerV9WithSpatialIndexTest private static IndexIO INDEX_IO = TestHelper.getTestIndexIO(); public static final int NUM_POINTS = 5000; - private static Interval DATA_INTERVAL = new Interval("2013-01-01/2013-01-07"); + private static Interval DATA_INTERVAL = Intervals.of("2013-01-01/2013-01-07"); private static AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{ new CountAggregatorFactory("rows"), @@ -132,10 +133,10 @@ public class IndexMergerV9WithSpatialIndexTest theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, @@ -145,10 +146,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, @@ -158,10 +159,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, @@ -171,10 +172,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, @@ -184,10 +185,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, @@ -197,10 +198,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", @@ -210,10 +211,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -222,10 +223,10 @@ public class IndexMergerV9WithSpatialIndexTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), 
"lat2", 0.0f, "long2", 0.0f, "val", 13L @@ -238,10 +239,10 @@ public class IndexMergerV9WithSpatialIndexTest for (int i = 8; i < NUM_POINTS; i++) { theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), @@ -359,10 +360,10 @@ public class IndexMergerV9WithSpatialIndexTest first.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, @@ -372,10 +373,10 @@ public class IndexMergerV9WithSpatialIndexTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, @@ -385,10 +386,10 @@ public class IndexMergerV9WithSpatialIndexTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, @@ -398,10 +399,10 @@ public class IndexMergerV9WithSpatialIndexTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", @@ -411,10 +412,10 @@ public class IndexMergerV9WithSpatialIndexTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -423,10 +424,10 @@ public class IndexMergerV9WithSpatialIndexTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, @@ -436,10 +437,10 @@ public class IndexMergerV9WithSpatialIndexTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, @@ -449,10 +450,10 @@ public class IndexMergerV9WithSpatialIndexTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L @@ -465,10 +466,10 @@ public class IndexMergerV9WithSpatialIndexTest for (int i = 8; i < NUM_POINTS; i++) { third.add( new 
MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), @@ -539,7 +540,7 @@ public class IndexMergerV9WithSpatialIndexTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -556,7 +557,7 @@ public class IndexMergerV9WithSpatialIndexTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 3L) @@ -593,7 +594,7 @@ public class IndexMergerV9WithSpatialIndexTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "spatialIsRad", @@ -610,7 +611,7 @@ public class IndexMergerV9WithSpatialIndexTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -646,7 +647,7 @@ public class IndexMergerV9WithSpatialIndexTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -663,7 +664,7 @@ public class IndexMergerV9WithSpatialIndexTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -672,7 +673,7 @@ public class IndexMergerV9WithSpatialIndexTest ) ), new Result<>( - new DateTime("2013-01-02T00:00:00.000Z"), + DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -681,7 +682,7 @@ public class IndexMergerV9WithSpatialIndexTest ) ), new Result<>( - new DateTime("2013-01-03T00:00:00.000Z"), + DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -690,7 +691,7 @@ public class IndexMergerV9WithSpatialIndexTest ) ), new Result<>( - new DateTime("2013-01-04T00:00:00.000Z"), + DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -699,7 +700,7 @@ public class IndexMergerV9WithSpatialIndexTest ) ), new Result<>( - new DateTime("2013-01-05T00:00:00.000Z"), + DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) diff --git a/processing/src/test/java/io/druid/segment/NullDimensionSelectorTest.java b/processing/src/test/java/io/druid/segment/NullDimensionSelectorTest.java deleted file mode 100644 index dbbee144208..00000000000 --- a/processing/src/test/java/io/druid/segment/NullDimensionSelectorTest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Licensed to 
Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.segment; - -import io.druid.segment.data.IndexedInts; -import org.junit.Assert; -import org.junit.Test; - -import java.util.Iterator; - -public class NullDimensionSelectorTest -{ - - private final NullDimensionSelector selector = NullDimensionSelector.instance(); - - @Test - public void testGetRow() throws Exception - { - IndexedInts row = selector.getRow(); - Assert.assertEquals(1, row.size()); - Assert.assertEquals(0, row.get(0)); - - Iterator iter = row.iterator(); - Assert.assertEquals(true, iter.hasNext()); - Assert.assertEquals(0, iter.next().intValue()); - Assert.assertEquals(false, iter.hasNext()); - } - - @Test - public void testGetValueCardinality() throws Exception - { - Assert.assertEquals(1, selector.getValueCardinality()); - } - - @Test - public void testLookupName() throws Exception - { - Assert.assertEquals(null, selector.lookupName(0)); - } - - @Test - public void testLookupId() throws Exception - { - Assert.assertEquals(0, selector.idLookup().lookupId(null)); - Assert.assertEquals(0, selector.idLookup().lookupId("")); - Assert.assertEquals(-1, selector.idLookup().lookupId("billy")); - } -} diff --git a/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java b/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java index ac66b6c4c3d..724bcb41686 100644 --- a/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java +++ b/processing/src/test/java/io/druid/segment/ReferenceCountingSegmentTest.java @@ -20,7 +20,7 @@ package io.druid.segment; import com.google.common.base.Throwables; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.joda.time.Days; import org.joda.time.Interval; import org.junit.Assert; @@ -54,7 +54,7 @@ public class ReferenceCountingSegmentTest @Override public Interval getDataInterval() { - return new Interval(DateTime.now().minus(Days.days(1)), DateTime.now()); + return new Interval(DateTimes.nowUtc().minus(Days.days(1)), DateTimes.nowUtc()); } @Override diff --git a/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java b/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java index d5fda9e46ab..ca2caca63f2 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessIndexTest.java @@ -29,6 +29,8 @@ import com.google.common.collect.Maps; import io.druid.data.input.MapBasedInputRow; import io.druid.hll.HyperLogLogHash; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.granularity.Granularities; import 
io.druid.java.util.common.guava.Comparators; @@ -49,6 +51,7 @@ import io.druid.timeline.partition.PartitionChunk; import io.druid.timeline.partition.ShardSpec; import org.joda.time.DateTime; import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import javax.annotation.Nullable; import java.io.File; @@ -138,7 +141,7 @@ public class SchemalessIndexTest continue; } - final long timestamp = new DateTime(event.get(TIMESTAMP)).getMillis(); + final long timestamp = new DateTime(event.get(TIMESTAMP), ISOChronology.getInstanceUTC()).getMillis(); if (theIndex == null) { theIndex = new IncrementalIndex.Builder() @@ -349,7 +352,7 @@ public class SchemalessIndexTest for (final Map event : events) { - final long timestamp = new DateTime(event.get(TIMESTAMP)).getMillis(); + final long timestamp = new DateTime(event.get(TIMESTAMP), ISOChronology.getInstanceUTC()).getMillis(); final List dims = Lists.newArrayList(); for (Map.Entry entry : event.entrySet()) { if (!entry.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(entry.getKey())) { @@ -397,7 +400,7 @@ public class SchemalessIndexTest final IncrementalIndex retVal = new IncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis()) + .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) .withQueryGranularity(Granularities.MINUTE) .withMetrics(aggs) .build() @@ -419,7 +422,7 @@ public class SchemalessIndexTest retVal.add( new MapBasedInputRow( - new DateTime(event.get(TIMESTAMP)).getMillis(), + new DateTime(event.get(TIMESTAMP), ISOChronology.getInstanceUTC()).getMillis(), dims, event ) @@ -478,7 +481,7 @@ public class SchemalessIndexTest Iterables.concat( // TimelineObjectHolder is actually an iterable of iterable of indexable adapters Iterables.transform( - timeline.lookup(new Interval("1000-01-01/3000-01-01")), + timeline.lookup(Intervals.of("1000-01-01/3000-01-01")), new Function, Iterable>() { @Override diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java index 9a8fe1ba56f..ea1391825d7 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java @@ -22,6 +22,8 @@ package io.druid.segment; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; @@ -55,8 +57,6 @@ import io.druid.query.timeseries.TimeseriesResultValue; import io.druid.query.topn.TopNQuery; import io.druid.query.topn.TopNQueryBuilder; import io.druid.query.topn.TopNResultValue; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Test; import java.util.Arrays; @@ -94,7 +94,7 @@ public class SchemalessTestFullTest final List commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques); final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); @Test @@ -102,7 +102,7 @@ public class SchemalessTestFullTest { List> 
expectedTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -118,7 +118,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -134,7 +134,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -162,7 +162,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -181,7 +181,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -195,7 +195,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -207,13 +207,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-13T00:00:00.000Z") + DateTimes.of("2011-01-13T00:00:00.000Z") ) ) ) @@ -237,7 +237,7 @@ public class SchemalessTestFullTest { List> expectedTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -253,7 +253,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -269,7 +269,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -297,7 +297,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -325,7 +325,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( ) @@ -335,7 +335,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new 
DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( ) @@ -345,13 +345,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -376,7 +376,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -392,7 +392,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -408,7 +408,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -436,7 +436,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -455,7 +455,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -467,7 +467,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(qualityDimension, "automotive") @@ -478,13 +478,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -508,7 +508,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -524,7 +524,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -540,7 +540,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( 
Arrays.>asList( ImmutableMap.builder() @@ -568,7 +568,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -587,7 +587,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(qualityDimension, "automotive"), @@ -599,7 +599,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(qualityDimension, "automotive") @@ -610,13 +610,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-13T00:00:00.000Z") + DateTimes.of("2011-01-13T00:00:00.000Z") ) ) ) @@ -640,7 +640,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -658,7 +658,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -679,7 +679,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -693,13 +693,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -723,7 +723,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) @@ -739,7 +739,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -755,7 +755,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -783,7 +783,7 @@ public class 
SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -802,7 +802,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -816,13 +816,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -846,7 +846,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -862,7 +862,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 0L) @@ -878,7 +878,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Collections.singletonList( QueryRunnerTestHelper.orderedMap( @@ -896,7 +896,7 @@ public class SchemalessTestFullTest ); List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Collections.emptyList() ) @@ -905,7 +905,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Collections.emptyList() ) @@ -915,13 +915,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -947,7 +947,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -965,7 +965,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -986,7 +986,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), 
+ DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -1000,13 +1000,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -1032,7 +1032,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 3L) @@ -1048,7 +1048,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -1064,7 +1064,7 @@ public class SchemalessTestFullTest List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( QueryRunnerTestHelper.orderedMap( @@ -1092,7 +1092,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1112,7 +1112,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementDimension, "mezzanine") @@ -1123,7 +1123,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList() ) @@ -1132,13 +1132,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-12T00:00:00.000Z") + DateTimes.of("2011-01-12T00:00:00.000Z") ) ) ) @@ -1162,7 +1162,7 @@ public class SchemalessTestFullTest { List> expectedTimeseriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 11L) @@ -1178,7 +1178,7 @@ public class SchemalessTestFullTest List> expectedFilteredTimeSeriesResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 4L) @@ -1195,7 +1195,7 @@ public class SchemalessTestFullTest /* Uncomment when Druid support for nulls/empty strings is actually consistent List> expectedTopNResults = Arrays.asList( new Result( - new 
DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1232,7 +1232,7 @@ public class SchemalessTestFullTest */ List> expectedTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1269,7 +1269,7 @@ public class SchemalessTestFullTest List> expectedFilteredTopNResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( ImmutableMap.builder() @@ -1297,7 +1297,7 @@ public class SchemalessTestFullTest List> expectedSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -1311,7 +1311,7 @@ public class SchemalessTestFullTest List> expectedFilteredSearchResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -1323,13 +1323,13 @@ public class SchemalessTestFullTest List> expectedTimeBoundaryResults = Arrays.asList( new Result<>( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-13T00:00:00.000Z") + DateTimes.of("2011-01-13T00:00:00.000Z") ) ) ) diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java index 2a8489f0c47..69facab0d42 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestSimpleTest.java @@ -22,6 +22,8 @@ package io.druid.segment; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.Druids; @@ -53,8 +55,6 @@ import io.druid.query.topn.TopNQuery; import io.druid.query.topn.TopNQueryBuilder; import io.druid.query.topn.TopNResultValue; import io.druid.segment.incremental.IncrementalIndex; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -118,7 +118,7 @@ public class SchemalessTestSimpleTest final List commonAggregators = Arrays.asList(rowsCount, indexDoubleSum, uniques); final QuerySegmentSpec fullOnInterval = new MultipleIntervalSegmentSpec( - Arrays.asList(new Interval("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) + Arrays.asList(Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z")) ); private Segment segment; @@ -153,7 +153,7 @@ public class SchemalessTestSimpleTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 11L) @@ 
-199,7 +199,7 @@ public class SchemalessTestSimpleTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.asList( new DimensionAndMetricValueExtractor( @@ -257,7 +257,7 @@ public class SchemalessTestSimpleTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new SearchResultValue( Arrays.asList( new SearchHit(placementishDimension, "a"), @@ -283,13 +283,13 @@ public class SchemalessTestSimpleTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), new TimeBoundaryResultValue( ImmutableMap.of( TimeBoundaryQuery.MIN_TIME, - new DateTime("2011-01-12T00:00:00.000Z"), + DateTimes.of("2011-01-12T00:00:00.000Z"), TimeBoundaryQuery.MAX_TIME, - new DateTime("2011-01-13T00:00:00.000Z") + DateTimes.of("2011-01-13T00:00:00.000Z") ) ) ) diff --git a/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java b/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java index c98fe959ee9..6103abfd867 100644 --- a/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java +++ b/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java @@ -22,6 +22,7 @@ package io.druid.segment; import com.google.common.collect.ImmutableMap; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.data.CompressedObjectStrategy; @@ -44,7 +45,7 @@ import java.util.Map; public class StringDimensionHandlerTest { - private static final Interval TEST_INTERVAL = Interval.parse("2015-01-01/2015-12-31"); + private static final Interval TEST_INTERVAL = Intervals.of("2015-01-01/2015-12-31"); private static final IndexSpec INDEX_SPEC = new IndexSpec( new ConciseBitmapSerdeFactory(), diff --git a/processing/src/test/java/io/druid/segment/TestHelper.java b/processing/src/test/java/io/druid/segment/TestHelper.java index c62b0259c9c..a0f5e62d5d2 100644 --- a/processing/src/test/java/io/druid/segment/TestHelper.java +++ b/processing/src/test/java/io/druid/segment/TestHelper.java @@ -165,7 +165,11 @@ public class TestHelper && (((Result) expectedNext).getValue()) instanceof TopNResultValue) { // Special to allow a floating point delta to be used in result comparison due to legacy expected results assertTopNResultValue(failMsg, (Result) expectedNext, (Result) next); - assertTopNResultValue(String.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg), (Result) expectedNext, (Result) next2); + assertTopNResultValue( + StringUtils.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg), + (Result) expectedNext, + (Result) next2 + ); } else { assertResult(failMsg, (Result) expectedNext, (Result) next); assertResult( @@ -290,7 +294,7 @@ public class TestHelper Assert.assertEquals("Size of list must match", listExpectedRows.size(), listActualRows.size()); IntStream.range(0, listExpectedRows.size()).forEach(value -> assertRow( - String.format("%s, on value number [%s]", msg, value), + StringUtils.format("%s, on value number [%s]", msg, value), listExpectedRows.get(value), listActualRows.get(value) )); diff --git 
a/processing/src/test/java/io/druid/segment/TestIndex.java b/processing/src/test/java/io/druid/segment/TestIndex.java index c59c89f8f5c..4271eaa213b 100644 --- a/processing/src/test/java/io/druid/segment/TestIndex.java +++ b/processing/src/test/java/io/druid/segment/TestIndex.java @@ -34,6 +34,8 @@ import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.hll.HyperLogLogHash; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; @@ -50,7 +52,6 @@ import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.serde.ComplexMetrics; import io.druid.segment.virtual.ExpressionVirtualColumn; -import org.joda.time.DateTime; import org.joda.time.Interval; import java.io.File; @@ -117,7 +118,7 @@ public class TestIndex public static final String[] DOUBLE_METRICS = new String[]{"index", "indexMin", "indexMaxPlusTen"}; public static final String[] FLOAT_METRICS = new String[]{"indexFloat", "indexMinFloat", "indexMaxFloat"}; private static final Logger log = new Logger(TestIndex.class); - private static final Interval DATA_INTERVAL = new Interval("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z"); + private static final Interval DATA_INTERVAL = Intervals.of("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z"); private static final VirtualColumns VIRTUAL_COLUMNS = VirtualColumns.create( Collections.singletonList( new ExpressionVirtualColumn("expr", "index + 10", ValueType.FLOAT, TestExprMacroTable.INSTANCE) @@ -269,7 +270,7 @@ public class TestIndex public static IncrementalIndex makeRealtimeIndex(final CharSource source, boolean rollup) { final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder() - .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis()) + .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis()) .withTimestampSpec(new TimestampSpec("ds", "auto", null)) .withDimensionsSpec(DIMENSIONS_SPEC) .withVirtualColumns(VIRTUAL_COLUMNS) diff --git a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java index cd0ddc240ec..455b1af0d31 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java @@ -162,24 +162,6 @@ public class CompressedIntsIndexedSupplierTest extends CompressionStrategyTest setupLargeChunks(maxChunkSize + 1, 10 * (maxChunkSize + 1)); } - @Test - public void testBulkFill() throws Exception - { - setupSimple(5); - - tryFill(0, 15); - tryFill(3, 6); - tryFill(7, 7); - tryFill(7, 9); - } - - @Test(expected = IndexOutOfBoundsException.class) - public void testBulkFillTooMuch() throws Exception - { - setupSimple(5); - tryFill(7, 10); - } - @Test public void testSanityWithSerde() throws Exception { @@ -189,24 +171,6 @@ public class CompressedIntsIndexedSupplierTest extends CompressionStrategyTest assertIndexMatchesVals(); } - @Test - public void testBulkFillWithSerde() throws Exception - { - setupSimpleWithSerde(5); - - tryFill(0, 15); - tryFill(3, 6); - tryFill(7, 7); - tryFill(7, 9); - } - - @Test(expected = 
IndexOutOfBoundsException.class) - public void testBulkFillTooMuchWithSerde() throws Exception - { - setupSimpleWithSerde(5); - tryFill(7, 10); - } - // This test attempts to cause a race condition with the DirectByteBuffers, it's non-deterministic in causing it, // which sucks but I can't think of a way to deterministically cause it... @Test @@ -311,16 +275,6 @@ public class CompressedIntsIndexedSupplierTest extends CompressionStrategyTest } } - private void tryFill(final int startIndex, final int size) - { - int[] filled = new int[size]; - indexed.fill(startIndex, filled); - - for (int i = startIndex; i < filled.length; i++) { - Assert.assertEquals(vals[i + startIndex], filled[i]); - } - } - private void assertIndexMatchesVals() { Assert.assertEquals(vals.length, indexed.size()); diff --git a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java index e6849544a7a..bbcc3697ca3 100644 --- a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java @@ -34,6 +34,7 @@ import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionsSpec; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Accumulator; @@ -468,7 +469,7 @@ public class IncrementalIndexTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("xxx") .granularity(Granularities.ALL) - .intervals(ImmutableList.of(new Interval("2000/2030"))) + .intervals(ImmutableList.of(Intervals.of("2000/2030"))) .aggregators(queryAggregatorFactories) .build(); @@ -569,7 +570,7 @@ public class IncrementalIndexTest ) ); final long timestamp = System.currentTimeMillis(); - final Interval queryInterval = new Interval("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z"); + final Interval queryInterval = Intervals.of("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z"); final List> indexFutures = Lists.newArrayListWithExpectedSize(concurrentThreads); final List> queryFutures = Lists.newArrayListWithExpectedSize(concurrentThreads); final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null); diff --git a/processing/src/test/java/io/druid/segment/data/IntersectingOffsetTest.java b/processing/src/test/java/io/druid/segment/data/IntersectingOffsetTest.java deleted file mode 100644 index 311f1cd80bb..00000000000 --- a/processing/src/test/java/io/druid/segment/data/IntersectingOffsetTest.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.segment.data; - -import com.google.common.collect.Lists; -import org.junit.Assert; -import org.junit.Test; - -import java.util.LinkedList; - -/** - */ -public class IntersectingOffsetTest -{ - @Test - public void testSanity() throws Exception - { - assertExpected( - new int[]{2, 3, 6, 7}, - new IntersectingOffset( - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}), - new ArrayBasedOffset(new int[]{2, 3, 4, 5, 6, 7}) - ) - ); - assertExpected( - new int[]{2, 3, 6, 7}, - new IntersectingOffset( - new ArrayBasedOffset(new int[]{2, 3, 4, 5, 6, 7}), - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}) - ) - ); - - assertExpected( - new int[]{}, - new IntersectingOffset( - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}), - new ArrayBasedOffset(new int[]{4, 5, 9, 10}) - ) - ); - - assertExpected( - new int[]{}, - new IntersectingOffset( - new ArrayBasedOffset(new int[]{4, 5, 9, 10}), - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}) - ) - ); - - assertExpected( - new int[]{}, - new IntersectingOffset( - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}), - new ArrayBasedOffset(new int[]{}) - ) - ); - - assertExpected( - new int[]{}, - new IntersectingOffset( - new ArrayBasedOffset(new int[]{}), - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}) - ) - ); - } - - private static void assertExpected(int[] expectedValues, IntersectingOffset offset) - { - final LinkedList offsets = Lists.newLinkedList(); - offsets.add(offset); - - for (int expectedValue : expectedValues) { - for (Offset aClone : offsets) { - Assert.assertTrue(aClone.withinBounds()); - Assert.assertEquals(expectedValue, aClone.getOffset()); - aClone.increment(); - } - offsets.add(offsets.getFirst().clone()); - } - - for (Offset aClone : offsets) { - Assert.assertFalse(aClone.withinBounds()); - } - } -} diff --git a/processing/src/test/java/io/druid/segment/data/UnioningOffsetTest.java b/processing/src/test/java/io/druid/segment/data/UnioningOffsetTest.java deleted file mode 100644 index 8cca50a01e5..00000000000 --- a/processing/src/test/java/io/druid/segment/data/UnioningOffsetTest.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.segment.data; - -import com.google.common.collect.Lists; -import io.druid.java.util.common.StringUtils; -import org.junit.Assert; -import org.junit.Test; - -import java.util.ArrayList; - -/** - */ -public class UnioningOffsetTest -{ - @Test - public void testSanity() throws Exception - { - assertExpected( - new int[]{1, 2, 3, 4, 5, 6, 7, 8}, - new UnioningOffset( - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}), - new ArrayBasedOffset(new int[]{2, 3, 4, 5, 6, 7}) - ) - ); - assertExpected( - new int[]{1, 2, 3, 4, 5, 6, 7, 8}, - new UnioningOffset( - new ArrayBasedOffset(new int[]{2, 3, 4, 5, 6, 7}), - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}) - ) - ); - - assertExpected( - new int[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, - new UnioningOffset( - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}), - new ArrayBasedOffset(new int[]{4, 5, 9, 10}) - ) - ); - - assertExpected( - new int[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, - new UnioningOffset( - new ArrayBasedOffset(new int[]{4, 5, 9, 10}), - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}) - ) - ); - - assertExpected( - new int[]{1, 2, 3, 6, 7, 8}, - new UnioningOffset( - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}), - new ArrayBasedOffset(new int[]{}) - ) - ); - - assertExpected( - new int[]{1, 2, 3, 6, 7, 8}, - new UnioningOffset( - new ArrayBasedOffset(new int[]{}), - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}) - ) - ); - - assertExpected( - new int[]{1, 2, 3, 6, 7, 8}, - new UnioningOffset( - new ArrayBasedOffset(new int[]{1}), - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}) - ) - ); - - assertExpected( - new int[]{1, 2, 3, 6, 7, 8}, - new UnioningOffset( - new ArrayBasedOffset(new int[]{1, 2, 3, 6, 7, 8}), - new ArrayBasedOffset(new int[]{1}) - ) - ); - } - - private static void assertExpected(int[] expectedValues, UnioningOffset offset) - { - final ArrayList offsets = Lists.newArrayList(); - offsets.add(offset); - - for (int expectedValue : expectedValues) { - for (int j = 0; j < offsets.size(); ++j) { - Offset aClone = offsets.get(j); - Assert.assertTrue(StringUtils.format("Clone[%d] out of bounds", j), aClone.withinBounds()); - Assert.assertEquals(StringUtils.format("Clone[%d] not right", j), expectedValue, aClone.getOffset()); - aClone.increment(); - } - offsets.add(offsets.get(0).clone()); - } - - for (Offset aClone : offsets) { - Assert.assertFalse(aClone.withinBounds()); - } - } -} diff --git a/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java b/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java index ed7a773de81..2d8c2440249 100644 --- a/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/AndFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.DimFilter; @@ -35,7 +36,6 @@ import io.druid.query.filter.NotDimFilter; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -52,7 +52,7 @@ public class AndFilterTest extends BaseFilterTest private static final InputRowParser> 
PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec(null, null, null) ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java index 7411c44e388..58bc2bcec7c 100644 --- a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java @@ -27,7 +27,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.common.guava.SettableSupplier; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; import io.druid.data.input.InputRow; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; @@ -66,7 +66,6 @@ import io.druid.segment.data.RoaringBitmapSerdeFactory; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexStorageAdapter; import io.druid.segment.virtual.ExpressionVirtualColumn; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -302,7 +301,7 @@ public abstract class BaseFilterTest { return adapter.makeCursors( filter, - new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT), + Intervals.ETERNITY, VIRTUAL_COLUMNS, Granularities.ALL, false, @@ -323,9 +322,9 @@ public abstract class BaseFilterTest @Override public List apply(Cursor input) { - final DimensionSelector selector = input.makeDimensionSelector( - new DefaultDimensionSpec(selectColumn, selectColumn) - ); + final DimensionSelector selector = input + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec(selectColumn, selectColumn)); final List values = Lists.newArrayList(); @@ -356,7 +355,7 @@ public abstract class BaseFilterTest Aggregator agg = new FilteredAggregatorFactory( new CountAggregatorFactory("count"), maybeOptimize(filter) - ).factorize(input); + ).factorize(input.getColumnSelectorFactory()); for (; !input.isDone(); input.advance()) { agg.aggregate(); @@ -418,9 +417,9 @@ public abstract class BaseFilterTest @Override public List apply(Cursor input) { - final DimensionSelector selector = input.makeDimensionSelector( - new DefaultDimensionSpec(selectColumn, selectColumn) - ); + final DimensionSelector selector = input + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec(selectColumn, selectColumn)); final List values = Lists.newArrayList(); diff --git a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java index 0e11e4ed531..f94e8769ba4 100644 --- a/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/BoundFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -36,7 +37,6 @@ import io.druid.query.filter.BoundDimFilter; import io.druid.query.ordering.StringComparators; import 
io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -53,7 +53,7 @@ public class BoundFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec(null, null, null) ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java b/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java index 9b14dac1fab..8aa0e588255 100644 --- a/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/ColumnComparisonFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; @@ -38,7 +39,6 @@ import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.LookupExtractor; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -55,7 +55,7 @@ public class ColumnComparisonFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java b/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java index 2ac8e3bbbaa..8e3837e5c7e 100644 --- a/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/ExpressionFilterTest.java @@ -31,13 +31,13 @@ import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.expression.TestExprMacroTable; import io.druid.query.filter.ExpressionDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -55,7 +55,7 @@ public class ExpressionFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( ImmutableList.of( new StringDimensionSchema("dim0"), diff --git a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java 
b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java index 5c5b96e39fc..309a9b90845 100644 --- a/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java +++ b/processing/src/test/java/io/druid/segment/filter/FilterPartitionTest.java @@ -30,6 +30,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -46,7 +47,6 @@ import io.druid.query.filter.OrDimFilter; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Test; @@ -157,7 +157,7 @@ public class FilterPartitionTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java index 4ae510a720b..b737be71623 100644 --- a/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/FloatAndDoubleFilteringTest.java @@ -35,6 +35,7 @@ import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.MapLookupExtractor; @@ -52,7 +53,6 @@ import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Test; @@ -79,7 +79,7 @@ public class FloatAndDoubleFilteringTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "millis", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "millis", DateTimes.of("2000")), new DimensionsSpec( ImmutableList.of( new StringDimensionSchema("dim0"), diff --git a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java index 54a136569de..74d5c58adfb 100644 --- a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java @@ -29,6 +29,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; 
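The TimestampSpec hunks in these filter tests all make the same substitution: the default timestamp `new DateTime("2000")` becomes `DateTimes.of("2000")`. The sketch below shows the practical difference, under the assumption (not stated in the diff itself) that Druid's `DateTimes.of` always parses against the UTC ISO chronology, while the Joda string constructor uses the JVM's default time zone; class and variable names are illustrative.

```java
import io.druid.java.util.common.DateTimes;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class DefaultTimestampSketch
{
  public static void main(String[] args)
  {
    // Old pattern: parsed in the JVM's default zone, so the "missing value" timestamp
    // used by these test parsers could differ between machines.
    DateTime zoneDependent = new DateTime("2000");

    // New pattern. Assumption: DateTimes.of(...) pins parsing to the UTC ISO chronology,
    // giving every test the same default timestamp regardless of environment.
    DateTime alwaysUtc = DateTimes.of("2000");

    System.out.println(zoneDependent + " vs " + alwaysUtc);
    System.out.println(alwaysUtc.getZone().equals(DateTimeZone.UTC)); // expected: true under the assumption above
  }
}
```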
import io.druid.query.extraction.ExtractionFn; @@ -40,7 +41,6 @@ import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.LookupExtractor; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -57,7 +57,7 @@ public class InFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec(null, null, null) ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java index 5c8f52bee0a..8f28cb99e6f 100644 --- a/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/InvalidFilteringTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; @@ -37,7 +38,6 @@ import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -56,7 +56,7 @@ public class InvalidFilteringTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "millis", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "millis", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java index c923545e186..fd140811a42 100644 --- a/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/JavaScriptFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -37,7 +38,6 @@ import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.LookupExtractor; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -54,7 +54,7 @@ public class JavaScriptFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new 
TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java b/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java index 07dd395dd89..728bdc9e738 100644 --- a/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/LikeFilterTest.java @@ -28,12 +28,12 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.extraction.SubstringDimExtractionFn; import io.druid.query.filter.LikeDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -50,7 +50,7 @@ public class LikeFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec(null, null, null) ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java index b20c7f7575e..8e621bb10f0 100644 --- a/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/LongFilteringTest.java @@ -32,6 +32,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -50,7 +51,6 @@ import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Test; @@ -76,7 +76,7 @@ public class LongFilteringTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "millis", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "millis", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java b/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java index d581d999bf4..6fdb7191fc8 100644 --- a/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/NotFilterTest.java @@ -28,12 +28,12 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import 
io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.filter.NotDimFilter; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -50,7 +50,7 @@ public class NotFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec(null, null, null) ) ); diff --git a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java index 5b177928c86..cb62975b846 100644 --- a/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/RegexFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -35,7 +36,6 @@ import io.druid.query.extraction.JavaScriptExtractionFn; import io.druid.query.filter.RegexDimFilter; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -52,7 +52,7 @@ public class RegexFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java index d3c24c5f6aa..49fd96b2b68 100644 --- a/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SearchQueryFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -37,7 +38,6 @@ import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.query.search.search.SearchQuerySpec; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -54,7 +54,7 @@ public class SearchQueryFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new 
DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java index e3bd81b4727..2f41178aee5 100644 --- a/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SelectorFilterTest.java @@ -28,6 +28,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.query.extraction.MapLookupExtractor; import io.druid.query.extraction.TimeDimExtractionFn; @@ -38,7 +39,6 @@ import io.druid.query.lookup.LookupExtractionFn; import io.druid.query.lookup.LookupExtractor; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; -import org.joda.time.DateTime; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Test; @@ -57,7 +57,7 @@ public class SelectorFilterTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "iso", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "iso", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3", "dim6")), null, diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java index 2279836aba7..f2def054670 100644 --- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java @@ -28,6 +28,8 @@ import io.druid.collections.spatial.search.RectangularBound; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.SpatialDimensionSchema; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Druids; @@ -56,7 +58,6 @@ import io.druid.segment.Segment; import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Test; import org.junit.runner.RunWith; @@ -77,7 +78,7 @@ import java.util.Set; public class SpatialFilterBonusTest { public static final int NUM_POINTS = 5000; - private static Interval DATA_INTERVAL = new Interval("2013-01-01/2013-01-07"); + private static Interval DATA_INTERVAL = Intervals.of("2013-01-01/2013-01-07"); private static AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{ new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("val", "val") @@ -142,10 +143,10 @@ public class SpatialFilterBonusTest theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "dim.geo", "0.0,0.0", "val", 17L @@ -154,10 +155,10 @@ 
public class SpatialFilterBonusTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "dim.geo", "1.0,3.0", "val", 29L @@ -166,10 +167,10 @@ public class SpatialFilterBonusTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "dim.geo", "4.0,2.0", "val", 13L @@ -178,10 +179,10 @@ public class SpatialFilterBonusTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "dim.geo", "7.0,3.0", "val", 91L @@ -190,10 +191,10 @@ public class SpatialFilterBonusTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "8.0,6.0", "val", 47L @@ -202,10 +203,10 @@ public class SpatialFilterBonusTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -230,10 +231,10 @@ public class SpatialFilterBonusTest } theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "dim.geo", coord, "val", i @@ -333,10 +334,10 @@ public class SpatialFilterBonusTest first.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "dim.geo", "0.0,0.0", "val", 17L @@ -345,10 +346,10 @@ public class SpatialFilterBonusTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "dim.geo", "1.0,3.0", "val", 29L @@ -357,10 +358,10 @@ public class SpatialFilterBonusTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "dim.geo", "4.0,2.0", "val", 13L @@ -369,10 +370,10 @@ public class SpatialFilterBonusTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 
501L @@ -381,10 +382,10 @@ public class SpatialFilterBonusTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "dim.geo", "7.0,3.0", "val", 91L @@ -393,10 +394,10 @@ public class SpatialFilterBonusTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "8.0,6.0", "val", 47L @@ -409,10 +410,10 @@ public class SpatialFilterBonusTest for (int i = 6; i < NUM_POINTS; i++) { third.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "dim.geo", StringUtils.format( "%s,%s", @@ -474,7 +475,7 @@ public class SpatialFilterBonusTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -491,7 +492,7 @@ public class SpatialFilterBonusTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 3L) @@ -526,7 +527,7 @@ public class SpatialFilterBonusTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -543,7 +544,7 @@ public class SpatialFilterBonusTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -552,7 +553,7 @@ public class SpatialFilterBonusTest ) ), new Result( - new DateTime("2013-01-02T00:00:00.000Z"), + DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -561,7 +562,7 @@ public class SpatialFilterBonusTest ) ), new Result( - new DateTime("2013-01-03T00:00:00.000Z"), + DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -570,7 +571,7 @@ public class SpatialFilterBonusTest ) ), new Result( - new DateTime("2013-01-04T00:00:00.000Z"), + DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -579,7 +580,7 @@ public class SpatialFilterBonusTest ) ), new Result( - new DateTime("2013-01-05T00:00:00.000Z"), + DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -614,7 +615,7 @@ public class SpatialFilterBonusTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.DAY) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) 
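These query hunks replace Joda's string-based `new Interval(...)` and `Interval.parse(...)` with the `Intervals` helper used throughout the patch. A short sketch of the three forms that appear in this diff follows; it assumes, as with `DateTimes`, that the helper fixes the chronology to UTC, and the class and variable names are illustrative.

```java
import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.Intervals;
import org.joda.time.Interval;

public class IntervalSketch
{
  public static void main(String[] args)
  {
    // ISO-8601 "start/end" string, as in the spatial filter tests' DATA_INTERVAL and query intervals.
    Interval dataInterval = Intervals.of("2013-01-01/2013-01-07");

    // Millisecond endpoints, the form used later in this patch for cursor bounds and
    // uncovered-interval tracking (previously new Interval(startMillis, endMillis)).
    long now = System.currentTimeMillis();
    Interval lastMinute = Intervals.utc(now - 60_000, now);

    // "All time", replacing new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT) in BaseFilterTest.
    Interval everything = Intervals.ETERNITY;

    System.out.println(dataInterval.contains(DateTimes.of("2013-01-03"))); // expected: true
    System.out.println(everything.contains(lastMinute));                   // expected: true
  }
}
```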
.aggregators( Arrays.asList( new CountAggregatorFactory("rows"), @@ -632,7 +633,7 @@ public class SpatialFilterBonusTest List> expectedResults = Arrays.asList( new Result<>( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 4995L) @@ -642,7 +643,7 @@ public class SpatialFilterBonusTest ) ), new Result<>( - new DateTime("2013-01-02T00:00:00.000Z"), + DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -652,7 +653,7 @@ public class SpatialFilterBonusTest ) ), new Result<>( - new DateTime("2013-01-03T00:00:00.000Z"), + DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -662,7 +663,7 @@ public class SpatialFilterBonusTest ) ), new Result<>( - new DateTime("2013-01-04T00:00:00.000Z"), + DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -672,7 +673,7 @@ public class SpatialFilterBonusTest ) ), new Result<>( - new DateTime("2013-01-05T00:00:00.000Z"), + DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 2L) diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java index 365d7be7d1a..7e8b1871815 100644 --- a/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterTest.java @@ -28,6 +28,8 @@ import io.druid.collections.spatial.search.RectangularBound; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.SpatialDimensionSchema; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Druids; import io.druid.query.FinalizeResultsQueryRunner; @@ -54,7 +56,6 @@ import io.druid.segment.Segment; import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Test; import org.junit.runner.RunWith; @@ -76,7 +77,7 @@ public class SpatialFilterTest private static IndexIO INDEX_IO = TestHelper.getTestIndexIO(); public static final int NUM_POINTS = 5000; - private static Interval DATA_INTERVAL = new Interval("2013-01-01/2013-01-07"); + private static Interval DATA_INTERVAL = Intervals.of("2013-01-01/2013-01-07"); private static AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{ new CountAggregatorFactory("rows"), @@ -139,10 +140,10 @@ public class SpatialFilterTest theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, @@ -152,10 +153,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, @@ -165,10 +166,10 @@ public class 
SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, "long", 2.0f, @@ -178,10 +179,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, @@ -191,10 +192,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, @@ -204,10 +205,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", @@ -217,10 +218,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -229,10 +230,10 @@ public class SpatialFilterTest ); theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L @@ -245,10 +246,10 @@ public class SpatialFilterTest for (int i = 8; i < NUM_POINTS; i++) { theIndex.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), @@ -359,10 +360,10 @@ public class SpatialFilterTest first.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "foo", "lat", 0.0f, "long", 0.0f, @@ -372,10 +373,10 @@ public class SpatialFilterTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-02").getMillis(), + DateTimes.of("2013-01-02").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-02").toString(), + "timestamp", DateTimes.of("2013-01-02").toString(), "dim", "foo", "lat", 1.0f, "long", 3.0f, @@ -385,10 +386,10 @@ public class SpatialFilterTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-03").getMillis(), + DateTimes.of("2013-01-03").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-03").toString(), + "timestamp", DateTimes.of("2013-01-03").toString(), "dim", "foo", "lat", 4.0f, 
"long", 2.0f, @@ -398,10 +399,10 @@ public class SpatialFilterTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", "_mmx.unknown", "long", "_mmx.unknown", @@ -411,10 +412,10 @@ public class SpatialFilterTest ); first.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "dim.geo", "_mmx.unknown", "val", 501L @@ -423,10 +424,10 @@ public class SpatialFilterTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-04").getMillis(), + DateTimes.of("2013-01-04").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-04").toString(), + "timestamp", DateTimes.of("2013-01-04").toString(), "dim", "foo", "lat", 7.0f, "long", 3.0f, @@ -436,10 +437,10 @@ public class SpatialFilterTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "dim", "foo", "lat", 8.0f, "long", 6.0f, @@ -449,10 +450,10 @@ public class SpatialFilterTest ); second.add( new MapBasedInputRow( - new DateTime("2013-01-05").getMillis(), + DateTimes.of("2013-01-05").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-05").toString(), + "timestamp", DateTimes.of("2013-01-05").toString(), "lat2", 0.0f, "long2", 0.0f, "val", 13L @@ -465,10 +466,10 @@ public class SpatialFilterTest for (int i = 8; i < NUM_POINTS; i++) { third.add( new MapBasedInputRow( - new DateTime("2013-01-01").getMillis(), + DateTimes.of("2013-01-01").getMillis(), DIMS, ImmutableMap.of( - "timestamp", new DateTime("2013-01-01").toString(), + "timestamp", DateTimes.of("2013-01-01").toString(), "dim", "boo", "lat", (float) (rand.nextFloat() * 10 + 10.0), "long", (float) (rand.nextFloat() * 10 + 10.0), @@ -530,7 +531,7 @@ public class SpatialFilterTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -547,7 +548,7 @@ public class SpatialFilterTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 3L) @@ -583,7 +584,7 @@ public class SpatialFilterTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity(Granularities.ALL) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "spatialIsRad", @@ -600,7 +601,7 @@ public class SpatialFilterTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -635,7 +636,7 @@ public class SpatialFilterTest TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("test") 
.granularity(Granularities.DAY) - .intervals(Arrays.asList(new Interval("2013-01-01/2013-01-07"))) + .intervals(Arrays.asList(Intervals.of("2013-01-01/2013-01-07"))) .filters( new SpatialDimFilter( "dim.geo", @@ -652,7 +653,7 @@ public class SpatialFilterTest List> expectedResults = Arrays.asList( new Result( - new DateTime("2013-01-01T00:00:00.000Z"), + DateTimes.of("2013-01-01T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -661,7 +662,7 @@ public class SpatialFilterTest ) ), new Result( - new DateTime("2013-01-02T00:00:00.000Z"), + DateTimes.of("2013-01-02T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -670,7 +671,7 @@ public class SpatialFilterTest ) ), new Result( - new DateTime("2013-01-03T00:00:00.000Z"), + DateTimes.of("2013-01-03T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -679,7 +680,7 @@ public class SpatialFilterTest ) ), new Result( - new DateTime("2013-01-04T00:00:00.000Z"), + DateTimes.of("2013-01-04T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) @@ -688,7 +689,7 @@ public class SpatialFilterTest ) ), new Result( - new DateTime("2013-01-05T00:00:00.000Z"), + DateTimes.of("2013-01-05T00:00:00.000Z"), new TimeseriesResultValue( ImmutableMap.builder() .put("rows", 1L) diff --git a/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java b/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java index aa0a2d7e6a3..4c816704912 100644 --- a/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java +++ b/processing/src/test/java/io/druid/segment/filter/TimeFilteringTest.java @@ -28,6 +28,8 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimeAndDimsParseSpec; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.js.JavaScriptConfig; import io.druid.query.extraction.ExtractionFn; @@ -48,9 +50,7 @@ import io.druid.query.search.search.ContainsSearchQuerySpec; import io.druid.segment.IndexBuilder; import io.druid.segment.StorageAdapter; import io.druid.segment.column.Column; -import org.joda.time.DateTime; import org.joda.time.DateTimeZone; -import org.joda.time.Interval; import org.junit.AfterClass; import org.junit.Test; import org.junit.runner.RunWith; @@ -70,7 +70,7 @@ public class TimeFilteringTest extends BaseFilterTest private static final InputRowParser> PARSER = new MapInputRowParser( new TimeAndDimsParseSpec( - new TimestampSpec(TIMESTAMP_COLUMN, "millis", new DateTime("2000")), + new TimestampSpec(TIMESTAMP_COLUMN, "millis", DateTimes.of("2000")), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim0", "dim1", "dim2", "dim3")), null, @@ -238,7 +238,7 @@ public class TimeFilteringTest extends BaseFilterTest assertFilterMatches( new IntervalDimFilter( Column.TIME_COLUMN_NAME, - Arrays.asList(Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), + Arrays.asList(Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), null ), ImmutableList.of("1", "2", "3", "4") @@ -248,8 +248,8 @@ public class TimeFilteringTest extends BaseFilterTest new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.003Z"), - 
Interval.parse("1970-01-01T00:00:00.004Z/1970-01-01T00:00:00.006Z") + Intervals.of("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.003Z"), + Intervals.of("1970-01-01T00:00:00.004Z/1970-01-01T00:00:00.006Z") ), null ), @@ -260,9 +260,9 @@ public class TimeFilteringTest extends BaseFilterTest new IntervalDimFilter( Column.TIME_COLUMN_NAME, Arrays.asList( - Interval.parse("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.001Z"), - Interval.parse("1970-01-01T00:00:00.003Z/1970-01-01T00:00:00.006Z"), - Interval.parse("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.005Z") + Intervals.of("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.001Z"), + Intervals.of("1970-01-01T00:00:00.003Z/1970-01-01T00:00:00.006Z"), + Intervals.of("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.005Z") ), null ), @@ -275,7 +275,7 @@ public class TimeFilteringTest extends BaseFilterTest assertFilterMatches( new IntervalDimFilter( Column.TIME_COLUMN_NAME, - Arrays.asList(Interval.parse("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), + Arrays.asList(Intervals.of("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), exFn ), ImmutableList.of("1", "2", "3", "4") @@ -288,7 +288,7 @@ public class TimeFilteringTest extends BaseFilterTest assertFilterMatches( new IntervalDimFilter( "dim0", - Arrays.asList(Interval.parse("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), + Arrays.asList(Intervals.of("1970-01-01T00:00:00.001Z/1970-01-01T00:00:00.005Z")), null ), ImmutableList.of("1", "2", "3", "4") @@ -298,8 +298,8 @@ public class TimeFilteringTest extends BaseFilterTest new IntervalDimFilter( "dim0", Arrays.asList( - Interval.parse("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.003Z"), - Interval.parse("1970-01-01T00:00:00.004Z/1970-01-01T00:00:00.006Z") + Intervals.of("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.003Z"), + Intervals.of("1970-01-01T00:00:00.004Z/1970-01-01T00:00:00.006Z") ), null ), @@ -310,9 +310,9 @@ public class TimeFilteringTest extends BaseFilterTest new IntervalDimFilter( "dim0", Arrays.asList( - Interval.parse("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.001Z"), - Interval.parse("1970-01-01T00:00:00.003Z/1970-01-01T00:00:00.006Z"), - Interval.parse("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.005Z") + Intervals.of("1970-01-01T00:00:00.000Z/1970-01-01T00:00:00.001Z"), + Intervals.of("1970-01-01T00:00:00.003Z/1970-01-01T00:00:00.006Z"), + Intervals.of("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.005Z") ), null ), @@ -322,7 +322,7 @@ public class TimeFilteringTest extends BaseFilterTest assertFilterMatches( new IntervalDimFilter( "dim1", - Arrays.asList(Interval.parse("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.011Z")), + Arrays.asList(Intervals.of("1970-01-01T00:00:00.002Z/1970-01-01T00:00:00.011Z")), null ), ImmutableList.of("1", "2") @@ -334,7 +334,7 @@ public class TimeFilteringTest extends BaseFilterTest assertFilterMatches( new IntervalDimFilter( "dim0", - Arrays.asList(Interval.parse("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), + Arrays.asList(Intervals.of("1970-01-01T02:00:00.001Z/1970-01-01T02:00:00.005Z")), exFn ), ImmutableList.of("1", "2", "3", "4") diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java index 97e14777d31..d3cf377e442 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexStorageAdapterTest.java @@ 
-29,6 +29,8 @@ import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -114,14 +116,14 @@ public class IncrementalIndexStorageAdapterTest IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy"), ImmutableMap.of("billy", "hi") ) ); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("sally"), ImmutableMap.of("sally", "bo") ) @@ -133,7 +135,7 @@ public class IncrementalIndexStorageAdapterTest GroupByQuery.builder() .setDataSource("test") .setGranularity(Granularities.ALL) - .setInterval(new Interval(0, new DateTime().getMillis())) + .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc())) .addDimension("billy") .addDimension("sally") .addAggregator(new LongSumAggregatorFactory("cnt", "cnt")) @@ -158,14 +160,14 @@ public class IncrementalIndexStorageAdapterTest IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( - new DateTime("2014-09-01T00:00:00"), + DateTimes.of("2014-09-01T00:00:00"), Lists.newArrayList("billy"), ImmutableMap.of("billy", "hi") ) ); index.add( new MapBasedInputRow( - new DateTime("2014-09-01T01:00:00"), + DateTimes.of("2014-09-01T01:00:00"), Lists.newArrayList("billy", "sally"), ImmutableMap.of( "billy", "hip", @@ -180,7 +182,7 @@ public class IncrementalIndexStorageAdapterTest GroupByQuery.builder() .setDataSource("test") .setGranularity(Granularities.ALL) - .setInterval(new Interval(0, new DateTime().getMillis())) + .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc())) .addDimension("billy") .addDimension("sally") .addAggregator( @@ -243,7 +245,7 @@ public class IncrementalIndexStorageAdapterTest { IncrementalIndex index = indexCreator.createIndex(); - DateTime t = DateTime.now(); + DateTime t = DateTimes.nowUtc(); Interval interval = new Interval(t.minusMinutes(1), t.plusMinutes(1)); index.add( @@ -276,7 +278,9 @@ public class IncrementalIndexStorageAdapterTest Cursor cursor = Sequences.toList(Sequences.limit(cursorSequence, 1), Lists.newArrayList()).get(0); DimensionSelector dimSelector; - dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec("sally", "sally")); + dimSelector = cursor + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec("sally", "sally")); Assert.assertEquals("bo", dimSelector.lookupName(dimSelector.getRow().get(0))); index.add( @@ -290,7 +294,9 @@ public class IncrementalIndexStorageAdapterTest // Cursor reset should not be affected by out of order values cursor.reset(); - dimSelector = cursor.makeDimensionSelector(new DefaultDimensionSpec("sally", "sally")); + dimSelector = cursor + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec("sally", "sally")); Assert.assertEquals("bo", dimSelector.lookupName(dimSelector.getRow().get(0))); } } @@ -299,7 +305,7 @@ public class IncrementalIndexStorageAdapterTest public void testSingleValueTopN() throws IOException { IncrementalIndex index = indexCreator.createIndex(); - DateTime t = DateTime.now(); + DateTime t = DateTimes.nowUtc(); index.add( new 
MapBasedInputRow( t.minus(1).getMillis(), @@ -326,7 +332,7 @@ public class IncrementalIndexStorageAdapterTest engine.query( new TopNQueryBuilder().dataSource("test") .granularity(Granularities.ALL) - .intervals(Lists.newArrayList(new Interval(0, new DateTime().getMillis()))) + .intervals(Lists.newArrayList(new Interval(DateTimes.EPOCH, DateTimes.nowUtc()))) .dimension("sally") .metric("cnt") .threshold(10) @@ -355,14 +361,14 @@ public class IncrementalIndexStorageAdapterTest IncrementalIndex index = indexCreator.createIndex(); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy"), ImmutableMap.of("billy", "hi") ) ); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("sally"), ImmutableMap.of("sally", "bo") ) @@ -374,7 +380,7 @@ public class IncrementalIndexStorageAdapterTest GroupByQuery.builder() .setDataSource("test") .setGranularity(Granularities.ALL) - .setInterval(new Interval(0, new DateTime().getMillis())) + .setInterval(new Interval(DateTimes.EPOCH, DateTimes.nowUtc())) .addDimension("billy") .addDimension("sally") .addAggregator(new LongSumAggregatorFactory("cnt", "cnt")) @@ -411,7 +417,7 @@ public class IncrementalIndexStorageAdapterTest Sequence cursors = sa.makeCursors( null, - new Interval(timestamp - 60_000, timestamp + 60_000), + Intervals.utc(timestamp - 60_000, timestamp + 60_000), VirtualColumns.EMPTY, Granularities.ALL, false, @@ -428,12 +434,9 @@ public class IncrementalIndexStorageAdapterTest @Override public Object apply(Cursor cursor) { - DimensionSelector dimSelector = cursor.makeDimensionSelector( - new DefaultDimensionSpec( - "billy", - "billy" - ) - ); + DimensionSelector dimSelector = cursor + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec("billy", "billy")); int cardinality = dimSelector.getValueCardinality(); //index gets more rows at this point, while other thread is iterating over the cursor @@ -494,7 +497,7 @@ public class IncrementalIndexStorageAdapterTest Sequence cursors = sa.makeCursors( null, - new Interval(timestamp - 60_000, timestamp + 60_000), + Intervals.utc(timestamp - 60_000, timestamp + 60_000), VirtualColumns.EMPTY, Granularities.ALL, false, @@ -511,12 +514,9 @@ public class IncrementalIndexStorageAdapterTest @Override public Object apply(Cursor cursor) { - DimensionSelector dimSelector1A = cursor.makeDimensionSelector( - new DefaultDimensionSpec( - "billy", - "billy" - ) - ); + DimensionSelector dimSelector1A = cursor + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec("billy", "billy")); int cardinalityA = dimSelector1A.getValueCardinality(); //index gets more rows at this point, while other thread is iterating over the cursor @@ -533,12 +533,9 @@ public class IncrementalIndexStorageAdapterTest throw new RuntimeException(ex); } - DimensionSelector dimSelector1B = cursor.makeDimensionSelector( - new DefaultDimensionSpec( - "billy", - "billy" - ) - ); + DimensionSelector dimSelector1B = cursor + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec("billy", "billy")); //index gets more rows at this point, while other thread is iterating over the cursor try { index.add( @@ -560,19 +557,13 @@ public class IncrementalIndexStorageAdapterTest throw new RuntimeException(ex); } - DimensionSelector dimSelector1C = cursor.makeDimensionSelector( - new DefaultDimensionSpec( - "billy", - "billy" - ) - ); + 
DimensionSelector dimSelector1C = cursor + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec("billy", "billy")); - DimensionSelector dimSelector2D = cursor.makeDimensionSelector( - new DefaultDimensionSpec( - "billy2", - "billy2" - ) - ); + DimensionSelector dimSelector2D = cursor + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec("billy2", "billy2")); //index gets more rows at this point, while other thread is iterating over the cursor try { index.add( @@ -594,12 +585,9 @@ public class IncrementalIndexStorageAdapterTest throw new RuntimeException(ex); } - DimensionSelector dimSelector3E = cursor.makeDimensionSelector( - new DefaultDimensionSpec( - "billy3", - "billy3" - ) - ); + DimensionSelector dimSelector3E = cursor + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec("billy3", "billy3")); int rowNumInCursor = 0; // and then, cursoring continues in the other thread diff --git a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java index aaee24f5353..e283efed090 100644 --- a/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/incremental/IncrementalIndexTest.java @@ -37,7 +37,6 @@ import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory; import io.druid.query.filter.SelectorDimFilter; import io.druid.segment.CloserRule; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -152,14 +151,14 @@ public class IncrementalIndexTest IncrementalIndex index = closer.closeLater(indexCreator.createIndex()); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ) ); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ) @@ -172,7 +171,7 @@ public class IncrementalIndexTest IncrementalIndex index = closer.closeLater(indexCreator.createIndex()); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ) @@ -185,21 +184,21 @@ public class IncrementalIndexTest IncrementalIndex index = closer.closeLater(indexCreator.createIndex()); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ) ); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "C", "joe", "B") ) ); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ) @@ -212,7 +211,7 @@ public class IncrementalIndexTest IncrementalIndex index = closer.closeLater(indexCreator.createIndex()); index.add( new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("string", "float", "long", "double"), ImmutableMap.of( "string", Arrays.asList("A", null, ""), @@ -235,7 
+234,7 @@ public class IncrementalIndexTest public void sameRow() throws IndexSizeExceededException { MapBasedInputRow row = new MapBasedInputRow( - new DateTime().minus(1).getMillis(), + System.currentTimeMillis() - 1, Lists.newArrayList("billy", "joe"), ImmutableMap.of("billy", "A", "joe", "B") ); diff --git a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java index 446d5a2089f..f6ecc26c13c 100644 --- a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java +++ b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java @@ -33,6 +33,7 @@ import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -338,7 +339,7 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark ) ); final long timestamp = System.currentTimeMillis(); - final Interval queryInterval = new Interval("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z"); + final Interval queryInterval = Intervals.of("1900-01-01T00:00:00Z/2900-01-01T00:00:00Z"); final List> indexFutures = new LinkedList<>(); final List> queryFutures = new LinkedList<>(); final Segment incrementalIndexSegment = new IncrementalIndexSegment(incrementalIndex, null); diff --git a/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java b/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java index ef860cc3a4d..d2a27b3c833 100644 --- a/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java +++ b/processing/src/test/java/io/druid/segment/virtual/VirtualColumnsTest.java @@ -65,6 +65,28 @@ public class VirtualColumnsTest @Rule public ExpectedException expectedException = ExpectedException.none(); + @Test + public void testExists() + { + final VirtualColumns virtualColumns = makeVirtualColumns(); + + Assert.assertTrue(virtualColumns.exists("expr")); + Assert.assertTrue(virtualColumns.exists("foo")); + Assert.assertTrue(virtualColumns.exists("foo.5")); + Assert.assertFalse(virtualColumns.exists("bar")); + } + + @Test + public void testNonExistentSelector() + { + final VirtualColumns virtualColumns = makeVirtualColumns(); + + expectedException.expect(IllegalArgumentException.class); + expectedException.expectMessage("No such virtual column[bar]"); + + virtualColumns.makeObjectColumnSelector("bar", null); + } + @Test public void testMakeSelectors() { @@ -406,7 +428,8 @@ public class VirtualColumnsTest public DoubleColumnSelector makeDoubleColumnSelector(String columnName, ColumnSelectorFactory factory) { final ColumnValueSelector selector = makeLongColumnSelector(columnName, factory); - return new TestDoubleColumnSelector() { + return new TestDoubleColumnSelector() + { @Override public double getDouble() diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index 8f845acd1a5..e843963e00c 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -48,6 +48,7 @@ import 
io.druid.client.selector.ServerSelector; import io.druid.concurrent.Execs; import io.druid.guice.annotations.BackgroundCaching; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.BaseSequence; @@ -329,7 +330,7 @@ public class CachingClusteredClient implements QuerySegmentWalker long intervalStart = holderInterval.getStartMillis(); if (!uncoveredIntervalsOverflowed && startMillis != intervalStart) { if (uncoveredIntervalsLimit > uncoveredIntervals.size()) { - uncoveredIntervals.add(new Interval(startMillis, intervalStart)); + uncoveredIntervals.add(Intervals.utc(startMillis, intervalStart)); } else { uncoveredIntervalsOverflowed = true; } @@ -339,7 +340,7 @@ public class CachingClusteredClient implements QuerySegmentWalker if (!uncoveredIntervalsOverflowed && startMillis < endMillis) { if (uncoveredIntervalsLimit > uncoveredIntervals.size()) { - uncoveredIntervals.add(new Interval(startMillis, endMillis)); + uncoveredIntervals.add(Intervals.utc(startMillis, endMillis)); } else { uncoveredIntervalsOverflowed = true; } diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index 5c1dfef58d9..4cc00a07196 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -22,7 +22,6 @@ package io.druid.client; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.core.ObjectCodec; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.type.TypeFactory; @@ -51,6 +50,7 @@ import io.druid.java.util.common.guava.BaseSequence; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.logger.Logger; import io.druid.query.BySegmentResultValueClass; import io.druid.query.Query; @@ -117,7 +117,10 @@ public class DirectDruidClient implements QueryRunner private final AtomicInteger openConnections; private final boolean isSmile; - public static > QueryType withDefaultTimeoutAndMaxScatterGatherBytes(final QueryType query, ServerConfig serverConfig) + public static > QueryType withDefaultTimeoutAndMaxScatterGatherBytes( + final QueryType query, + ServerConfig serverConfig + ) { return (QueryType) QueryContexts.withMaxScatterGatherBytes( QueryContexts.withDefaultTimeout( @@ -251,9 +254,7 @@ public class DirectDruidClient implements QueryRunner if (responseContext != null) { context.putAll( objectMapper.>readValue( - responseContext, new TypeReference>() - { - } + responseContext, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ) ); } diff --git a/server/src/main/java/io/druid/client/HttpServerInventoryView.java b/server/src/main/java/io/druid/client/HttpServerInventoryView.java index 201276ed473..71adf3493ad 100644 --- a/server/src/main/java/io/druid/client/HttpServerInventoryView.java +++ b/server/src/main/java/io/druid/client/HttpServerInventoryView.java @@ -68,6 +68,7 @@ import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.InputStream; import java.net.URL; +import java.util.List; 
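// Illustrative aside, not part of the patch: many hunks in this diff replace new DateTime() and
// new Interval(long, long) with DateTimes.nowUtc(), DateTimes.utc(...), Intervals.utc(...) and
// Intervals.of(...). Those helpers produce UTC-based values instead of relying on the JVM's
// default time zone. A minimal stand-alone sketch of the same idea using plain joda-time (the
// class and method names below are illustrative, not the Druid utility classes themselves):
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

class UtcTimeSketch
{
  // Roughly what Intervals.utc(startMillis, endMillis) provides: an interval whose chronology is
  // explicitly UTC, so its meaning does not depend on the server's default zone.
  static Interval utcInterval(long startMillis, long endMillis)
  {
    return new Interval(startMillis, endMillis, ISOChronology.getInstanceUTC());
  }

  // Roughly what DateTimes.nowUtc() provides: the current instant with a UTC chronology.
  static DateTime nowUtc()
  {
    return DateTime.now(DateTimeZone.UTC);
  }
}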
import java.util.Map; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; @@ -104,10 +105,10 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer private volatile ExecutorService executor; - // a queue of queryable server names for which worker threads in executor initiate the segment list call i.e. - // DruidServerHolder.updateSegmentsListAsync(..) which updates the segment list asynchronously and adds itself - // to this queue again for next update. - private final BlockingQueue queue = new LinkedBlockingDeque<>(); + // the work queue, all items in this are sequentially processed by main thread setup in start() + // used to call inventoryInitialized on all SegmentCallbacks and + // for keeping segment list for each queryable server uptodate. + private final BlockingQueue queue = new LinkedBlockingDeque<>(); private final HttpClient httpClient; private final ObjectMapper smileMapper; @@ -161,10 +162,7 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer while (!Thread.interrupted() && lifecycleLock.awaitStarted(1, TimeUnit.MILLISECONDS)) { try { - DruidServerHolder holder = servers.get(queue.take()); - if (holder != null) { - holder.updateSegmentsListAsync(); - } + queue.take().run(); } catch (InterruptedException ex) { log.info("main thread interrupted, served segments list is not synced anymore."); @@ -184,17 +182,27 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer druidNodeDiscovery.registerListener( new DruidNodeDiscovery.Listener() { + private volatile boolean initialized = false; @Override - public void nodeAdded(DiscoveryDruidNode node) + public void nodesAdded(List nodes) { - serverAddedOrUpdated(toDruidServer(node)); + nodes.forEach( + node -> serverAddedOrUpdated(toDruidServer(node)) + ); + + if (!initialized) { + initialized = true; + queue.add(HttpServerInventoryView.this::serverInventoryInitialized); + } } @Override - public void nodeRemoved(DiscoveryDruidNode node) + public void nodesRemoved(List nodes) { - serverRemoved(toDruidServer(node)); + nodes.forEach( + node -> serverRemoved(toDruidServer(node)) + ); } private DruidServer toDruidServer(DiscoveryDruidNode node) @@ -572,7 +580,7 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer log.error(ex, "error processing segment list response from server [%s]", druidServer.getName()); } finally { - queue.add(druidServer.getName()); + addNextSyncToWorkQueue(druidServer.getName()); } } @@ -611,7 +619,7 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer } } finally { - queue.add(druidServer.getName()); + addNextSyncToWorkQueue(druidServer.getName()); } } }, @@ -621,7 +629,7 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer return future; } catch (Throwable th) { - queue.add(druidServer.getName()); + addNextSyncToWorkQueue(druidServer.getName()); String logMsg = StringUtils.nonStrictFormat( "Fatal error while fetching segment list from server [%s].", druidServer.getName() @@ -646,6 +654,18 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer } } + private void addNextSyncToWorkQueue(final String serverId) + { + queue.add( + () -> { + DruidServerHolder holder = servers.get(serverId); + if (holder != null) { + holder.updateSegmentsListAsync(); + } + } + ); + } + private boolean hasUnstabilityTimeoutPassed() { if (isUnstable && (System.currentTimeMillis() - unstableStartTime) 
> config.getServerUnstabilityTimeout()) { diff --git a/server/src/main/java/io/druid/curator/discovery/CuratorDruidLeaderSelector.java b/server/src/main/java/io/druid/curator/discovery/CuratorDruidLeaderSelector.java new file mode 100644 index 00000000000..415360c6f17 --- /dev/null +++ b/server/src/main/java/io/druid/curator/discovery/CuratorDruidLeaderSelector.java @@ -0,0 +1,206 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.curator.discovery; + +import com.google.common.base.Preconditions; +import com.google.common.base.Throwables; +import com.metamx.emitter.EmittingLogger; +import io.druid.concurrent.Execs; +import io.druid.concurrent.LifecycleLock; +import io.druid.discovery.DruidLeaderSelector; +import io.druid.guice.annotations.Self; +import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.guava.CloseQuietly; +import io.druid.server.DruidNode; +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.recipes.leader.LeaderLatch; +import org.apache.curator.framework.recipes.leader.LeaderLatchListener; +import org.apache.curator.framework.recipes.leader.Participant; + +import javax.annotation.Nullable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +/** + */ +public class CuratorDruidLeaderSelector implements DruidLeaderSelector +{ + private static final EmittingLogger log = new EmittingLogger(CuratorDruidLeaderSelector.class); + + private final LifecycleLock lifecycleLock = new LifecycleLock(); + + private final DruidNode self; + private final CuratorFramework curator; + private final String latchPath; + + private ExecutorService listenerExecutor; + + private DruidLeaderSelector.Listener listener = null; + private final AtomicReference leaderLatch = new AtomicReference<>(); + + private volatile boolean leader = false; + private volatile int term = 0; + + public CuratorDruidLeaderSelector(CuratorFramework curator, @Self DruidNode self, String latchPath) + { + this.curator = curator; + this.self = self; + this.latchPath = latchPath; + } + + private LeaderLatch createNewLeaderLatch() + { + final LeaderLatch newLeaderLatch = new LeaderLatch( + curator, latchPath, self.getHostAndPortToUse() + ); + + newLeaderLatch.addListener( + new LeaderLatchListener() + { + @Override + public void isLeader() + { + try { + if (leader) { + log.warn("I'm being asked to become leader. But I am already the leader. Ignored event."); + return; + } + + leader = true; + term++; + listener.becomeLeader(); + } + catch (Exception ex) { + log.makeAlert(ex, "listener becomeLeader() failed. 
Unable to become leader").emit(); + + // give others a chance to become leader. + final LeaderLatch oldLatch = createNewLeaderLatch(); + CloseQuietly.close(oldLatch); + leader = false; + try { + //Small delay before starting the latch so that others waiting are chosen to become leader. + Thread.sleep(ThreadLocalRandom.current().nextInt(1000, 5000)); + leaderLatch.get().start(); + } + catch (Exception e) { + // If an exception gets thrown out here, then the node will zombie out 'cause it won't be looking for + // the latch anymore. I don't believe it's actually possible for an Exception to throw out here, but + // Curator likes to have "throws Exception" on methods so it might happen... + log.makeAlert(e, "I am a zombie").emit(); + } + } + } + + @Override + public void notLeader() + { + try { + if (!leader) { + log.warn("I'm being asked to stop being leader. But I am not the leader. Ignored event."); + return; + } + + leader = false; + listener.stopBeingLeader(); + } + catch (Exception ex) { + log.makeAlert(ex, "listener.stopBeingLeader() failed. Unable to stopBeingLeader").emit(); + } + } + }, + listenerExecutor + ); + + return leaderLatch.getAndSet(newLeaderLatch); + } + + @Nullable + @Override + public String getCurrentLeader() + { + if (!lifecycleLock.awaitStarted(1, TimeUnit.MILLISECONDS)) { + throw new ISE("not started"); + } + + try { + final LeaderLatch latch = leaderLatch.get(); + + Participant participant = latch.getLeader(); + if (participant.isLeader()) { + return participant.getId(); + } + + return null; + } + catch (Exception e) { + throw Throwables.propagate(e); + } + } + + @Override + public boolean isLeader() + { + return leader; + } + + @Override + public int localTerm() + { + return term; + } + + @Override + public void registerListener(DruidLeaderSelector.Listener listener) + { + Preconditions.checkArgument(listener != null, "listener is null."); + + if (!lifecycleLock.canStart()) { + throw new ISE("can't start."); + } + try { + this.listener = listener; + this.listenerExecutor = Execs.singleThreaded(StringUtils.format("LeaderSelector[%s]", latchPath)); + + createNewLeaderLatch(); + leaderLatch.get().start(); + + lifecycleLock.started(); + } + catch (Exception ex) { + throw Throwables.propagate(ex); + } + finally { + lifecycleLock.exitStart(); + } + } + + @Override + public void unregisterListener() + { + if (!lifecycleLock.canStop()) { + throw new ISE("can't stop."); + } + CloseQuietly.close(leaderLatch.get()); + listenerExecutor.shutdownNow(); + } +} diff --git a/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java b/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java index 5996f500adb..b4e4827760a 100644 --- a/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java +++ b/server/src/main/java/io/druid/curator/discovery/CuratorDruidNodeDiscoveryProvider.java @@ -22,6 +22,7 @@ package io.druid.curator.discovery; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; +import com.google.common.collect.ImmutableList; import com.google.inject.Inject; import io.druid.concurrent.Execs; import io.druid.concurrent.LifecycleLock; @@ -31,6 +32,7 @@ import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import 
io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.java.util.common.logger.Logger; @@ -166,6 +168,8 @@ public class CuratorDruidNodeDiscoveryProvider extends DruidNodeDiscoveryProvide private final Object lock = new Object(); + private boolean cacheInitialized = false; + NodeTypeWatcher( ExecutorService listenerExecutor, CuratorFramework curatorFramework, @@ -180,7 +184,7 @@ public class CuratorDruidNodeDiscoveryProvider extends DruidNodeDiscoveryProvide this.jsonMapper = jsonMapper; // This is required to be single threaded from Docs in PathChildrenCache; - this.cacheExecutor = Execs.singleThreaded(String.format("NodeTypeWatcher[%s]", nodeType)); + this.cacheExecutor = Execs.singleThreaded(StringUtils.format("NodeTypeWatcher[%s]", nodeType)); this.cache = new PathChildrenCache( curatorFramework, ZKPaths.makePath(basePath, nodeType), @@ -200,20 +204,14 @@ public class CuratorDruidNodeDiscoveryProvider extends DruidNodeDiscoveryProvide public void registerListener(DruidNodeDiscovery.Listener listener) { synchronized (lock) { - for (DiscoveryDruidNode node : nodes.values()) { - listenerExecutor.submit(() -> { - try { - listener.nodeAdded(node); - } - catch (Exception ex) { - log.error( - ex, - "Exception occured in DiscoveryDruidNode.nodeAdded(node=[%s]) in listener [%s].", - node, - listener - ); - } - }); + if (cacheInitialized) { + ImmutableList currNodes = ImmutableList.copyOf(nodes.values()); + safeSchedule( + () -> { + listener.nodesAdded(currNodes); + }, + "Exception occured in nodesAdded([%s]) in listener [%s].", currNodes, listener + ); } nodeListeners.add(listener); } @@ -279,8 +277,30 @@ public class CuratorDruidNodeDiscoveryProvider extends DruidNodeDiscoveryProvide break; } + case INITIALIZED: { + if (cacheInitialized) { + log.warn("cache is already initialized. ignoring [%s] event, nodeType [%s].", event.getType(), nodeType); + return; + } + + log.info("Received INITIALIZED in node watcher for type [%s].", nodeType); + + cacheInitialized = true; + + ImmutableList currNodes = ImmutableList.copyOf(nodes.values()); + for (Listener l : nodeListeners) { + safeSchedule( + () -> { + l.nodesAdded(currNodes); + }, + "Exception occured in nodesAdded([%s]) in listener [%s].", currNodes, l + ); + } + + break; + } default: { - log.error("Ignored event type [%s] for nodeType [%s] watcher.", event.getType(), nodeType); + log.info("Ignored event type [%s] for nodeType [%s] watcher.", event.getType(), nodeType); } } } @@ -290,56 +310,59 @@ public class CuratorDruidNodeDiscoveryProvider extends DruidNodeDiscoveryProvide } } + private void safeSchedule( + Runnable runnable, + String errMsgFormat, Object... 
args + ) + { + listenerExecutor.submit(() -> { + try { + runnable.run(); + } + catch (Exception ex) { + log.error(errMsgFormat, args); + } + }); + } + private void addNode(DiscoveryDruidNode druidNode) { - synchronized (lock) { - DiscoveryDruidNode prev = nodes.putIfAbsent(druidNode.getDruidNode().getHostAndPortToUse(), druidNode); - if (prev == null) { - for (DruidNodeDiscovery.Listener l : nodeListeners) { - listenerExecutor.submit(() -> { - try { - l.nodeAdded(druidNode); - } - catch (Exception ex) { - log.error( - ex, - "Exception occured in DiscoveryDruidNode.nodeAdded(node=[%s]) in listener [%s].", - druidNode, - l - ); - } - }); + DiscoveryDruidNode prev = nodes.putIfAbsent(druidNode.getDruidNode().getHostAndPortToUse(), druidNode); + if (prev == null) { + if (cacheInitialized) { + List newNode = ImmutableList.of(druidNode); + for (Listener l : nodeListeners) { + safeSchedule( + () -> { + l.nodesAdded(newNode); + }, + "Exception occured in nodeAdded(node=[%s]) in listener [%s].", druidNode, l + ); } - } else { - log.warn("Node[%s] discovered but existed already [%s].", druidNode, prev); } + } else { + log.warn("Node[%s] discovered but existed already [%s].", druidNode, prev); } } private void removeNode(DiscoveryDruidNode druidNode) { - synchronized (lock) { - DiscoveryDruidNode prev = nodes.remove(druidNode.getDruidNode().getHostAndPortToUse()); + DiscoveryDruidNode prev = nodes.remove(druidNode.getDruidNode().getHostAndPortToUse()); - if (prev == null) { - log.warn("Noticed disappearance of unknown druid node [%s:%s].", druidNode.getNodeType(), druidNode); - return; - } + if (prev == null) { + log.warn("Noticed disappearance of unknown druid node [%s:%s].", druidNode.getNodeType(), druidNode); + return; + } - for (DruidNodeDiscovery.Listener l : nodeListeners) { - listenerExecutor.submit(() -> { - try { - l.nodeRemoved(druidNode); - } - catch (Exception ex) { - log.error( - ex, - "Exception occured in DiscoveryDruidNode.nodeRemoved(node=[%s]) in listener [%s].", - druidNode, - l - ); - } - }); + if (cacheInitialized) { + List nodeRemoved = ImmutableList.of(druidNode); + for (Listener l : nodeListeners) { + safeSchedule( + () -> { + l.nodesRemoved(nodeRemoved); + }, + "Exception occured in nodeRemoved(node=[%s]) in listener [%s].", druidNode, l + ); } } } @@ -350,7 +373,7 @@ public class CuratorDruidNodeDiscoveryProvider extends DruidNodeDiscoveryProvide cache.getListenable().addListener( (client, event) -> handleChildEvent(client, event) ); - cache.start(); + cache.start(PathChildrenCache.StartMode.POST_INITIALIZED_EVENT); } catch (Exception ex) { throw Throwables.propagate(ex); diff --git a/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java b/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java index f30a2060e42..086e166d403 100644 --- a/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java +++ b/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java @@ -23,13 +23,18 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.inject.Binder; +import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; +import com.google.inject.Provider; import com.google.inject.Provides; import com.google.inject.TypeLiteral; import com.google.inject.name.Named; import com.google.inject.name.Names; +import io.druid.client.coordinator.Coordinator; +import 
io.druid.client.indexing.IndexingService; +import io.druid.discovery.DruidLeaderSelector; import io.druid.discovery.DruidNodeAnnouncer; import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.DruidBinders; @@ -38,11 +43,14 @@ import io.druid.guice.KeyHolder; import io.druid.guice.LazySingleton; import io.druid.guice.LifecycleModule; import io.druid.guice.PolyBind; +import io.druid.guice.annotations.Self; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.server.DruidNode; import io.druid.server.initialization.CuratorDiscoveryConfig; +import io.druid.server.initialization.ZkPathsConfig; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.utils.CloseableExecutorService; +import org.apache.curator.utils.ZKPaths; import org.apache.curator.x.discovery.DownInstancePolicy; import org.apache.curator.x.discovery.InstanceFilter; import org.apache.curator.x.discovery.ProviderStrategy; @@ -64,6 +72,7 @@ import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.ThreadFactory; +import java.util.function.Function; /** * The DiscoveryModule allows for the registration of Keys of DruidNode objects, which it intends to be @@ -161,6 +170,14 @@ public class DiscoveryModule implements Module binder, INTERNAL_DISCOVERY_PROP, Key.get(DruidNodeDiscoveryProvider.class), CURATOR_KEY ); + PolyBind.createChoiceWithDefault( + binder, INTERNAL_DISCOVERY_PROP, Key.get(DruidLeaderSelector.class, () -> Coordinator.class), CURATOR_KEY + ); + + PolyBind.createChoiceWithDefault( + binder, INTERNAL_DISCOVERY_PROP, Key.get(DruidLeaderSelector.class, () -> IndexingService.class), CURATOR_KEY + ); + PolyBind.optionBinder(binder, Key.get(DruidNodeDiscoveryProvider.class)) .addBinding(CURATOR_KEY) .to(CuratorDruidNodeDiscoveryProvider.class) @@ -170,6 +187,20 @@ public class DiscoveryModule implements Module .addBinding(CURATOR_KEY) .to(CuratorDruidNodeAnnouncer.class) .in(LazySingleton.class); + + PolyBind.optionBinder(binder, Key.get(DruidLeaderSelector.class, Coordinator.class)) + .addBinding(CURATOR_KEY) + .toProvider(new DruidLeaderSelectorProvider( + (zkPathsConfig) -> ZKPaths.makePath(zkPathsConfig.getCoordinatorPath(), "_COORDINATOR")) + ) + .in(LazySingleton.class); + + PolyBind.optionBinder(binder, Key.get(DruidLeaderSelector.class, IndexingService.class)) + .addBinding(CURATOR_KEY) + .toProvider(new DruidLeaderSelectorProvider( + (zkPathsConfig) -> ZKPaths.makePath(zkPathsConfig.getOverlordPath(), "_OVERLORD")) + ) + .in(LazySingleton.class); } @Provides @@ -476,4 +507,34 @@ public class DiscoveryModule implements Module // nothing } } + + private static class DruidLeaderSelectorProvider implements Provider + { + @Inject + private CuratorFramework curatorFramework; + + @Inject + @Self + private DruidNode druidNode; + + @Inject + private ZkPathsConfig zkPathsConfig; + + private final Function latchPathFn; + + DruidLeaderSelectorProvider(Function latchPathFn) + { + this.latchPathFn = latchPathFn; + } + + @Override + public DruidLeaderSelector get() + { + return new CuratorDruidLeaderSelector( + curatorFramework, + druidNode, + latchPathFn.apply(zkPathsConfig) + ); + } + } } diff --git a/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java b/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java index eb356271a27..e174b2ee14b 100644 --- a/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java +++ 
b/server/src/main/java/io/druid/curator/discovery/ServerDiscoverySelector.java @@ -54,11 +54,18 @@ public class ServerDiscoverySelector implements DiscoverySelector public Server apply(final ServiceInstance instance) { Preconditions.checkState( - instance.getPort() >= 0 || instance.getSslPort() >= 0, + instance.getPort() >= 0 || (instance.getSslPort() != null && instance.getSslPort() >= 0), "WTH?! Both port and sslPort not set" ); - final int port = instance.getSslPort() >= 0 ? instance.getSslPort() : instance.getPort(); - final String scheme = instance.getSslPort() >= 0 ? "https" : "http"; + final int port; + final String scheme; + if (instance.getSslPort() == null) { + port = instance.getPort(); + scheme = "http"; + } else { + port = instance.getSslPort() >= 0 ? instance.getSslPort() : instance.getPort(); + scheme = instance.getSslPort() >= 0 ? "https" : "http"; + } return new Server() { @Override diff --git a/server/src/main/java/io/druid/discovery/DruidLeaderSelector.java b/server/src/main/java/io/druid/discovery/DruidLeaderSelector.java new file mode 100644 index 00000000000..02d63dd32a8 --- /dev/null +++ b/server/src/main/java/io/druid/discovery/DruidLeaderSelector.java @@ -0,0 +1,87 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.discovery; + +import javax.annotation.Nullable; + +/** + * Interface for supporting Overlord and Coordinator leader elections in TaskMaster and DruidCoordinator, + * which expect an appropriate implementation to be available in Guice, annotated with @IndexingService and @Coordinator + * respectively. + * + * Usage is as follows. + * On lifecycle start: + * druidLeaderSelector.registerListener(myListener); + * + * On lifecycle stop: + * druidLeaderSelector.unregisterListener(); + */ +public interface DruidLeaderSelector +{ + + /** + * Get the ID of the current leader. Returns null if it can't find the leader. + * Note that it is possible for leadership to change right after this call returns, so the caller may get the wrong + * leader. + */ + @Nullable + String getCurrentLeader(); + + /** + * Returns true if this node is the elected leader from the underlying system's point of view. For example, if Curator + * is used to implement this, then true is returned when Curator believes this node to be the leader. + * Note that it is possible for leadership to change right after this call returns, so the caller may get the wrong + * status. + */ + boolean isLeader(); + + /** + * Implementations increment this every time this node becomes the leader. This allows users to start a long-running + * task when they become the leader and to intermittently check that they are still the leader from the same + * term in which they started. The DruidCoordinator class uses it to do intermittent checks and stop the activity + * as needed.
+ */ + int localTerm(); + + /** + * Register the listener for watching leadership notifications. It should only be called once. + */ + void registerListener(Listener listener); + + /** + * Unregisters the listener. + */ + void unregisterListener(); + + interface Listener + { + /** + * Notification that this node should start activities to be done by the leader. If this method throws an + * exception, the implementation will try to resign its leadership in the underlying system such as Curator. + */ + void becomeLeader(); + + /** + * Notification that this node should stop activities which are done by the leader. If this method throws an + * exception, an alert is created. + */ + void stopBeingLeader(); + } +} diff --git a/server/src/main/java/io/druid/discovery/DruidNodeDiscovery.java b/server/src/main/java/io/druid/discovery/DruidNodeDiscovery.java index 7b051ccbf00..75753695517 100644 --- a/server/src/main/java/io/druid/discovery/DruidNodeDiscovery.java +++ b/server/src/main/java/io/druid/discovery/DruidNodeDiscovery.java @@ -20,6 +20,7 @@ package io.druid.discovery; import java.util.Collection; +import java.util.List; /** * Interface for discovering Druid Nodes announced by DruidNodeAnnouncer. @@ -29,9 +30,23 @@ public interface DruidNodeDiscovery Collection<DiscoveryDruidNode> getAllNodes(); void registerListener(Listener listener); + /** + * Listener for watching nodes in a DruidNodeDiscovery instance obtained via DruidNodeDiscoveryProvider.getXXX(). + * DruidNodeDiscovery implementations should assume that Listener is not thread-safe and never call methods in + * Listener concurrently. + * + * Implementations of Listener must not do any time-consuming work or block in any of the methods. + */ interface Listener { - void nodeAdded(DiscoveryDruidNode node); - void nodeRemoved(DiscoveryDruidNode node); + /** + * Called with the list of nodes added. + * The first call to this method is also a signal that the underlying cache in the DruidNodeDiscovery implementation + * has been initialized. + * @param nodes + */ + void nodesAdded(List<DiscoveryDruidNode> nodes); + + void nodesRemoved(List<DiscoveryDruidNode> nodes); + } } diff --git a/server/src/main/java/io/druid/discovery/DruidNodeDiscoveryProvider.java b/server/src/main/java/io/druid/discovery/DruidNodeDiscoveryProvider.java index 03584095836..0f86bfc391f 100644 --- a/server/src/main/java/io/druid/discovery/DruidNodeDiscoveryProvider.java +++ b/server/src/main/java/io/druid/discovery/DruidNodeDiscoveryProvider.java @@ -19,6 +19,7 @@ package io.druid.discovery; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.druid.java.util.common.IAE; @@ -27,6 +28,7 @@ import io.druid.java.util.common.logger.Logger; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -63,7 +65,8 @@ public abstract class DruidNodeDiscoveryProvider WorkerNodeService.DISCOVERY_SERVICE_KEY, ImmutableSet.of(NODE_TYPE_MM) ); - private final Map serviceDiscoveryMap = new ConcurrentHashMap<>(SERVICE_TO_NODE_TYPES.size()); + private final ConcurrentHashMap serviceDiscoveryMap = new ConcurrentHashMap<>( + SERVICE_TO_NODE_TYPES.size()); /** * Get DruidNodeDiscovery instance to discover nodes of given nodeType. @@ -73,69 +76,47 @@ public abstract class DruidNodeDiscoveryProvider /** * Get DruidNodeDiscovery instance to discover nodes that announce given service in its metadata.
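// Hedged usage sketch for the DruidLeaderSelector contract defined above, following the
// register/unregister lifecycle its javadoc describes. MyLeaderAwareService and doWorkWhileLeader
// are hypothetical names used only for illustration; the DruidLeaderSelector API itself is the one
// introduced by this patch.
import io.druid.discovery.DruidLeaderSelector;

class MyLeaderAwareService
{
  private final DruidLeaderSelector leaderSelector;

  MyLeaderAwareService(DruidLeaderSelector leaderSelector)
  {
    this.leaderSelector = leaderSelector;
  }

  void start()
  {
    // registerListener should only be called once per selector instance.
    leaderSelector.registerListener(
        new DruidLeaderSelector.Listener()
        {
          @Override
          public void becomeLeader()
          {
            // Capture the term so long-running work can later verify it still holds leadership
            // from the same term, which is the pattern localTerm() is documented for.
            doWorkWhileLeader(leaderSelector.localTerm());
          }

          @Override
          public void stopBeingLeader()
          {
            // Stop or park leader-only activities here.
          }
        }
    );
  }

  void stop()
  {
    leaderSelector.unregisterListener();
  }

  private void doWorkWhileLeader(int term)
  {
    // Periodically confirm leaderSelector.isLeader() && leaderSelector.localTerm() == term
    // before doing leader-only work.
  }
}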
*/ - public synchronized DruidNodeDiscovery getForService(String serviceName) + public DruidNodeDiscovery getForService(String serviceName) { - ServiceListener nodeDiscovery = serviceDiscoveryMap.get(serviceName); + return serviceDiscoveryMap.compute( + serviceName, + (k, v) -> { + if (v != null) { + return v; + } - if (nodeDiscovery == null) { - Set nodeTypesToWatch = DruidNodeDiscoveryProvider.SERVICE_TO_NODE_TYPES.get(serviceName); - if (nodeTypesToWatch == null) { - throw new IAE("Unknown service [%s].", serviceName); - } + Set nodeTypesToWatch = DruidNodeDiscoveryProvider.SERVICE_TO_NODE_TYPES.get(serviceName); + if (nodeTypesToWatch == null) { + throw new IAE("Unknown service [%s].", serviceName); + } - nodeDiscovery = new ServiceListener(serviceName); - for (String nodeType : nodeTypesToWatch) { - getForNodeType(nodeType).registerListener(nodeDiscovery); - } - serviceDiscoveryMap.put(serviceName, nodeDiscovery); - } - - return nodeDiscovery; + ServiceDruidNodeDiscovery serviceDiscovery = new ServiceDruidNodeDiscovery(serviceName); + for (String nodeType : nodeTypesToWatch) { + getForNodeType(nodeType).registerListener(serviceDiscovery.nodeTypeListener()); + } + return serviceDiscovery; + } + ); } - private static class ServiceListener implements DruidNodeDiscovery, DruidNodeDiscovery.Listener + private static class ServiceDruidNodeDiscovery implements DruidNodeDiscovery { + private static final Logger log = new Logger(ServiceDruidNodeDiscovery.class); + private final String service; private final Map nodes = new ConcurrentHashMap<>(); private final List listeners = new ArrayList<>(); - ServiceListener(String service) + private final Object lock = new Object(); + + private Set uninitializedNodeTypeListeners = new HashSet<>(); + + ServiceDruidNodeDiscovery(String service) { this.service = service; } - @Override - public synchronized void nodeAdded(DiscoveryDruidNode node) - { - if (node.getServices().containsKey(service)) { - DiscoveryDruidNode prev = nodes.putIfAbsent(node.getDruidNode().getHostAndPortToUse(), node); - - if (prev == null) { - for (Listener listener : listeners) { - listener.nodeAdded(node); - } - } else { - log.warn("Node[%s] discovered but already exists [%s].", node, prev); - } - } else { - log.warn("Node[%s] discovered but doesn't have service[%s]. 
Ignored.", node, service); - } - } - - @Override - public synchronized void nodeRemoved(DiscoveryDruidNode node) - { - DiscoveryDruidNode prev = nodes.remove(node.getDruidNode().getHostAndPortToUse()); - if (prev != null) { - for (Listener listener : listeners) { - listener.nodeRemoved(node); - } - } else { - log.warn("Node[%s] disappeared but was unknown for service listener [%s].", node, service); - } - } - @Override public Collection getAllNodes() { @@ -143,12 +124,91 @@ public abstract class DruidNodeDiscoveryProvider } @Override - public synchronized void registerListener(Listener listener) + public void registerListener(Listener listener) { - for (DiscoveryDruidNode node : nodes.values()) { - listener.nodeAdded(node); + synchronized (lock) { + if (uninitializedNodeTypeListeners.isEmpty()) { + listener.nodesAdded(ImmutableList.copyOf(nodes.values())); + } + listeners.add(listener); + } + } + + NodeTypeListener nodeTypeListener() + { + NodeTypeListener nodeListener = new NodeTypeListener(); + uninitializedNodeTypeListeners.add(nodeListener); + return nodeListener; + } + + class NodeTypeListener implements DruidNodeDiscovery.Listener + { + @Override + public void nodesAdded(List nodesDiscovered) + { + synchronized (lock) { + ImmutableList.Builder builder = ImmutableList.builder(); + for (DiscoveryDruidNode node : nodesDiscovered) { + if (node.getServices().containsKey(service)) { + DiscoveryDruidNode prev = nodes.putIfAbsent(node.getDruidNode().getHostAndPortToUse(), node); + + if (prev == null) { + builder.add(node); + } else { + log.warn("Node[%s] discovered but already exists [%s].", node, prev); + } + } else { + log.warn("Node[%s] discovered but doesn't have service[%s]. Ignored.", node, service); + } + } + + ImmutableList newNodesAdded = null; + if (uninitializedNodeTypeListeners.isEmpty()) { + newNodesAdded = builder.build(); + } else if (uninitializedNodeTypeListeners.remove(this) && uninitializedNodeTypeListeners.isEmpty()) { + newNodesAdded = ImmutableList.copyOf(nodes.values()); + } + + if (newNodesAdded != null) { + for (Listener listener : listeners) { + try { + listener.nodesAdded(newNodesAdded); + } + catch (Exception ex) { + log.error(ex, "Listener[%s].nodesAdded(%s) threw exception. Ignored.", listener, newNodesAdded); + } + } + } + } + } + + @Override + public void nodesRemoved(List nodesDisappeared) + { + synchronized (lock) { + ImmutableList.Builder builder = ImmutableList.builder(); + for (DiscoveryDruidNode node : nodesDisappeared) { + DiscoveryDruidNode prev = nodes.remove(node.getDruidNode().getHostAndPortToUse()); + if (prev != null) { + builder.add(node); + } else { + log.warn("Node[%s] disappeared but was unknown for service listener [%s].", node, service); + } + } + + if (uninitializedNodeTypeListeners.isEmpty()) { + ImmutableList nodesRemoved = builder.build(); + for (Listener listener : listeners) { + try { + listener.nodesRemoved(nodesRemoved); + } + catch (Exception ex) { + log.error(ex, "Listener[%s].nodesRemoved(%s) threw exception. 
Ignored.", listener, nodesRemoved); + } + } + } + } } - listeners.add(listener); } } } diff --git a/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java b/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java index 034bd6ad2ab..6f1a140ded1 100644 --- a/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java +++ b/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java @@ -22,13 +22,12 @@ package io.druid.indexer; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; - +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.SQLMetadataConnector; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.DateTime; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.IDBI; import org.skife.jdbi.v2.PreparedBatch; @@ -71,7 +70,7 @@ public class SQLMetadataStorageUpdaterJobHandler implements MetadataStorageUpdat new ImmutableMap.Builder() .put("id", segment.getIdentifier()) .put("dataSource", segment.getDataSource()) - .put("created_date", new DateTime().toString()) + .put("created_date", DateTimes.nowUtc().toString()) .put("start", segment.getInterval().getStart().toString()) .put("end", segment.getInterval().getEnd().toString()) .put("partitioned", (segment.getShardSpec() instanceof NoneShardSpec) ? false : true) diff --git a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java index c1750101c1e..53035f94120 100644 --- a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java +++ b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java @@ -32,12 +32,13 @@ import com.google.common.collect.Sets; import com.google.common.hash.Hashing; import com.google.common.io.BaseEncoding; import com.google.inject.Inject; -import io.druid.common.utils.JodaUtils; import io.druid.indexing.overlord.DataSourceMetadata; import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator; import io.druid.indexing.overlord.SegmentPublishResult; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.logger.Logger; @@ -49,7 +50,6 @@ import io.druid.timeline.partition.LinearShardSpec; import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.NumberedShardSpec; import io.druid.timeline.partition.PartitionChunk; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.skife.jdbi.v2.FoldController; import org.skife.jdbi.v2.Folder3; @@ -315,8 +315,9 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor // Find which segments are used (i.e. not overshadowed). 
final Set usedSegments = Sets.newHashSet(); - for (TimelineObjectHolder holder : VersionedIntervalTimeline.forSegments(segments) - .lookupWithIncompletePartitions(JodaUtils.ETERNITY)) { + List> segmentHolders = + VersionedIntervalTimeline.forSegments(segments).lookupWithIncompletePartitions(Intervals.ETERNITY); + for (TimelineObjectHolder holder : segmentHolders) { for (PartitionChunk chunk : holder.getObject()) { usedSegments.add(chunk.getObject()); } @@ -568,7 +569,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor ) .bind("id", newIdentifier.getIdentifierAsString()) .bind("dataSource", dataSource) - .bind("created_date", new DateTime().toString()) + .bind("created_date", DateTimes.nowUtc().toString()) .bind("start", interval.getStart().toString()) .bind("end", interval.getEnd().toString()) .bind("sequence_name", sequenceName) @@ -622,7 +623,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor ) .bind("id", segment.getIdentifier()) .bind("dataSource", segment.getDataSource()) - .bind("created_date", new DateTime().toString()) + .bind("created_date", DateTimes.nowUtc().toString()) .bind("start", segment.getInterval().getStart().toString()) .bind("end", segment.getInterval().getEnd().toString()) .bind("partitioned", (segment.getShardSpec() instanceof NoneShardSpec) ? false : true) @@ -765,7 +766,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor ) ) .bind("dataSource", dataSource) - .bind("created_date", new DateTime().toString()) + .bind("created_date", DateTimes.nowUtc().toString()) .bind("commit_metadata_payload", newCommitMetadataBytes) .bind("commit_metadata_sha1", newCommitMetadataSha1) .execute(); diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java index a3fa59e0def..7d3f972d8bb 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java @@ -39,6 +39,7 @@ import io.druid.client.DruidServer; import io.druid.concurrent.Execs; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; @@ -108,7 +109,7 @@ public class SQLMetadataRuleManager implements MetadataRuleManager ) ) ); - final String version = new DateTime().toString(); + final String version = DateTimes.nowUtc().toString(); handle.createStatement( StringUtils.format( "INSERT INTO %s (id, dataSource, version, payload) VALUES (:id, :dataSource, :version, :payload)", @@ -365,7 +366,7 @@ public class SQLMetadataRuleManager implements MetadataRuleManager @Override public Void inTransaction(Handle handle, TransactionStatus transactionStatus) throws Exception { - final DateTime auditTime = DateTime.now(); + final DateTime auditTime = DateTimes.nowUtc(); auditManager.doAudit( AuditEntry.builder() .key(dataSource) diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java index 0c0255384ef..7308e2a46cd 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java @@ -37,6 +37,8 @@ import com.metamx.emitter.EmittingLogger; import 
io.druid.client.DruidDataSource; import io.druid.concurrent.Execs; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; @@ -223,11 +225,10 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager ); final List segments = Lists.newArrayList(); - for (TimelineObjectHolder objectHolder : segmentTimeline.lookup( - new Interval( - "0000-01-01/3000-01-01" - ) - )) { + List> timelineObjectHolders = segmentTimeline.lookup( + Intervals.of("0000-01-01/3000-01-01") + ); + for (TimelineObjectHolder objectHolder : timelineObjectHolders) { for (PartitionChunk partitionChunk : objectHolder.getObject()) { segments.add(partitionChunk.getObject()); } @@ -504,7 +505,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager if (dataSource == null) { dataSource = new DruidDataSource( datasourceName, - ImmutableMap.of("created", new DateTime().toString()) + ImmutableMap.of("created", DateTimes.nowUtc().toString()) ); Object shouldBeNull = newDataSources.put( diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java index 4f95a2ad898..6bee04c6468 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java @@ -22,12 +22,11 @@ package io.druid.metadata; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.inject.Inject; - +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.DateTime; import org.skife.jdbi.v2.DBI; import org.skife.jdbi.v2.Handle; import org.skife.jdbi.v2.tweak.HandleCallback; @@ -68,7 +67,7 @@ public class SQLMetadataSegmentPublisher implements MetadataSegmentPublisher publishSegment( segment.getIdentifier(), segment.getDataSource(), - new DateTime().toString(), + DateTimes.nowUtc().toString(), segment.getInterval().getStart().toString(), segment.getInterval().getEnd().toString(), (segment.getShardSpec() instanceof NoneShardSpec) ? 
false : true, diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java index 6d65fbf9ca5..c7436e7961b 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java @@ -27,16 +27,14 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.inject.Inject; - import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; import io.druid.indexing.overlord.supervisor.SupervisorSpec; import io.druid.indexing.overlord.supervisor.VersionedSupervisorSpec; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; - -import org.joda.time.DateTime; import org.skife.jdbi.v2.FoldController; import org.skife.jdbi.v2.Folder3; import org.skife.jdbi.v2.Handle; @@ -95,7 +93,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager ) ) .bind("spec_id", id) - .bind("created_date", new DateTime().toString()) + .bind("created_date", DateTimes.nowUtc().toString()) .bind("payload", jsonMapper.writeValueAsBytes(spec)) .execute(); diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java index 118b53221fe..398511460d8 100644 --- a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java +++ b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java @@ -27,7 +27,7 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.PeekingIterator; import com.google.common.collect.Sets; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -104,7 +104,7 @@ public class ArbitraryGranularitySpec implements GranularitySpec public Optional bucketInterval(DateTime dt) { // First interval with start time ≤ dt - final Interval interval = intervals.floor(new Interval(dt, new DateTime(JodaUtils.MAX_INSTANT))); + final Interval interval = intervals.floor(new Interval(dt, DateTimes.MAX)); if (interval != null && interval.contains(dt)) { return Optional.of(interval); diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java index a2492e483f5..fe3c4886f5f 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java @@ -45,6 +45,7 @@ import io.druid.common.guava.ThreadRenamingCallable; import io.druid.concurrent.Execs; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; @@ -71,7 +72,6 @@ import io.druid.server.coordination.DataSegmentAnnouncer; import io.druid.timeline.DataSegment; import 
io.druid.timeline.VersionedIntervalTimeline; import org.apache.commons.io.FileUtils; -import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nullable; @@ -716,7 +716,7 @@ public class AppenderatorImpl implements Appenderator private void resetNextFlush() { - nextFlush = new DateTime().plus(tuningConfig.getIntermediatePersistPeriod()).getMillis(); + nextFlush = DateTimes.nowUtc().plus(tuningConfig.getIntermediatePersistPeriod()).getMillis(); } /** diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java index 678fb2e7761..c3e8678ba30 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java @@ -35,6 +35,7 @@ import io.druid.common.guava.ThreadRenamingCallable; import io.druid.concurrent.Execs; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; @@ -243,14 +244,15 @@ public class AppenderatorPlumber implements Plumber final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity(); final VersioningPolicy versioningPolicy = config.getVersioningPolicy(); - final long truncatedTime = segmentGranularity.bucketStart(new DateTime(timestamp)).getMillis(); + DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(timestamp)); + final long truncatedTime = truncatedDateTime.getMillis(); SegmentIdentifier retVal = segments.get(truncatedTime); if (retVal == null) { final Interval interval = new Interval( - new DateTime(truncatedTime), - segmentGranularity.increment(new DateTime(truncatedTime)) + truncatedDateTime, + segmentGranularity.increment(truncatedDateTime) ); retVal = new SegmentIdentifier( @@ -335,12 +337,12 @@ public class AppenderatorPlumber implements Plumber final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity(); final Period windowPeriod = config.getWindowPeriod(); - final DateTime truncatedNow = segmentGranularity.bucketStart(new DateTime()); + final DateTime truncatedNow = segmentGranularity.bucketStart(DateTimes.nowUtc()); final long windowMillis = windowPeriod.toStandardDuration().getMillis(); log.info( "Expect to run at [%s]", - new DateTime().plus( + DateTimes.nowUtc().plus( new Duration( System.currentTimeMillis(), segmentGranularity.increment(truncatedNow).getMillis() + windowMillis @@ -393,14 +395,7 @@ public class AppenderatorPlumber implements Plumber final long windowMillis = windowPeriod.toStandardDuration().getMillis(); log.info("Starting merge and push."); DateTime minTimestampAsDate = segmentGranularity.bucketStart( - new DateTime( - Math.max( - windowMillis, - rejectionPolicy.getCurrMaxTime() - .getMillis() - ) - - windowMillis - ) + DateTimes.utc(Math.max(windowMillis, rejectionPolicy.getCurrMaxTime().getMillis()) - windowMillis) ); long minTimestamp = minTimestampAsDate.getMillis(); @@ -426,7 +421,7 @@ public class AppenderatorPlumber implements Plumber log.info( "Skipping persist and merge for entry [%s] : Start time [%s] >= [%s] min timestamp required in this run. 
Segment will be picked up in a future run.", segment, - new DateTime(intervalStart), + DateTimes.utc(intervalStart), minTimestampAsDate ); } diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java index 6e462f6b309..8b280dd4ec4 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java @@ -41,6 +41,7 @@ import io.druid.data.input.InputRow; import io.druid.data.input.impl.MapInputRowParser; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; +import io.druid.java.util.common.DateTimes; import io.druid.server.metrics.EventReceiverFirehoseMetric; import io.druid.server.metrics.EventReceiverFirehoseRegister; import org.joda.time.DateTime; @@ -323,7 +324,7 @@ public class EventReceiverFirehoseFactory implements FirehoseFactory apply(final Cursor cursor) { - final LongColumnSelector timestampColumnSelector = cursor.makeLongColumnSelector(Column.TIME_COLUMN_NAME); + final LongColumnSelector timestampColumnSelector = + cursor.getColumnSelectorFactory().makeLongColumnSelector(Column.TIME_COLUMN_NAME); final Map dimSelectors = Maps.newHashMap(); for (String dim : dims) { - final DimensionSelector dimSelector = cursor.makeDimensionSelector( - new DefaultDimensionSpec(dim, dim) - ); + final DimensionSelector dimSelector = cursor + .getColumnSelectorFactory() + .makeDimensionSelector(new DefaultDimensionSpec(dim, dim)); // dimSelector is null if the dimension is not present if (dimSelector != null) { dimSelectors.put(dim, dimSelector); @@ -100,7 +101,8 @@ public class IngestSegmentFirehose implements Firehose final Map metSelectors = Maps.newHashMap(); for (String metric : metrics) { - final ObjectColumnSelector metricSelector = cursor.makeObjectColumnSelector(metric); + final ObjectColumnSelector metricSelector = + cursor.getColumnSelectorFactory().makeObjectColumnSelector(metric); if (metricSelector != null) { metSelectors.put(metric, metricSelector); } @@ -125,7 +127,7 @@ public class IngestSegmentFirehose implements Firehose { final Map theEvent = Maps.newLinkedHashMap(); final long timestamp = timestampColumnSelector.getLong(); - theEvent.put(EventHolder.timestampKey, new DateTime(timestamp)); + theEvent.put(EventHolder.timestampKey, DateTimes.utc(timestamp)); for (Map.Entry dimSelector : dimSelectors.entrySet()) { final String dim = dimSelector.getKey(); diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java b/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java index 099b7c12a3e..d1fd632672c 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/IrcFirehoseFactory.java @@ -33,6 +33,7 @@ import com.ircclouds.irc.api.state.IIRCState; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.logger.Logger; import org.joda.time.DateTime; @@ -112,7 +113,7 @@ public class IrcFirehoseFactory implements FirehoseFactory public void onChannelMessage(ChannelPrivMsg aMsg) { try { - queue.put(Pair.of(DateTime.now(), aMsg)); + queue.put(Pair.of(DateTimes.nowUtc(), aMsg)); 
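// Hedged sketch of the selector-access pattern this patch moves to: selectors are obtained from
// cursor.getColumnSelectorFactory() rather than from the Cursor directly, as in the
// IngestSegmentFirehose and IncrementalIndexStorageAdapterTest hunks. Import paths follow this
// codebase; the consuming loop is illustrative only.
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.segment.Cursor;
import io.druid.segment.DimensionSelector;
import io.druid.segment.LongColumnSelector;
import io.druid.segment.column.Column;

class CursorReadSketch
{
  static void readAllRows(Cursor cursor)
  {
    final LongColumnSelector timeSelector =
        cursor.getColumnSelectorFactory().makeLongColumnSelector(Column.TIME_COLUMN_NAME);
    final DimensionSelector billySelector =
        cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("billy", "billy"));

    while (!cursor.isDone()) {
      final long timestamp = timeSelector.getLong();               // row timestamp in millis
      final int cardinality = billySelector.getValueCardinality(); // distinct values seen so far
      // ... use timestamp / dimension values for the current row ...
      cursor.advance();
    }
  }
}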
} catch (InterruptedException e) { throw new RuntimeException("interrupted adding message to queue", e); diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/CustomVersioningPolicy.java b/server/src/main/java/io/druid/segment/realtime/plumber/CustomVersioningPolicy.java index 9a1de9d331b..1bc6d664811 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/CustomVersioningPolicy.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/CustomVersioningPolicy.java @@ -21,7 +21,7 @@ package io.druid.segment.realtime.plumber; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import org.joda.time.DateTime; +import io.druid.java.util.common.DateTimes; import org.joda.time.Interval; /** @@ -35,7 +35,7 @@ public class CustomVersioningPolicy implements VersioningPolicy @JsonProperty("version") String version ) { - this.version = version == null ? new DateTime().toString() : version; + this.version = version == null ? DateTimes.nowUtc().toString() : version; } @Override @@ -43,4 +43,10 @@ public class CustomVersioningPolicy implements VersioningPolicy { return version; } + + @JsonProperty("version") + public String getVersion() + { + return version; + } } diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java index 0294103b132..3938d017a92 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java @@ -27,6 +27,7 @@ import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.common.guava.ThreadRenamingCallable; import io.druid.concurrent.Execs; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.concurrent.ScheduledExecutors; @@ -121,7 +122,7 @@ public class FlushingPlumber extends RealtimePlumber log.info( "Abandoning segment %s at %s", sink.getSegment().getIdentifier(), - new DateTime().plusMillis((int) flushDuration.getMillis()) + DateTimes.nowUtc().plusMillis((int) flushDuration.getMillis()) ); ScheduledExecutors.scheduleWithFixedDelay( @@ -143,12 +144,12 @@ public class FlushingPlumber extends RealtimePlumber private void startFlushThread() { final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity(); - final DateTime truncatedNow = segmentGranularity.bucketStart(new DateTime()); + final DateTime truncatedNow = segmentGranularity.bucketStart(DateTimes.nowUtc()); final long windowMillis = config.getWindowPeriod().toStandardDuration().getMillis(); log.info( "Expect to run at [%s]", - new DateTime().plus( + DateTimes.nowUtc().plus( new Duration( System.currentTimeMillis(), schema.getGranularitySpec().getSegmentGranularity().increment(truncatedNow).getMillis() + windowMillis diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java b/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java index 1773abdf695..083f6384cd1 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java @@ -19,7 +19,8 @@ package io.druid.segment.realtime.plumber; -import 
io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.joda.time.Period; @@ -53,7 +54,7 @@ public class MessageTimeRejectionPolicyFactory implements RejectionPolicyFactory @Override public DateTime getCurrMaxTime() { - return new DateTime(maxTimestamp); + return DateTimes.utc(maxTimestamp); } @Override diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/NoopRejectionPolicyFactory.java b/server/src/main/java/io/druid/segment/realtime/plumber/NoopRejectionPolicyFactory.java index de572665a09..678516b7585 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/NoopRejectionPolicyFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/NoopRejectionPolicyFactory.java @@ -19,6 +19,7 @@ package io.druid.segment.realtime.plumber; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Period; @@ -32,7 +33,7 @@ public class NoopRejectionPolicyFactory implements RejectionPolicyFactory @Override public DateTime getCurrMaxTime() { - return new DateTime(0); + return DateTimes.EPOCH; } @Override diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java index 538440b2d30..3fe234db39a 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java @@ -42,7 +42,9 @@ import io.druid.concurrent.Execs; import io.druid.concurrent.TaskThreadPriority; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; @@ -230,14 +232,15 @@ public class RealtimePlumber implements Plumber final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity(); final VersioningPolicy versioningPolicy = config.getVersioningPolicy(); - final long truncatedTime = segmentGranularity.bucketStart(new DateTime(timestamp)).getMillis(); + DateTime truncatedDateTime = segmentGranularity.bucketStart(DateTimes.utc(timestamp)); + final long truncatedTime = truncatedDateTime.getMillis(); Sink retVal = sinks.get(truncatedTime); if (retVal == null) { final Interval sinkInterval = new Interval( - new DateTime(truncatedTime), - segmentGranularity.increment(new DateTime(truncatedTime)) + truncatedDateTime, + segmentGranularity.increment(truncatedDateTime) ); retVal = new Sink( @@ -354,7 +357,7 @@ public class RealtimePlumber implements Plumber private void persistAndMerge(final long truncatedTime, final Sink sink) { final String threadName = StringUtils.format( - "%s-%s-persist-n-merge", schema.getDataSource(), new DateTime(truncatedTime) + "%s-%s-persist-n-merge", schema.getDataSource(), DateTimes.utc(truncatedTime) ); mergeExecutor.execute( new ThreadRenamingRunnable(threadName) @@ -542,7 +545,7 @@ public class RealtimePlumber implements Plumber private void resetNextFlush() { - nextFlush = new DateTime().plus(config.getIntermediatePersistPeriod()).getMillis(); + nextFlush = DateTimes.nowUtc().plus(config.getIntermediatePersistPeriod()).getMillis(); } protected void 
initializeExecutors() @@ -598,7 +601,7 @@ public class RealtimePlumber implements Plumber Object metadata = null; long latestCommitTime = 0; for (File sinkDir : files) { - final Interval sinkInterval = new Interval(sinkDir.getName().replace("_", "/")); + final Interval sinkInterval = Intervals.of(sinkDir.getName().replace("_", "/")); //final File[] sinkFiles = sinkDir.listFiles(); // To avoid reading and listing of "merged" dir @@ -739,12 +742,12 @@ public class RealtimePlumber implements Plumber final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity(); final Period windowPeriod = config.getWindowPeriod(); - final DateTime truncatedNow = segmentGranularity.bucketStart(new DateTime()); + final DateTime truncatedNow = segmentGranularity.bucketStart(DateTimes.nowUtc()); final long windowMillis = windowPeriod.toStandardDuration().getMillis(); log.info( "Expect to run at [%s]", - new DateTime().plus( + DateTimes.nowUtc().plus( new Duration( System.currentTimeMillis(), segmentGranularity.increment(truncatedNow).getMillis() + windowMillis @@ -797,14 +800,7 @@ public class RealtimePlumber implements Plumber final long windowMillis = windowPeriod.toStandardDuration().getMillis(); log.info("Starting merge and push."); DateTime minTimestampAsDate = segmentGranularity.bucketStart( - new DateTime( - Math.max( - windowMillis, - rejectionPolicy.getCurrMaxTime() - .getMillis() - ) - - windowMillis - ) + DateTimes.utc(Math.max(windowMillis, rejectionPolicy.getCurrMaxTime().getMillis()) - windowMillis) ); long minTimestamp = minTimestampAsDate.getMillis(); @@ -824,7 +820,7 @@ public class RealtimePlumber implements Plumber log.info( "Skipping persist and merge for entry [%s] : Start time [%s] >= [%s] min timestamp required in this run. 
Segment will be picked up in a future run.", entry, - new DateTime(intervalStart), + DateTimes.utc(intervalStart), minTimestampAsDate ); } diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java b/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java index a52639b02c3..878b1164b94 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java @@ -19,6 +19,7 @@ package io.druid.segment.realtime.plumber; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.joda.time.Period; @@ -35,7 +36,7 @@ public class ServerTimeRejectionPolicyFactory implements RejectionPolicyFactory @Override public DateTime getCurrMaxTime() { - return new DateTime(); + return DateTimes.nowUtc(); } @Override diff --git a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java index 3af0b2e2d86..849dfd82e68 100644 --- a/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java +++ b/server/src/main/java/io/druid/server/AsyncQueryForwardingServlet.java @@ -28,9 +28,11 @@ import com.google.inject.Inject; import com.google.inject.Provider; import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; +import io.druid.client.selector.Server; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; import io.druid.guice.http.DruidHttpClientConfig; +import io.druid.java.util.common.DateTimes; import io.druid.query.DruidMetrics; import io.druid.query.GenericQueryMetricsFactory; import io.druid.query.Query; @@ -47,7 +49,6 @@ import org.eclipse.jetty.client.api.Result; import org.eclipse.jetty.client.util.BytesContentProvider; import org.eclipse.jetty.http.HttpMethod; import org.eclipse.jetty.proxy.AsyncProxyServlet; -import org.joda.time.DateTime; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; @@ -72,6 +73,7 @@ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements Qu private static final String APPLICATION_SMILE = "application/smile"; private static final String HOST_ATTRIBUTE = "io.druid.proxy.to.host"; + private static final String SCHEME_ATTRIBUTE = "io.druid.proxy.to.host.scheme"; private static final String QUERY_ATTRIBUTE = "io.druid.proxy.query"; private static final String OBJECTMAPPER_ATTRIBUTE = "io.druid.proxy.objectMapper"; @@ -169,35 +171,31 @@ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements Qu final ObjectMapper objectMapper = isSmile ? 
smileMapper : jsonMapper; request.setAttribute(OBJECTMAPPER_ATTRIBUTE, objectMapper); - final String defaultHost = hostFinder.getDefaultHost(); - request.setAttribute(HOST_ATTRIBUTE, defaultHost); + final Server defaultServer = hostFinder.getDefaultServer(); + request.setAttribute(HOST_ATTRIBUTE, defaultServer.getHost()); + request.setAttribute(SCHEME_ATTRIBUTE, defaultServer.getScheme()); final boolean isQueryEndpoint = request.getRequestURI().startsWith("/druid/v2"); if (isQueryEndpoint && HttpMethod.DELETE.is(request.getMethod())) { // query cancellation request - for (final String host : hostFinder.getAllHosts()) { + for (final Server server: hostFinder.getAllServers()) { // send query cancellation to all brokers this query may have gone to // to keep the code simple, the proxy servlet will also send a request to one of the default brokers - if (!host.equals(defaultHost)) { + if (!server.getHost().equals(defaultServer.getHost())) { // issue async requests broadcastClient - .newRequest(rewriteURI(request, host)) + .newRequest(rewriteURI(request, server.getScheme(), server.getHost())) .method(HttpMethod.DELETE) .timeout(CANCELLATION_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS) .send( - new Response.CompleteListener() - { - @Override - public void onComplete(Result result) - { - if (result.isFailed()) { - log.warn( - result.getFailure(), - "Failed to forward cancellation request to [%s]", - host - ); - } + result -> { + if (result.isFailed()) { + log.warn( + result.getFailure(), + "Failed to forward cancellation request to [%s]", + server.getHost() + ); } } ); @@ -209,7 +207,9 @@ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements Qu try { Query inputQuery = objectMapper.readValue(request.getInputStream(), Query.class); if (inputQuery != null) { - request.setAttribute(HOST_ATTRIBUTE, hostFinder.getHost(inputQuery)); + final Server server = hostFinder.getServer(inputQuery); + request.setAttribute(HOST_ATTRIBUTE, server.getHost()); + request.setAttribute(SCHEME_ATTRIBUTE, server.getScheme()); if (inputQuery.getId() == null) { inputQuery = inputQuery.withId(UUID.randomUUID().toString()); } @@ -221,7 +221,7 @@ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements Qu final String errorMessage = e.getMessage() == null ? "no error message" : e.getMessage(); requestLogger.log( new RequestLogLine( - new DateTime(), + DateTimes.nowUtc(), request.getRemoteAddr(), null, new QueryStats(ImmutableMap.of("success", false, "exception", errorMessage)) @@ -289,19 +289,19 @@ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements Qu @Override protected String rewriteTarget(HttpServletRequest request) { - return rewriteURI(request, (String) request.getAttribute(HOST_ATTRIBUTE)).toString(); + return rewriteURI(request, (String) request.getAttribute(SCHEME_ATTRIBUTE), (String) request.getAttribute(HOST_ATTRIBUTE)).toString(); } - protected URI rewriteURI(HttpServletRequest request, String host) + protected URI rewriteURI(HttpServletRequest request, String scheme, String host) { - return makeURI(host, request.getRequestURI(), request.getQueryString()); + return makeURI(scheme, host, request.getRequestURI(), request.getQueryString()); } - protected static URI makeURI(String host, String requestURI, String rawQueryString) + protected static URI makeURI(String scheme, String host, String requestURI, String rawQueryString) { try { return new URI( - "http", + scheme, host, requestURI, rawQueryString == null ? 
null : URLDecoder.decode(rawQueryString, "UTF-8"), @@ -394,7 +394,7 @@ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements Qu emitQueryTime(requestTimeNs, success); requestLogger.log( new RequestLogLine( - new DateTime(), + DateTimes.nowUtc(), req.getRemoteAddr(), query, new QueryStats( @@ -427,7 +427,7 @@ public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements Qu emitQueryTime(System.nanoTime() - startNs, false); requestLogger.log( new RequestLogLine( - new DateTime(), + DateTimes.nowUtc(), req.getRemoteAddr(), query, new QueryStats( diff --git a/server/src/main/java/io/druid/server/ClientInfoResource.java b/server/src/main/java/io/druid/server/ClientInfoResource.java index b32091960c9..32e678ea86a 100644 --- a/server/src/main/java/io/druid/server/ClientInfoResource.java +++ b/server/src/main/java/io/druid/server/ClientInfoResource.java @@ -34,7 +34,9 @@ import io.druid.client.FilteredServerInventoryView; import io.druid.client.ServerViewUtil; import io.druid.client.TimelineServerView; import io.druid.client.selector.ServerSelector; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.Pair; import io.druid.java.util.common.logger.Logger; import io.druid.query.LocatedSegmentDescriptor; @@ -172,7 +174,7 @@ public class ClientInfoResource DateTime now = getCurrentTime(); theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now); } else { - theInterval = new Interval(interval); + theInterval = Intervals.of(interval); } TimelineLookup timeline = timelineServerView.getTimeline(new TableDataSource(dataSourceName)); @@ -259,7 +261,7 @@ public class ClientInfoResource DateTime now = getCurrentTime(); theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now); } else { - theInterval = new Interval(interval); + theInterval = Intervals.of(interval); } for (DataSegment segment : segments) { @@ -292,7 +294,7 @@ public class ClientInfoResource DateTime now = getCurrentTime(); theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now); } else { - theInterval = new Interval(interval); + theInterval = Intervals.of(interval); } for (DataSegment segment : segments) { @@ -317,7 +319,7 @@ public class ClientInfoResource { List intervalList = Lists.newArrayList(); for (String interval : intervals.split(",")) { - intervalList.add(Interval.parse(interval.trim())); + intervalList.add(Intervals.of(interval.trim())); } List condensed = JodaUtils.condenseIntervals(intervalList); return ServerViewUtil.getTargetLocations(timelineServerView, datasource, condensed, numCandidates); @@ -325,7 +327,7 @@ public class ClientInfoResource protected DateTime getCurrentTime() { - return new DateTime(); + return DateTimes.nowUtc(); } diff --git a/server/src/main/java/io/druid/server/DruidNode.java b/server/src/main/java/io/druid/server/DruidNode.java index a67cf1015b9..4047b511cbf 100644 --- a/server/src/main/java/io/druid/server/DruidNode.java +++ b/server/src/main/java/io/druid/server/DruidNode.java @@ -213,6 +213,15 @@ public class DruidNode return null; } + public int getPortToUse() + { + if (serverConfig.isTls()) { + return getTlsPort(); + } else { + return getPlaintextPort(); + } + } + public String getHostAndPortToUse() { return getHostAndTlsPort() != null ? 
getHostAndTlsPort() : getHostAndPort(); diff --git a/server/src/main/java/io/druid/server/QueryLifecycle.java b/server/src/main/java/io/druid/server/QueryLifecycle.java index 4104e1638e2..193daa19a95 100644 --- a/server/src/main/java/io/druid/server/QueryLifecycle.java +++ b/server/src/main/java/io/druid/server/QueryLifecycle.java @@ -22,6 +22,7 @@ package io.druid.server; import com.google.common.base.Strings; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.DirectDruidClient; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.SequenceWrapper; @@ -44,7 +45,6 @@ import io.druid.server.security.AuthConfig; import io.druid.server.security.AuthorizationInfo; import io.druid.server.security.Resource; import io.druid.server.security.ResourceType; -import org.joda.time.DateTime; import javax.annotation.Nullable; import java.util.LinkedHashMap; @@ -303,7 +303,7 @@ public class QueryLifecycle requestLogger.log( new RequestLogLine( - new DateTime(startMs), + DateTimes.utc(startMs), Strings.nullToEmpty(remoteAddress), queryPlus.getQuery(), new QueryStats(statsMap) diff --git a/server/src/main/java/io/druid/server/audit/SQLAuditManager.java b/server/src/main/java/io/druid/server/audit/SQLAuditManager.java index e1de8612e43..da221868960 100644 --- a/server/src/main/java/io/druid/server/audit/SQLAuditManager.java +++ b/server/src/main/java/io/druid/server/audit/SQLAuditManager.java @@ -29,6 +29,7 @@ import io.druid.audit.AuditEntry; import io.druid.audit.AuditManager; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.SQLMetadataConnector; @@ -165,7 +166,7 @@ public class SQLAuditManager implements AuditManager { final Interval theInterval; if (interval == null) { - DateTime now = new DateTime(); + DateTime now = DateTimes.nowUtc(); theInterval = new Interval(now.minus(config.getAuditHistoryMillis()), now); } else { theInterval = interval; diff --git a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java index e8c3ee051d6..e5df3079760 100644 --- a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java +++ b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java @@ -32,6 +32,7 @@ import com.google.common.util.concurrent.SettableFuture; import com.google.inject.Inject; import io.druid.common.utils.UUIDUtils; import io.druid.curator.announcement.Announcer; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; @@ -39,7 +40,6 @@ import io.druid.server.initialization.BatchDataSegmentAnnouncerConfig; import io.druid.server.initialization.ZkPathsConfig; import io.druid.timeline.DataSegment; import org.apache.curator.utils.ZKPaths; -import org.joda.time.DateTime; import javax.annotation.Nullable; import java.io.IOException; @@ -313,7 +313,7 @@ public class BatchDataSegmentAnnouncer implements DataSegmentAnnouncer server.getHost(), server.getType().toString(), server.getTier(), - new DateTime().toString() + DateTimes.nowUtc().toString() ) ); } diff --git 
a/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java b/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java index 2c7c9f84e0e..0be3020ae87 100644 --- a/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java +++ b/server/src/main/java/io/druid/server/coordination/broker/DruidBroker.java @@ -20,12 +20,16 @@ package io.druid.server.coordination.broker; import com.google.common.base.Predicates; +import com.google.common.collect.ImmutableMap; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; - import io.druid.client.FilteredServerInventoryView; import io.druid.client.ServerView; import io.druid.curator.discovery.ServiceAnnouncer; +import io.druid.discovery.DiscoveryDruidNode; +import io.druid.discovery.DruidNodeAnnouncer; +import io.druid.discovery.DruidNodeDiscoveryProvider; +import io.druid.discovery.LookupNodeService; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Self; import io.druid.java.util.common.Pair; @@ -40,17 +44,28 @@ public class DruidBroker { private final DruidNode self; private final ServiceAnnouncer serviceAnnouncer; + private final DruidNodeAnnouncer druidNodeAnnouncer; + private final DiscoveryDruidNode discoveryDruidNode; + private volatile boolean started = false; @Inject public DruidBroker( final FilteredServerInventoryView serverInventoryView, final @Self DruidNode self, - final ServiceAnnouncer serviceAnnouncer - ) + final ServiceAnnouncer serviceAnnouncer, + final DruidNodeAnnouncer druidNodeAnnouncer, + final LookupNodeService lookupNodeService + ) { this.self = self; this.serviceAnnouncer = serviceAnnouncer; + this.druidNodeAnnouncer = druidNodeAnnouncer; + this.discoveryDruidNode = new DiscoveryDruidNode( + self, + DruidNodeDiscoveryProvider.NODE_TYPE_BROKER, + ImmutableMap.of(lookupNodeService.getName(), lookupNodeService) + ); serverInventoryView.registerSegmentCallback( MoreExecutors.sameThreadExecutor(), @@ -60,6 +75,7 @@ public class DruidBroker public ServerView.CallbackAction segmentViewInitialized() { serviceAnnouncer.announce(self); + druidNodeAnnouncer.announce(discoveryDruidNode); return ServerView.CallbackAction.UNREGISTER; } }, @@ -87,6 +103,7 @@ public class DruidBroker return; } serviceAnnouncer.unannounce(self); + druidNodeAnnouncer.unannounce(discoveryDruidNode); started = false; } } diff --git a/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java b/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java index e1ad95f6b5e..b9a82fe5468 100644 --- a/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java +++ b/server/src/main/java/io/druid/server/coordinator/CostBalancerStrategy.java @@ -311,9 +311,12 @@ public class CostBalancerStrategy implements BalancerStrategy ) ); - // plus the costs of segments that will be loaded + // plus the costs of segments that will be loaded cost += computeJointSegmentsCost(proposalSegment, server.getPeon().getSegmentsToLoad()); + // minus the costs of segments that are marked to be dropped + cost -= computeJointSegmentsCost(proposalSegment, server.getPeon().getSegmentsMarkedToDrop()); + return cost; } diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java index 9e963062606..3e5d2c68fdd 100644 --- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java +++ 
b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java @@ -38,18 +38,20 @@ import io.druid.client.DruidServer; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; import io.druid.client.ServerInventoryView; +import io.druid.client.coordinator.Coordinator; import io.druid.client.indexing.IndexingServiceClient; import io.druid.common.config.JacksonConfigManager; import io.druid.concurrent.Execs; import io.druid.curator.discovery.ServiceAnnouncer; +import io.druid.discovery.DruidLeaderSelector; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.CoordinatorIndexingServiceHelper; import io.druid.guice.annotations.Self; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; import io.druid.java.util.common.concurrent.ScheduledExecutors; -import io.druid.java.util.common.guava.CloseQuietly; import io.druid.java.util.common.guava.Comparators; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.lifecycle.LifecycleStart; @@ -72,15 +74,11 @@ import io.druid.timeline.DataSegment; import it.unimi.dsi.fastutil.objects.Object2LongMap; import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap; import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.framework.recipes.leader.LeaderLatch; -import org.apache.curator.framework.recipes.leader.LeaderLatchListener; -import org.apache.curator.framework.recipes.leader.Participant; import org.apache.curator.utils.ZKPaths; import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Interval; -import java.io.IOException; import java.util.Arrays; import java.util.Comparator; import java.util.List; @@ -89,15 +87,12 @@ import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.atomic.AtomicReference; /** */ @ManageLifecycle public class DruidCoordinator { - public static final String COORDINATOR_OWNER_NODE = "_COORDINATOR"; - public static Comparator SEGMENT_COMPARATOR = Ordering.from(Comparators.intervalsByEndThenStart()) .onResultOf( new Function() @@ -125,16 +120,14 @@ public class DruidCoordinator private final ScheduledExecutorService exec; private final LoadQueueTaskMaster taskMaster; private final Map loadManagementPeons; - private final AtomicReference leaderLatch; private final ServiceAnnouncer serviceAnnouncer; private final DruidNode self; private final Set indexingServiceHelpers; private volatile boolean started = false; - private volatile int leaderCounter = 0; - private volatile boolean leader = false; private volatile SegmentReplicantLookup segmentReplicantLookup = null; private final BalancerStrategyFactory factory; private final LookupCoordinatorManager lookupCoordinatorManager; + private final DruidLeaderSelector coordLeaderSelector; @Inject public DruidCoordinator( @@ -153,7 +146,8 @@ public class DruidCoordinator @Self DruidNode self, @CoordinatorIndexingServiceHelper Set indexingServiceHelpers, BalancerStrategyFactory factory, - LookupCoordinatorManager lookupCoordinatorManager + LookupCoordinatorManager lookupCoordinatorManager, + @Coordinator DruidLeaderSelector coordLeaderSelector ) { this( @@ -173,7 +167,8 @@ public class DruidCoordinator Maps.newConcurrentMap(), indexingServiceHelpers, factory, - 
lookupCoordinatorManager + lookupCoordinatorManager, + coordLeaderSelector ); } @@ -194,7 +189,8 @@ public class DruidCoordinator ConcurrentMap loadQueuePeonMap, Set indexingServiceHelpers, BalancerStrategyFactory factory, - LookupCoordinatorManager lookupCoordinatorManager + LookupCoordinatorManager lookupCoordinatorManager, + DruidLeaderSelector coordLeaderSelector ) { this.config = config; @@ -214,15 +210,15 @@ public class DruidCoordinator this.exec = scheduledExecutorFactory.create(1, "Coordinator-Exec--%d"); - this.leaderLatch = new AtomicReference<>(null); this.loadManagementPeons = loadQueuePeonMap; this.factory = factory; this.lookupCoordinatorManager = lookupCoordinatorManager; + this.coordLeaderSelector = coordLeaderSelector; } public boolean isLeader() { - return leader; + return coordLeaderSelector.isLeader(); } public Map getLoadManagementPeons() @@ -238,7 +234,7 @@ public class DruidCoordinator return retVal; } - final DateTime now = new DateTime(); + final DateTime now = DateTimes.nowUtc(); for (final DataSegment segment : getAvailableDataSegments()) { final List rules = metadataRuleManager.getRulesWithDefault(segment.getDataSource()); @@ -264,7 +260,6 @@ public class DruidCoordinator return retVal; } - public Object2LongMap getSegmentAvailability() { final Object2LongOpenHashMap retVal = new Object2LongOpenHashMap<>(); @@ -344,23 +339,7 @@ public class DruidCoordinator public String getCurrentLeader() { - try { - final LeaderLatch latch = leaderLatch.get(); - - if (latch == null) { - return null; - } - - Participant participant = latch.getLeader(); - if (participant.isLeader()) { - return participant.getId(); - } - - return null; - } - catch (Exception e) { - throw Throwables.propagate(e); - } + return coordLeaderSelector.getCurrentLeader(); } public void moveSegment( @@ -422,28 +401,39 @@ public class DruidCoordinator ), segmentName ); - loadPeon.loadSegment( - segmentToLoad, - new LoadPeonCallback() - { - @Override - public void execute() - { + final LoadPeonCallback loadPeonCallback = () -> { + dropPeon.unmarkSegmentToDrop(segmentToLoad); + if (callback != null) { + callback.execute(); + } + }; + + // mark segment to drop before it is actually loaded on server + // to be able to account this information in DruidBalancerStrategy immediately + dropPeon.markSegmentToDrop(segmentToLoad); + try { + loadPeon.loadSegment( + segmentToLoad, + () -> { try { if (serverInventoryView.isSegmentLoadedByServer(toServer.getName(), segment) && curator.checkExists().forPath(toLoadQueueSegPath) == null && !dropPeon.getSegmentsToDrop().contains(segment)) { - dropPeon.dropSegment(segment, callback); - } else if (callback != null) { - callback.execute(); + dropPeon.dropSegment(segment, loadPeonCallback); + } else { + loadPeonCallback.execute(); } } catch (Exception e) { throw Throwables.propagate(e); } } - } - ); + ); + } + catch (Exception e) { + dropPeon.unmarkSegmentToDrop(segmentToLoad); + Throwables.propagate(e); + } } catch (Exception e) { log.makeAlert(e, "Exception moving segment %s", segmentName).emit(); @@ -497,43 +487,25 @@ public class DruidCoordinator } started = true; - createNewLeaderLatch(); - try { - leaderLatch.get().start(); - } - catch (Exception e) { - throw Throwables.propagate(e); - } + coordLeaderSelector.registerListener( + new DruidLeaderSelector.Listener() + { + @Override + public void becomeLeader() + { + DruidCoordinator.this.becomeLeader(); + } + + @Override + public void stopBeingLeader() + { + DruidCoordinator.this.stopBeingLeader(); + } + } + ); } } - private 
LeaderLatch createNewLeaderLatch() - { - final LeaderLatch newLeaderLatch = new LeaderLatch( - curator, ZKPaths.makePath(zkPaths.getCoordinatorPath(), COORDINATOR_OWNER_NODE), self.getHostAndPortToUse() - ); - - newLeaderLatch.addListener( - new LeaderLatchListener() - { - @Override - public void isLeader() - { - DruidCoordinator.this.becomeLeader(); - } - - @Override - public void notLeader() - { - DruidCoordinator.this.stopBeingLeader(); - } - }, - Execs.singleThreaded("CoordinatorLeader-%s") - ); - - return leaderLatch.getAndSet(newLeaderLatch); - } - @LifecycleStop public void stop() { @@ -542,14 +514,7 @@ public class DruidCoordinator return; } - stopBeingLeader(); - - try { - leaderLatch.get().close(); - } - catch (IOException e) { - log.warn(e, "Unable to close leaderLatch, ignoring"); - } + coordLeaderSelector.unregisterListener(); started = false; @@ -566,103 +531,76 @@ public class DruidCoordinator log.info("I am the leader of the coordinators, all must bow!"); log.info("Starting coordination in [%s]", config.getCoordinatorStartDelay()); - try { - leaderCounter++; - leader = true; - metadataSegmentManager.start(); - metadataRuleManager.start(); - serviceAnnouncer.announce(self); - final int startingLeaderCounter = leaderCounter; - final List> coordinatorRunnables = Lists.newArrayList(); + metadataSegmentManager.start(); + metadataRuleManager.start(); + serviceAnnouncer.announce(self); + final int startingLeaderCounter = coordLeaderSelector.localTerm(); + + final List> coordinatorRunnables = Lists.newArrayList(); + coordinatorRunnables.add( + Pair.of( + new CoordinatorHistoricalManagerRunnable(startingLeaderCounter), + config.getCoordinatorPeriod() + ) + ); + if (indexingServiceClient != null) { coordinatorRunnables.add( Pair.of( - new CoordinatorHistoricalManagerRunnable(startingLeaderCounter), - config.getCoordinatorPeriod() + new CoordinatorIndexingServiceRunnable( + makeIndexingServiceHelpers(), + startingLeaderCounter + ), + config.getCoordinatorIndexingPeriod() ) ); - if (indexingServiceClient != null) { - coordinatorRunnables.add( - Pair.of( - new CoordinatorIndexingServiceRunnable( - makeIndexingServiceHelpers(), - startingLeaderCounter - ), - config.getCoordinatorIndexingPeriod() - ) - ); - } + } - for (final Pair coordinatorRunnable : coordinatorRunnables) { - ScheduledExecutors.scheduleWithFixedDelay( - exec, - config.getCoordinatorStartDelay(), - coordinatorRunnable.rhs, - new Callable() + for (final Pair coordinatorRunnable : coordinatorRunnables) { + ScheduledExecutors.scheduleWithFixedDelay( + exec, + config.getCoordinatorStartDelay(), + coordinatorRunnable.rhs, + new Callable() + { + private final CoordinatorRunnable theRunnable = coordinatorRunnable.lhs; + + @Override + public ScheduledExecutors.Signal call() { - private final CoordinatorRunnable theRunnable = coordinatorRunnable.lhs; - - @Override - public ScheduledExecutors.Signal call() - { - if (leader && startingLeaderCounter == leaderCounter) { - theRunnable.run(); - } - if (leader && startingLeaderCounter == leaderCounter) { // (We might no longer be leader) - return ScheduledExecutors.Signal.REPEAT; - } else { - return ScheduledExecutors.Signal.STOP; - } + if (coordLeaderSelector.isLeader() && startingLeaderCounter == coordLeaderSelector.localTerm()) { + theRunnable.run(); + } + if (coordLeaderSelector.isLeader() && startingLeaderCounter == coordLeaderSelector.localTerm()) { // (We might no longer be leader) + return ScheduledExecutors.Signal.REPEAT; + } else { + return ScheduledExecutors.Signal.STOP; } } 
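// ---- Editor's aside (illustrative sketch, not part of the patch) ----
// The DruidCoordinator hunks above replace hand-rolled Curator LeaderLatch bookkeeping
// (leaderLatch, leaderCounter, leader flag) with an injected DruidLeaderSelector. The toy
// class below is a hedged stand-in, modeled only on the calls visible in this diff
// (registerListener, isLeader, localTerm), not the real io.druid.discovery.DruidLeaderSelector.
// It illustrates the contract the coordinator now relies on: becomeLeader()/stopBeingLeader()
// callbacks plus a localTerm() counter, so work scheduled under an old leadership term can
// notice the change and stop instead of racing the new term.
import java.util.concurrent.atomic.AtomicInteger;

final class LeaderSelectorSketch
{
  interface Listener
  {
    void becomeLeader();

    void stopBeingLeader();
  }

  private final AtomicInteger term = new AtomicInteger();
  private volatile boolean leader;
  private volatile Listener listener;

  void registerListener(Listener listener)
  {
    this.listener = listener;
  }

  boolean isLeader()
  {
    return leader;
  }

  int localTerm()
  {
    return term.get();
  }

  // Driven by whatever election mechanism backs the selector (ZooKeeper/Curator in Druid).
  void onElected()
  {
    term.incrementAndGet();
    leader = true;
    listener.becomeLeader();
  }

  void onRevoked()
  {
    leader = false;
    listener.stopBeingLeader();
  }
}
// ---- end of aside ----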
- ); - } + } + ); + } - lookupCoordinatorManager.start(); - } - catch (Exception e) { - log.makeAlert(e, "Unable to become leader") - .emit(); - final LeaderLatch oldLatch = createNewLeaderLatch(); - CloseQuietly.close(oldLatch); - try { - leaderLatch.get().start(); - } - catch (Exception e1) { - // If an exception gets thrown out here, then the coordinator will zombie out 'cause it won't be looking for - // the latch anymore. I don't believe it's actually possible for an Exception to throw out here, but - // Curator likes to have "throws Exception" on methods so it might happen... - log.makeAlert(e1, "I am a zombie") - .emit(); - } - } + lookupCoordinatorManager.start(); } } private void stopBeingLeader() { synchronized (lock) { - try { - leaderCounter++; - log.info("I am no longer the leader..."); + log.info("I am no longer the leader..."); - for (String server : loadManagementPeons.keySet()) { - LoadQueuePeon peon = loadManagementPeons.remove(server); - peon.stop(); - } - loadManagementPeons.clear(); - - serviceAnnouncer.unannounce(self); - metadataRuleManager.stop(); - metadataSegmentManager.stop(); - lookupCoordinatorManager.stop(); - - leader = false; - } - catch (Exception e) { - log.makeAlert(e, "Unable to stopBeingLeader").emit(); + for (String server : loadManagementPeons.keySet()) { + LoadQueuePeon peon = loadManagementPeons.remove(server); + peon.stop(); } + loadManagementPeons.clear(); + + serviceAnnouncer.unannounce(self); + metadataRuleManager.stop(); + metadataSegmentManager.stop(); + lookupCoordinatorManager.stop(); } } @@ -694,9 +632,8 @@ public class DruidCoordinator ListeningExecutorService balancerExec = null; try { synchronized (lock) { - final LeaderLatch latch = leaderLatch.get(); - if (latch == null || !latch.hasLeadership()) { - log.info("LEGGO MY EGGO. [%s] is leader.", latch == null ? null : latch.getLeader().getId()); + if (!coordLeaderSelector.isLeader()) { + log.info("LEGGO MY EGGO. 
[%s] is leader.", coordLeaderSelector.getCurrentLeader()); stopBeingLeader(); return; } @@ -731,7 +668,7 @@ public class DruidCoordinator .build(); for (DruidCoordinatorHelper helper : helpers) { // Don't read state and run state in the same helper otherwise racy conditions may exist - if (leader && startingLeaderCounter == leaderCounter) { + if (coordLeaderSelector.isLeader() && startingLeaderCounter == coordLeaderSelector.localTerm()) { params = helper.run(params); } } @@ -828,7 +765,7 @@ public class DruidCoordinator .withDatabaseRuleManager(metadataRuleManager) .withLoadManagementPeons(loadManagementPeons) .withSegmentReplicantLookup(segmentReplicantLookup) - .withBalancerReferenceTimestamp(DateTime.now()) + .withBalancerReferenceTimestamp(DateTimes.nowUtc()) .build(); } }, diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java index 54afd9d7148..183cb781e8a 100644 --- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java +++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinatorRuntimeParams.java @@ -23,6 +23,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.DruidDataSource; +import io.druid.java.util.common.DateTimes; import io.druid.metadata.MetadataRuleManager; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; @@ -223,7 +224,7 @@ public class DruidCoordinatorRuntimeParams this.emitter = null; this.stats = new CoordinatorStats(); this.coordinatorDynamicConfig = new CoordinatorDynamicConfig.Builder().build(); - this.balancerReferenceTimestamp = DateTime.now(); + this.balancerReferenceTimestamp = DateTimes.nowUtc(); } Builder( diff --git a/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java b/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java index 846069fb334..aff7093cf84 100644 --- a/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java +++ b/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java @@ -43,6 +43,7 @@ import java.util.List; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentSkipListMap; +import java.util.concurrent.ConcurrentSkipListSet; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; @@ -82,6 +83,9 @@ public class LoadQueuePeon private final ConcurrentSkipListMap segmentsToDrop = new ConcurrentSkipListMap<>( DruidCoordinator.SEGMENT_COMPARATOR ); + private final ConcurrentSkipListSet segmentsMarkedToDrop = new ConcurrentSkipListSet<>( + DruidCoordinator.SEGMENT_COMPARATOR + ); private final Object lock = new Object(); @@ -117,6 +121,12 @@ public class LoadQueuePeon return segmentsToDrop.keySet(); } + @JsonProperty + public Set getSegmentsMarkedToDrop() + { + return segmentsMarkedToDrop; + } + public long getLoadQueueSize() { return queuedSize.get(); @@ -191,6 +201,16 @@ public class LoadQueuePeon segmentsToDrop.put(segment, new SegmentHolder(segment, DROP, Collections.singletonList(callback))); } + public void markSegmentToDrop(DataSegment dataSegment) + { + segmentsMarkedToDrop.add(dataSegment); + } + + public void unmarkSegmentToDrop(DataSegment dataSegment) + { + segmentsMarkedToDrop.remove(dataSegment); + } + private void processSegmentChangeRequest() { if (currentlyProcessing != 
null) { diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java index 679eabe7fe2..27360fcd749 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java @@ -102,7 +102,6 @@ public class DruidCoordinatorBalancer implements DruidCoordinatorHelper return; } - final List serverHolderList = Lists.newArrayList(servers); if (serverHolderList.size() <= 1) { diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java index c4a93feff6c..7924e45adbb 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorRuleRunner.java @@ -22,6 +22,7 @@ package io.druid.server.coordinator.helper; import com.google.common.collect.Lists; import com.metamx.common.guava.Comparators; import com.metamx.emitter.EmittingLogger; +import io.druid.java.util.common.DateTimes; import io.druid.metadata.MetadataRuleManager; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DruidCluster; @@ -126,7 +127,7 @@ public class DruidCoordinatorRuleRunner implements DruidCoordinatorHelper .build(); // Run through all matched rules for available segments - DateTime now = new DateTime(); + DateTime now = DateTimes.nowUtc(); MetadataRuleManager databaseRuleManager = paramsWithReplicationManager.getDatabaseRuleManager(); final List segmentsWithMissingRules = Lists.newArrayListWithCapacity(MAX_MISSING_RULES); diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java index ccdcf36b7f5..a2b05a60732 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKiller.java @@ -24,7 +24,8 @@ import com.google.common.base.Preconditions; import com.google.inject.Inject; import io.druid.client.indexing.IndexingServiceClient; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.JodaUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataSegmentManager; import io.druid.server.coordinator.DruidCoordinatorConfig; @@ -121,11 +122,7 @@ public class DruidCoordinatorSegmentKiller implements DruidCoordinatorHelper { List unusedSegmentIntervals = segmentManager.getUnusedSegmentIntervals( dataSource, - new Interval( - 0, - System.currentTimeMillis() - - retainDuration - ), + new Interval(DateTimes.EPOCH, DateTimes.nowUtc().minus(retainDuration)), limit ); diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java index 275758f4b2f..708e73287a6 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java @@ -32,6 +32,7 @@ import com.google.inject.Inject; import 
com.metamx.emitter.service.ServiceMetricEvent; import io.druid.client.indexing.IndexingServiceClient; import io.druid.common.config.JacksonConfigManager; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.guava.FunctionalIterable; @@ -99,7 +100,7 @@ public class DruidCoordinatorSegmentMerger implements DruidCoordinatorHelper // Get serviced segments from the timeline VersionedIntervalTimeline timeline = entry.getValue(); List> timelineObjects = - timeline.lookup(new Interval(new DateTime(0), new DateTime("3000-01-01"))); + timeline.lookup(new Interval(DateTimes.EPOCH, DateTimes.of("3000-01-01"))); // Accumulate timelineObjects greedily until we reach our limits, then backtrack to the maximum complete set SegmentsToMerge segmentsToMerge = new SegmentsToMerge(); diff --git a/server/src/main/java/io/druid/server/http/ClusterResource.java b/server/src/main/java/io/druid/server/http/ClusterResource.java index 4092ed34a38..1de5eb55531 100644 --- a/server/src/main/java/io/druid/server/http/ClusterResource.java +++ b/server/src/main/java/io/druid/server/http/ClusterResource.java @@ -25,6 +25,7 @@ import com.sun.jersey.spi.container.ResourceFilters; import io.druid.discovery.DiscoveryDruidNode; import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.LazySingleton; +import io.druid.java.util.common.StringUtils; import io.druid.server.http.security.StateResourceFilter; import javax.ws.rs.GET; @@ -98,7 +99,7 @@ public class ClusterResource if (nodeType == null || !DruidNodeDiscoveryProvider.ALL_NODE_TYPES.contains(nodeType)) { return Response.serverError() .status(Response.Status.BAD_REQUEST) - .entity(String.format( + .entity(StringUtils.format( "Invalid nodeType [%s]. Valid node types are %s .", nodeType, DruidNodeDiscoveryProvider.ALL_NODE_TYPES diff --git a/server/src/main/java/io/druid/server/http/CoordinatorDynamicConfigsResource.java b/server/src/main/java/io/druid/server/http/CoordinatorDynamicConfigsResource.java index 2f6e9973f53..208dcf387be 100644 --- a/server/src/main/java/io/druid/server/http/CoordinatorDynamicConfigsResource.java +++ b/server/src/main/java/io/druid/server/http/CoordinatorDynamicConfigsResource.java @@ -24,6 +24,7 @@ import com.sun.jersey.spi.container.ResourceFilters; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.common.config.JacksonConfigManager; +import io.druid.java.util.common.Intervals; import io.druid.server.coordinator.CoordinatorDynamicConfig; import io.druid.server.http.security.ConfigResourceFilter; import org.joda.time.Interval; @@ -105,7 +106,7 @@ public class CoordinatorDynamicConfigsResource @QueryParam("count") final Integer count ) { - Interval theInterval = interval == null ? null : new Interval(interval); + Interval theInterval = interval == null ? 
null : Intervals.of(interval); if (theInterval == null && count != null) { try { return Response.ok( diff --git a/server/src/main/java/io/druid/server/http/DatasourcesResource.java b/server/src/main/java/io/druid/server/http/DatasourcesResource.java index df2d725d7e9..27f6b5a239d 100644 --- a/server/src/main/java/io/druid/server/http/DatasourcesResource.java +++ b/server/src/main/java/io/druid/server/http/DatasourcesResource.java @@ -33,6 +33,8 @@ import io.druid.client.DruidServer; import io.druid.client.ImmutableSegmentLoadInfo; import io.druid.client.SegmentLoadInfo; import io.druid.client.indexing.IndexingServiceClient; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.Pair; import io.druid.java.util.common.guava.Comparators; @@ -208,7 +210,7 @@ public class DatasourcesResource if (kill != null && Boolean.valueOf(kill)) { try { - indexingServiceClient.killSegments(dataSourceName, new Interval(interval)); + indexingServiceClient.killSegments(dataSourceName, Intervals.of(interval)); } catch (IllegalArgumentException e) { return Response.status(Response.Status.BAD_REQUEST) @@ -253,9 +255,9 @@ public class DatasourcesResource if (indexingServiceClient == null) { return Response.ok(ImmutableMap.of("error", "no indexing service found")).build(); } - final Interval theInterval = new Interval(interval.replace("_", "/")); + final Interval theInterval = Intervals.of(interval.replace("_", "/")); try { - indexingServiceClient.killSegments(dataSourceName, new Interval(theInterval)); + indexingServiceClient.killSegments(dataSourceName, theInterval); } catch (Exception e) { return Response.serverError() @@ -343,7 +345,7 @@ public class DatasourcesResource ) { final DruidDataSource dataSource = getDataSource(dataSourceName); - final Interval theInterval = new Interval(interval.replace("_", "/")); + final Interval theInterval = Intervals.of(interval.replace("_", "/")); if (dataSource == null) { return Response.noContent().build(); @@ -589,8 +591,8 @@ public class DatasourcesResource Map> tierDistinctSegments = Maps.newHashMap(); long totalSegmentSize = 0; - long minTime = Long.MAX_VALUE; - long maxTime = Long.MIN_VALUE; + DateTime minTime = DateTimes.MAX; + DateTime maxTime = DateTimes.MIN; String tier; for (DruidServer druidServer : serverInventoryView.getInventory()) { DruidDataSource druidDataSource = druidServer.getDataSource(dataSourceName); @@ -616,12 +618,8 @@ public class DatasourcesResource totalSegmentSize += dataSegment.getSize(); totalDistinctSegments.add(dataSegment.getIdentifier()); - if (dataSegment.getInterval().getStartMillis() < minTime) { - minTime = dataSegment.getInterval().getStartMillis(); - } - if (dataSegment.getInterval().getEndMillis() > maxTime) { - maxTime = dataSegment.getInterval().getEndMillis(); - } + minTime = DateTimes.min(minTime, dataSegment.getInterval().getStart()); + maxTime = DateTimes.max(maxTime, dataSegment.getInterval().getEnd()); } } @@ -639,8 +637,8 @@ public class DatasourcesResource segments.put("count", totalDistinctSegments.size()); segments.put("size", totalSegmentSize); - segments.put("minTime", new DateTime(minTime)); - segments.put("maxTime", new DateTime(maxTime)); + segments.put("minTime", minTime); + segments.put("maxTime", maxTime); return retVal; } @@ -661,7 +659,7 @@ public class DatasourcesResource TimelineLookup timeline = serverInventoryView.getTimeline( new TableDataSource(dataSourceName) ); - final Interval theInterval = 
new Interval(interval.replace("_", "/")); + final Interval theInterval = Intervals.of(interval.replace("_", "/")); if (timeline == null) { log.debug("No timeline found for datasource[%s]", dataSourceName); return Response.ok(Lists.newArrayList()).build(); diff --git a/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java b/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java index c3552e10d91..81636dbe261 100644 --- a/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java +++ b/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java @@ -21,6 +21,7 @@ package io.druid.server.http; import com.google.common.base.Preconditions; import com.google.common.net.HostAndPort; +import io.druid.java.util.common.StringUtils; public class HostAndPortWithScheme { @@ -56,8 +57,9 @@ public class HostAndPortWithScheme private static String checkAndGetScheme(String scheme) { - Preconditions.checkState(scheme.toLowerCase().equals("http") || scheme.toLowerCase().equals("https")); - return scheme.toLowerCase(); + String schemeLowerCase = StringUtils.toLowerCase(scheme); + Preconditions.checkState(schemeLowerCase.equals("http") || schemeLowerCase.equals("https")); + return schemeLowerCase; } public String getScheme() @@ -88,7 +90,7 @@ public class HostAndPortWithScheme @Override public String toString() { - return String.format("%s:%s", scheme, hostAndPort.toString()); + return StringUtils.format("%s:%s", scheme, hostAndPort.toString()); } @Override diff --git a/server/src/main/java/io/druid/server/http/IntervalsResource.java b/server/src/main/java/io/druid/server/http/IntervalsResource.java index 630531252c2..66478a116e5 100644 --- a/server/src/main/java/io/druid/server/http/IntervalsResource.java +++ b/server/src/main/java/io/druid/server/http/IntervalsResource.java @@ -21,9 +21,9 @@ package io.druid.server.http; import com.google.common.collect.Maps; import com.google.inject.Inject; - import io.druid.client.DruidDataSource; import io.druid.client.InventoryView; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.guava.Comparators; import io.druid.server.security.AuthConfig; @@ -99,7 +99,7 @@ public class IntervalsResource @Context final HttpServletRequest req ) { - final Interval theInterval = new Interval(interval.replace("_", "/")); + final Interval theInterval = Intervals.of(interval.replace("_", "/")); final Set datasources = authConfig.isEnabled() ? 
InventoryViewUtils.getSecuredDataSources( serverInventoryView, diff --git a/server/src/main/java/io/druid/server/http/RouterResource.java b/server/src/main/java/io/druid/server/http/RouterResource.java index 5e98e668ba9..9377f403836 100644 --- a/server/src/main/java/io/druid/server/http/RouterResource.java +++ b/server/src/main/java/io/druid/server/http/RouterResource.java @@ -20,7 +20,7 @@ package io.druid.server.http; import com.google.inject.Inject; -import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.client.selector.Server; import io.druid.server.router.TieredBrokerHostSelector; import javax.ws.rs.GET; @@ -50,12 +50,12 @@ public class RouterResource @Produces(MediaType.APPLICATION_JSON) public Map> getBrokers() { - Map brokerSelectorMap = tieredBrokerHostSelector.getAllBrokers(); + Map> brokerSelectorMap = tieredBrokerHostSelector.getAllBrokers(); Map> brokersMap = new HashMap<>(brokerSelectorMap.size()); - for (Map.Entry e : brokerSelectorMap.entrySet()) { - brokersMap.put(e.getKey(), e.getValue().getAll().stream().map(s -> s.getHost()).collect(Collectors.toList())); + for (Map.Entry> e : brokerSelectorMap.entrySet()) { + brokersMap.put(e.getKey(), e.getValue().stream().map(s -> s.getHost()).collect(Collectors.toList())); } return brokersMap; diff --git a/server/src/main/java/io/druid/server/http/RulesResource.java b/server/src/main/java/io/druid/server/http/RulesResource.java index e7eabccb577..d56333470a8 100644 --- a/server/src/main/java/io/druid/server/http/RulesResource.java +++ b/server/src/main/java/io/druid/server/http/RulesResource.java @@ -25,6 +25,7 @@ import com.sun.jersey.spi.container.ResourceFilters; import io.druid.audit.AuditEntry; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; +import io.druid.java.util.common.Intervals; import io.druid.metadata.MetadataRuleManager; import io.druid.server.coordinator.rules.Rule; import io.druid.server.http.security.RulesResourceFilter; @@ -164,7 +165,7 @@ public class RulesResource return auditManager.fetchAuditHistory("rules", count); } - Interval theInterval = interval == null ? null : new Interval(interval); + Interval theInterval = interval == null ? null : Intervals.of(interval); if (dataSourceName != null) { return auditManager.fetchAuditHistory(dataSourceName, "rules", theInterval); } diff --git a/server/src/main/java/io/druid/server/initialization/ZkPathsConfig.java b/server/src/main/java/io/druid/server/initialization/ZkPathsConfig.java index 3e8006a029d..879876ee6d5 100644 --- a/server/src/main/java/io/druid/server/initialization/ZkPathsConfig.java +++ b/server/src/main/java/io/druid/server/initialization/ZkPathsConfig.java @@ -80,6 +80,11 @@ public class ZkPathsConfig return (null == coordinatorPath) ? defaultPath("coordinator") : coordinatorPath; } + public String getOverlordPath() + { + return defaultPath("overlord"); + } + public String getLoadQueuePath() { return (null == loadQueuePath) ? 
defaultPath("loadQueue") : loadQueuePath; diff --git a/server/src/main/java/io/druid/server/initialization/jetty/ServletFilterHolder.java b/server/src/main/java/io/druid/server/initialization/jetty/ServletFilterHolder.java index d40a9bfba54..5566e98405e 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/ServletFilterHolder.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/ServletFilterHolder.java @@ -19,6 +19,8 @@ package io.druid.server.initialization.jetty; +import io.druid.guice.annotations.ExtensionPoint; + import javax.servlet.DispatcherType; import javax.servlet.Filter; import java.util.EnumSet; @@ -33,6 +35,7 @@ import java.util.Map; * Note that some of the druid nodes (router for example) use async servlets and your filter * implementation should be able to handle those requests properly. */ +@ExtensionPoint public interface ServletFilterHolder { diff --git a/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java b/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java deleted file mode 100644 index 63243c14a7a..00000000000 --- a/server/src/main/java/io/druid/server/listener/announcer/ListenerDiscoverer.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.server.listener.announcer; - -import com.google.common.base.Predicate; -import com.google.common.base.Strings; -import com.google.common.base.Throwables; -import com.google.common.collect.Collections2; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.inject.Inject; -import io.druid.java.util.common.lifecycle.LifecycleStart; -import io.druid.java.util.common.lifecycle.LifecycleStop; -import io.druid.java.util.common.logger.Logger; -import io.druid.server.http.HostAndPortWithScheme; -import org.apache.curator.framework.CuratorFramework; -import org.apache.curator.utils.ZKPaths; -import org.apache.zookeeper.KeeperException; - -import javax.annotation.Nullable; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; - -public class ListenerDiscoverer -{ - private static final Logger LOG = new Logger(ListenerDiscoverer.class); - private volatile Map lastSeenMap = ImmutableMap.of(); - private final CuratorFramework cf; - private final ListeningAnnouncerConfig listeningAnnouncerConfig; - private final Object startStopSync = new Object(); - private volatile boolean started = false; - - @Inject - public ListenerDiscoverer( - CuratorFramework cf, - ListeningAnnouncerConfig listeningAnnouncerConfig - ) - { - this.cf = cf; - this.listeningAnnouncerConfig = listeningAnnouncerConfig; - } - - @LifecycleStart - public void start() - { - synchronized (startStopSync) { - if (started) { - LOG.debug("Already started"); - return; - } - started = true; - LOG.info("Started"); - } - } - - @LifecycleStop - public void stop() - { - synchronized (startStopSync) { - if (!started) { - LOG.debug("Already stopped"); - return; - } - LOG.info("Stopped"); - started = false; - } - } - - /** - * Get nodes at a particular listener. 
- * This method lazily adds service discovery - * - * @param listener_key The Listener's service key - * - * @return A collection of druid nodes as established by the service discovery - * - * @throws IOException if there was an error refreshing the zookeeper cache - */ - public Collection getNodes(final String listener_key) throws IOException - { - return getCurrentNodes(listener_key).keySet(); - } - - Map getCurrentNodes(final String listener_key) throws IOException - { - final HashMap retVal = new HashMap<>(); - final String zkPath = listeningAnnouncerConfig.getAnnouncementPath(listener_key); - final Collection children; - try { - children = cf.getChildren().forPath(zkPath); - } - catch (KeeperException.NoNodeException e) { - LOG.debug(e, "No path found at [%s]", zkPath); - return ImmutableMap.of(); - } - catch (Exception e) { - throw new IOException("Error getting children for " + zkPath, e); - } - for (String child : children) { - final String childPath = ZKPaths.makePath(zkPath, child); - try { - final byte[] data; - try { - data = cf.getData().decompressed().forPath(childPath); - } - catch (Exception e) { - throw new IOException("Error getting data for " + childPath, e); - } - if (data == null) { - LOG.debug("Lost data at path [%s]", childPath); - continue; - } - final HostAndPortWithScheme hostAndPortWithScheme = HostAndPortWithScheme.fromString(child); - final Long l = ByteBuffer.wrap(data).getLong(); - retVal.put(hostAndPortWithScheme, l); - } - catch (IllegalArgumentException iae) { - LOG.warn(iae, "Error parsing [%s]", childPath); - } - } - return ImmutableMap.copyOf(retVal); - } - - /** - * Get only nodes that are new since the last time getNewNodes was called (or all nodes if it has never been called) - * - * @param listener_key The listener key to look for - * - * @return A collection of nodes that are new - * - * @throws IOException If there was an error in refreshing the Zookeeper cache - */ - public synchronized Collection getNewNodes(final String listener_key) throws IOException - { - final Map priorSeenMap = lastSeenMap; - final Map currentMap = getCurrentNodes(listener_key); - final Collection retVal = Collections2.filter( - currentMap.keySet(), - new Predicate() - { - @Override - public boolean apply(HostAndPortWithScheme input) - { - final Long l = priorSeenMap.get(input); - return l == null || l < currentMap.get(input); - } - } - ); - lastSeenMap = currentMap; - return retVal; - } - - /** - * Discovers children of the listener key - * - * @param key_base The base of the listener key, or null or empty string to get all immediate children of the listener path - * - * @return A collection of the names of the children, or empty list on NoNodeException from Curator - * - * @throws IOException from Curator - * @throws RuntimeException for other exceptions from Curator. - */ - public Collection discoverChildren(@Nullable final String key_base) throws IOException - { - final String zkPath = Strings.isNullOrEmpty(key_base) - ? 
listeningAnnouncerConfig.getListenersPath() - : listeningAnnouncerConfig.getAnnouncementPath(key_base); - try { - return cf.getChildren().forPath(zkPath); - } - catch (KeeperException.NoNodeException | KeeperException.NoChildrenForEphemeralsException e) { - LOG.warn(e, "Path [%s] not discoverable", zkPath); - return ImmutableList.of(); - } - catch (Exception e) { - Throwables.propagateIfInstanceOf(e, IOException.class); - throw Throwables.propagate(e); - } - } -} diff --git a/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java b/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java index f87c825d8c9..809e4e1827c 100644 --- a/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java +++ b/server/src/main/java/io/druid/server/listener/announcer/ListenerResourceAnnouncer.java @@ -27,7 +27,6 @@ import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.java.util.common.logger.Logger; import io.druid.server.http.HostAndPortWithScheme; import org.apache.curator.utils.ZKPaths; -import org.joda.time.DateTime; import java.nio.ByteBuffer; @@ -38,7 +37,7 @@ public abstract class ListenerResourceAnnouncer { private static final byte[] ANNOUNCE_BYTES = ByteBuffer .allocate(Longs.BYTES) - .putLong(DateTime.now().getMillis()) + .putLong(System.currentTimeMillis()) .array(); private static final Logger LOG = new Logger(ListenerResourceAnnouncer.class); private final Object startStopSync = new Object(); diff --git a/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java b/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java index 0d31a6b4974..78360c6af1e 100644 --- a/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java +++ b/server/src/main/java/io/druid/server/listener/resource/AbstractListenerHandler.java @@ -27,6 +27,7 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import io.druid.common.utils.ServletResourceUtils; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.java.util.common.logger.Logger; import javax.annotation.Nullable; @@ -82,9 +83,7 @@ public abstract class AbstractListenerHandler implements ListenerHandle try { // This actually fails to properly convert due to type erasure. 
We'll try again in a second // This effectively just parses - final Map tempMap = mapper.readValue(inputStream, new TypeReference>() - { - }); + final Map tempMap = mapper.readValue(inputStream, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT); // Now do the ACTUAL conversion inObjMap = ImmutableMap.copyOf(Maps.transformValues( tempMap, diff --git a/server/src/main/java/io/druid/server/log/FileRequestLogger.java b/server/src/main/java/io/druid/server/log/FileRequestLogger.java index b0beed59445..9f29f33e236 100644 --- a/server/src/main/java/io/druid/server/log/FileRequestLogger.java +++ b/server/src/main/java/io/druid/server/log/FileRequestLogger.java @@ -22,6 +22,7 @@ package io.druid.server.log; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.base.Throwables; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.guava.CloseQuietly; @@ -31,6 +32,7 @@ import io.druid.server.RequestLogLine; import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.MutableDateTime; +import org.joda.time.chrono.ISOChronology; import java.io.File; import java.io.FileNotFoundException; @@ -66,15 +68,15 @@ public class FileRequestLogger implements RequestLogger try { baseDir.mkdirs(); - MutableDateTime mutableDateTime = new DateTime().toMutableDateTime(); + MutableDateTime mutableDateTime = DateTimes.nowUtc().toMutableDateTime(ISOChronology.getInstanceUTC()); mutableDateTime.setMillisOfDay(0); synchronized (lock) { - currentDay = mutableDateTime.toDateTime(); + currentDay = mutableDateTime.toDateTime(ISOChronology.getInstanceUTC()); fileWriter = getFileWriter(); } long nextDay = currentDay.plusDays(1).getMillis(); - Duration initialDelay = new Duration(nextDay - new DateTime().getMillis()); + Duration initialDelay = new Duration(nextDay - System.currentTimeMillis()); ScheduledExecutors.scheduleWithFixedDelay( exec, diff --git a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java index d0e6d363b70..53770dfa701 100644 --- a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java +++ b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java @@ -25,7 +25,6 @@ import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Preconditions; -import com.google.common.base.Throwables; import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; @@ -47,6 +46,7 @@ import io.druid.audit.AuditInfo; import io.druid.common.config.JacksonConfigManager; import io.druid.concurrent.Execs; import io.druid.concurrent.LifecycleLock; +import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.IAE; @@ -54,10 +54,8 @@ import io.druid.java.util.common.IOE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StreamUtils; import io.druid.java.util.common.StringUtils; -import io.druid.query.lookup.LookupModule; import io.druid.query.lookup.LookupsState; import io.druid.server.http.HostAndPortWithScheme; -import 
io.druid.server.listener.announcer.ListenerDiscoverer; import io.druid.server.listener.resource.ListenerResource; import org.jboss.netty.handler.codec.http.HttpHeaders; import org.jboss.netty.handler.codec.http.HttpMethod; @@ -109,7 +107,9 @@ public class LookupCoordinatorManager private static final EmittingLogger LOG = new EmittingLogger(LookupCoordinatorManager.class); - private final ListenerDiscoverer listenerDiscoverer; + private final DruidNodeDiscoveryProvider druidNodeDiscoveryProvider; + private LookupNodeDiscovery lookupNodeDiscovery; + private final JacksonConfigManager configManager; private final LookupCoordinatorManagerConfig lookupCoordinatorManagerConfig; private final LookupsCommunicator lookupsCommunicator; @@ -134,32 +134,35 @@ public class LookupCoordinatorManager @Inject public LookupCoordinatorManager( final @Global HttpClient httpClient, - final ListenerDiscoverer listenerDiscoverer, + final DruidNodeDiscoveryProvider druidNodeDiscoveryProvider, final @Smile ObjectMapper smileMapper, final JacksonConfigManager configManager, final LookupCoordinatorManagerConfig lookupCoordinatorManagerConfig ) { this( - listenerDiscoverer, + druidNodeDiscoveryProvider, configManager, lookupCoordinatorManagerConfig, - new LookupsCommunicator(httpClient, lookupCoordinatorManagerConfig, smileMapper) + new LookupsCommunicator(httpClient, lookupCoordinatorManagerConfig, smileMapper), + null ); } @VisibleForTesting LookupCoordinatorManager( - final ListenerDiscoverer listenerDiscoverer, + final DruidNodeDiscoveryProvider druidNodeDiscoveryProvider, final JacksonConfigManager configManager, final LookupCoordinatorManagerConfig lookupCoordinatorManagerConfig, - final LookupsCommunicator lookupsCommunicator + final LookupsCommunicator lookupsCommunicator, + final LookupNodeDiscovery lookupNodeDiscovery ) { - this.listenerDiscoverer = listenerDiscoverer; + this.druidNodeDiscoveryProvider = druidNodeDiscoveryProvider; this.configManager = configManager; this.lookupCoordinatorManagerConfig = lookupCoordinatorManagerConfig; this.lookupsCommunicator = lookupsCommunicator; + this.lookupNodeDiscovery = lookupNodeDiscovery; } public boolean updateLookup( @@ -275,36 +278,26 @@ public class LookupCoordinatorManager } } - public Collection discoverTiers() + public Set discoverTiers() { - try { - Preconditions.checkState(lifecycleLock.awaitStarted(5, TimeUnit.SECONDS), "not started"); - return listenerDiscoverer.discoverChildren(LookupCoordinatorManager.LOOKUP_LISTEN_ANNOUNCE_KEY); - } - catch (IOException e) { - throw Throwables.propagate(e); - } + Preconditions.checkState(lifecycleLock.awaitStarted(5, TimeUnit.SECONDS), "not started"); + return lookupNodeDiscovery.getAllTiers(); } public Collection discoverNodesInTier(String tier) { - try { - Preconditions.checkState(lifecycleLock.awaitStarted(5, TimeUnit.SECONDS), "not started"); - return Collections2.transform( - listenerDiscoverer.getNodes(LookupModule.getTierListenerPath(tier)), - new Function() + Preconditions.checkState(lifecycleLock.awaitStarted(5, TimeUnit.SECONDS), "not started"); + return Collections2.transform( + lookupNodeDiscovery.getNodesInTier(tier), + new Function() + { + @Override + public HostAndPort apply(HostAndPortWithScheme input) { - @Override - public HostAndPort apply(HostAndPortWithScheme input) - { - return input.getHostAndPort(); - } + return input.getHostAndPort(); } - ); - } - catch (IOException e) { - throw new RuntimeException(e); - } + } + ); } public Map> getLastKnownLookupsStateOnNodes() @@ -348,6 +341,10 @@ public 
class LookupCoordinatorManager try { LOG.debug("Starting."); + if (lookupNodeDiscovery == null) { + lookupNodeDiscovery = new LookupNodeDiscovery(druidNodeDiscoveryProvider); + } + //first ensure that previous executorService from last cycle of start/stop has finished completely. //so that we don't have multiple live executorService instances lying around doing lookup management. if (executorService != null && @@ -522,7 +519,7 @@ public class LookupCoordinatorManager LOG.debug("Starting lookup mgmt for tier [%s].", tierEntry.getKey()); final Map tierLookups = tierEntry.getValue(); - for (final HostAndPortWithScheme node : listenerDiscoverer.getNodes(LookupModule.getTierListenerPath(tierEntry.getKey()))) { + for (final HostAndPortWithScheme node : lookupNodeDiscovery.getNodesInTier(tierEntry.getKey())) { LOG.debug( "Starting lookup mgmt for tier [%s] and host [%s:%s:%s].", diff --git a/server/src/main/java/io/druid/server/lookup/cache/LookupNodeDiscovery.java b/server/src/main/java/io/druid/server/lookup/cache/LookupNodeDiscovery.java new file mode 100644 index 00000000000..b4b56abca35 --- /dev/null +++ b/server/src/main/java/io/druid/server/lookup/cache/LookupNodeDiscovery.java @@ -0,0 +1,88 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.server.lookup.cache; + +import com.google.common.base.Function; +import com.google.common.base.Predicate; +import com.google.common.collect.Collections2; +import com.google.common.collect.ImmutableSet; +import io.druid.discovery.DiscoveryDruidNode; +import io.druid.discovery.DruidNodeDiscovery; +import io.druid.discovery.DruidNodeDiscoveryProvider; +import io.druid.discovery.LookupNodeService; +import io.druid.server.http.HostAndPortWithScheme; + +import javax.annotation.Nullable; +import java.util.Collection; +import java.util.Set; + +/** + * A Helper class that uses DruidNodeDiscovery to discover lookup nodes and tiers. 
+ */ +public class LookupNodeDiscovery +{ + private final DruidNodeDiscovery druidNodeDiscovery; + + LookupNodeDiscovery(DruidNodeDiscoveryProvider druidNodeDiscoveryProvider) + { + this.druidNodeDiscovery = druidNodeDiscoveryProvider.getForService(LookupNodeService.DISCOVERY_SERVICE_KEY); + } + + public Collection getNodesInTier(String tier) + { + return Collections2.transform( + Collections2.filter( + druidNodeDiscovery.getAllNodes(), + new Predicate() + { + @Override + public boolean apply(@Nullable DiscoveryDruidNode node) + { + return tier.equals(((LookupNodeService) node.getServices() + .get(LookupNodeService.DISCOVERY_SERVICE_KEY)).getLookupTier()); + } + } + ), + new Function() + { + @Override + public HostAndPortWithScheme apply(@Nullable DiscoveryDruidNode input) + { + return HostAndPortWithScheme.fromString( + input.getDruidNode().getServiceScheme(), + input.getDruidNode().getHostAndPortToUse() + ); + } + } + ); + } + + public Set getAllTiers() + { + ImmutableSet.Builder builder = new ImmutableSet.Builder<>(); + + druidNodeDiscovery.getAllNodes().stream().forEach( + node -> builder.add(((LookupNodeService) node.getServices() + .get(LookupNodeService.DISCOVERY_SERVICE_KEY)).getLookupTier()) + ); + + return builder.build(); + } +} diff --git a/server/src/main/java/io/druid/server/router/QueryHostFinder.java b/server/src/main/java/io/druid/server/router/QueryHostFinder.java index f311aeb6897..244bd523583 100644 --- a/server/src/main/java/io/druid/server/router/QueryHostFinder.java +++ b/server/src/main/java/io/druid/server/router/QueryHostFinder.java @@ -19,18 +19,17 @@ package io.druid.server.router; -import com.google.common.base.Function; -import com.google.common.collect.FluentIterable; import com.google.inject.Inject; import com.metamx.emitter.EmittingLogger; import io.druid.client.selector.Server; -import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.query.Query; import java.util.Collection; +import java.util.List; import java.util.concurrent.ConcurrentHashMap; +import java.util.stream.Collectors; /** */ @@ -52,40 +51,24 @@ public class QueryHostFinder public Server findServer(Query query) { - final Pair selected = hostSelector.select(query); + final Pair selected = hostSelector.select(query); return findServerInner(selected); } public Server findDefaultServer() { - final Pair selected = hostSelector.getDefaultLookup(); + final Pair selected = hostSelector.getDefaultLookup(); return findServerInner(selected); } - public Collection getAllHosts() + public Collection getAllServers() { - return FluentIterable - .from((Collection) hostSelector.getAllBrokers().values()) - .transformAndConcat( - new Function>() - { - @Override - public Iterable apply(ServerDiscoverySelector input) - { - return input.getAll(); - } - } - ).transform(new Function() - { - @Override - public String apply(Server input) - { - return input.getHost(); - } - }).toList(); + return ((Collection>) hostSelector.getAllBrokers().values()).stream() + .flatMap(Collection::stream) + .collect(Collectors.toList()); } - public String getHost(Query query) + public Server getServer(Query query) { Server server = findServer(query); @@ -97,13 +80,12 @@ public class QueryHostFinder throw new ISE("No server found for query[%s]", query); } - final String host = server.getHost(); - log.debug("Selected [%s]", host); + log.debug("Selected [%s]", server.getHost()); - return host; + return server; } - public String getDefaultHost() + 
public Server getDefaultServer() { Server server = findDefaultServer(); @@ -115,19 +97,18 @@ public class QueryHostFinder throw new ISE("No default server found!"); } - return server.getHost(); + return server; } - private Server findServerInner(final Pair selected) + private Server findServerInner(final Pair selected) { if (selected == null) { log.error("Danger, Will Robinson! Unable to find any brokers!"); } final String serviceName = selected == null ? hostSelector.getDefaultServiceName() : selected.lhs; - final ServerDiscoverySelector selector = selected == null ? null : selected.rhs; + Server server = selected == null ? null : selected.rhs; - Server server = selector == null ? null : selector.pick(); if (server == null) { log.error( "WTF?! No server found for serviceName[%s]. Using backup", diff --git a/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java b/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java index 72f0cd8c9e5..f82d8a3e4a0 100644 --- a/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java +++ b/server/src/main/java/io/druid/server/router/TieredBrokerHostSelector.java @@ -19,14 +19,18 @@ package io.druid.server.router; +import com.google.common.base.Function; import com.google.common.base.Optional; -import com.google.common.base.Throwables; +import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; +import com.google.common.collect.Maps; import com.google.inject.Inject; import com.metamx.emitter.EmittingLogger; -import io.druid.client.selector.HostSelector; -import io.druid.curator.discovery.ServerDiscoveryFactory; -import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.client.selector.Server; +import io.druid.discovery.DiscoveryDruidNode; +import io.druid.discovery.DruidNodeDiscovery; +import io.druid.discovery.DruidNodeDiscoveryProvider; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; @@ -36,38 +40,75 @@ import io.druid.server.coordinator.rules.Rule; import org.joda.time.DateTime; import org.joda.time.Interval; -import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** */ -public class TieredBrokerHostSelector implements HostSelector +public class TieredBrokerHostSelector { private static EmittingLogger log = new EmittingLogger(TieredBrokerHostSelector.class); private final CoordinatorRuleManager ruleManager; private final TieredBrokerConfig tierConfig; - private final ServerDiscoveryFactory serverDiscoveryFactory; - private final ConcurrentHashMap selectorMap = new ConcurrentHashMap<>(); private final List strategies; + // brokerService -> broker-nodes-holder + private final ConcurrentHashMap servers = new ConcurrentHashMap<>(); + + private final DruidNodeDiscoveryProvider druidNodeDiscoveryProvider; + private final Object lock = new Object(); private volatile boolean started = false; + private static final Function TO_SERVER = new Function() + { + @Override + public Server apply(final DiscoveryDruidNode instance) + { + return new Server() + { + @Override + public String getHost() + { + return instance.getDruidNode().getHostAndPortToUse(); + } + + @Override + public String getAddress() + { + return instance.getDruidNode().getHost(); + } + + @Override + public int getPort() + { + return 
instance.getDruidNode().getPortToUse(); + } + + @Override + public String getScheme() + { + return instance.getDruidNode().getServiceScheme(); + } + }; + } + }; + @Inject public TieredBrokerHostSelector( CoordinatorRuleManager ruleManager, TieredBrokerConfig tierConfig, - ServerDiscoveryFactory serverDiscoveryFactory, + DruidNodeDiscoveryProvider druidNodeDiscoveryProvider, List strategies ) { this.ruleManager = ruleManager; this.tierConfig = tierConfig; - this.serverDiscoveryFactory = serverDiscoveryFactory; + this.druidNodeDiscoveryProvider = druidNodeDiscoveryProvider; this.strategies = strategies; } @@ -79,17 +120,42 @@ public class TieredBrokerHostSelector implements HostSelector return; } - try { - for (Map.Entry entry : tierConfig.getTierToBrokerMap().entrySet()) { - ServerDiscoverySelector selector = serverDiscoveryFactory.createSelector(entry.getValue()); - selector.start(); - selectorMap.put(entry.getValue(), selector); - } - } - catch (Exception e) { - throw Throwables.propagate(e); + for (Map.Entry entry : tierConfig.getTierToBrokerMap().entrySet()) { + servers.put(entry.getValue(), new NodesHolder()); } + DruidNodeDiscovery druidNodeDiscovery = druidNodeDiscoveryProvider.getForNodeType(DruidNodeDiscoveryProvider.NODE_TYPE_BROKER); + druidNodeDiscovery.registerListener( + new DruidNodeDiscovery.Listener() + { + @Override + public void nodesAdded(List nodes) + { + nodes.forEach( + (node) -> { + NodesHolder nodesHolder = servers.get(node.getDruidNode().getServiceName()); + if (nodesHolder != null) { + nodesHolder.add(node.getDruidNode().getHostAndPortToUse(), TO_SERVER.apply(node)); + } + } + ); + } + + @Override + public void nodesRemoved(List nodes) + { + nodes.forEach( + (node) -> { + NodesHolder nodesHolder = servers.get(node.getDruidNode().getServiceName()); + if (nodesHolder != null) { + nodesHolder.remove(node.getDruidNode().getHostAndPortToUse()); + } + } + ); + } + } + ); + started = true; } } @@ -103,27 +169,16 @@ public class TieredBrokerHostSelector implements HostSelector return; } - try { - for (ServerDiscoverySelector selector : selectorMap.values()) { - selector.stop(); - } - } - catch (Exception e) { - throw Throwables.propagate(e); - } - started = false; } } - @Override public String getDefaultServiceName() { return tierConfig.getDefaultBrokerServiceName(); } - @Override - public Pair select(final Query query) + public Pair select(final Query query) { synchronized (lock) { if (!ruleManager.isStarted() || !started) { @@ -146,7 +201,7 @@ public class TieredBrokerHostSelector implements HostSelector List rules = ruleManager.getRulesWithDefault(Iterables.getFirst(query.getDataSource().getNames(), null)); // find the rule that can apply to the entire set of intervals - DateTime now = new DateTime(); + DateTime now = DateTimes.nowUtc(); int lastRulePosition = -1; LoadRule baseRule = null; @@ -185,29 +240,83 @@ public class TieredBrokerHostSelector implements HostSelector brokerServiceName = tierConfig.getDefaultBrokerServiceName(); } - ServerDiscoverySelector retVal = selectorMap.get(brokerServiceName); + NodesHolder nodesHolder = servers.get(brokerServiceName); - if (retVal == null) { + if (nodesHolder == null) { log.error( - "WTF?! No selector found for brokerServiceName[%s]. Using default selector for[%s]", + "WTF?! No nodesHolder found for brokerServiceName[%s]. 
Using default selector for[%s]", brokerServiceName, tierConfig.getDefaultBrokerServiceName() ); - retVal = selectorMap.get(tierConfig.getDefaultBrokerServiceName()); + nodesHolder = servers.get(tierConfig.getDefaultBrokerServiceName()); } - return new Pair<>(brokerServiceName, retVal); + return new Pair<>(brokerServiceName, nodesHolder.pick()); } - public Pair getDefaultLookup() + public Pair getDefaultLookup() { final String brokerServiceName = tierConfig.getDefaultBrokerServiceName(); - final ServerDiscoverySelector retVal = selectorMap.get(brokerServiceName); - return new Pair<>(brokerServiceName, retVal); + return new Pair<>(brokerServiceName, servers.get(brokerServiceName).pick()); } - public Map getAllBrokers() + public Map> getAllBrokers() { - return Collections.unmodifiableMap(selectorMap); + return Maps.transformValues( + servers, + new Function>() + { + @Override + public List apply(NodesHolder input) + { + return input.getAll(); + } + } + ); + } + + private static class NodesHolder + { + private int roundRobinIndex = 0; + + private Map nodesMap = new HashMap<>(); + private ImmutableList nodes = ImmutableList.of(); + + void add(String id, Server node) + { + synchronized (this) { + nodesMap.put(id, node); + nodes = ImmutableList.copyOf(nodesMap.values()); + } + } + + void remove(String id) + { + synchronized (this) { + if (nodesMap.remove(id) != null) { + nodes = ImmutableList.copyOf(nodesMap.values()); + } + } + } + + List getAll() + { + return nodes; + } + + Server pick() + { + ImmutableList currNodes = nodes; + + if (currNodes.size() == 0) { + return null; + } + + if (roundRobinIndex >= currNodes.size()) { + roundRobinIndex %= currNodes.size(); + } + + return currNodes.get(roundRobinIndex++); + } } } diff --git a/server/src/test/java/io/druid/client/BrokerServerViewTest.java b/server/src/test/java/io/druid/client/BrokerServerViewTest.java index b16c6684f45..2dae5d4310a 100644 --- a/server/src/test/java/io/druid/client/BrokerServerViewTest.java +++ b/server/src/test/java/io/druid/client/BrokerServerViewTest.java @@ -34,6 +34,7 @@ import io.druid.client.selector.RandomServerSelectorStrategy; import io.druid.client.selector.ServerSelector; import io.druid.curator.CuratorTestBase; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.query.QueryToolChestWarehouse; import io.druid.query.QueryWatcher; @@ -114,14 +115,14 @@ public class BrokerServerViewTest extends CuratorTestBase TimelineLookup timeline = brokerServerView.getTimeline(new TableDataSource("test_broker_server_view")); List serverLookupRes = (List) timeline.lookup( - new Interval( + Intervals.of( "2014-10-20T00:00:00Z/P1D" ) ); Assert.assertEquals(1, serverLookupRes.size()); TimelineObjectHolder actualTimelineObjectHolder = serverLookupRes.get(0); - Assert.assertEquals(new Interval("2014-10-20T00:00:00Z/P1D"), actualTimelineObjectHolder.getInterval()); + Assert.assertEquals(Intervals.of("2014-10-20T00:00:00Z/P1D"), actualTimelineObjectHolder.getInterval()); Assert.assertEquals("v1", actualTimelineObjectHolder.getVersion()); PartitionHolder actualPartitionHolder = actualTimelineObjectHolder.getObject(); @@ -139,9 +140,9 @@ public class BrokerServerViewTest extends CuratorTestBase Assert.assertEquals( 0, - ((List) timeline.lookup(new Interval("2014-10-20T00:00:00Z/P1D"))).size() + ((List) timeline.lookup(Intervals.of("2014-10-20T00:00:00Z/P1D"))).size() ); - Assert.assertNull(timeline.findEntry(new 
Interval("2014-10-20T00:00:00Z/P1D"), "v1")); + Assert.assertNull(timeline.findEntry(Intervals.of("2014-10-20T00:00:00Z/P1D"), "v1")); } @Test @@ -210,7 +211,7 @@ public class BrokerServerViewTest extends CuratorTestBase createExpected("2011-04-06/2011-04-09", "v3", druidServers.get(3), segments.get(3)) ), (List) timeline.lookup( - new Interval( + Intervals.of( "2011-04-01/2011-04-09" ) ) @@ -232,7 +233,7 @@ public class BrokerServerViewTest extends CuratorTestBase createExpected("2011-04-06/2011-04-09", "v3", druidServers.get(3), segments.get(3)) ), (List) timeline.lookup( - new Interval( + Intervals.of( "2011-04-01/2011-04-09" ) ) @@ -249,7 +250,7 @@ public class BrokerServerViewTest extends CuratorTestBase Assert.assertEquals( 0, - ((List) timeline.lookup(new Interval("2011-04-01/2011-04-09"))).size() + ((List) timeline.lookup(Intervals.of("2011-04-01/2011-04-09"))).size() ); } @@ -260,7 +261,7 @@ public class BrokerServerViewTest extends CuratorTestBase DataSegment segment ) { - return Pair.of(new Interval(intervalStr), Pair.of(version, Pair.of(druidServer, segment))); + return Pair.of(Intervals.of(intervalStr), Pair.of(version, Pair.of(druidServer, segment))); } private void assertValues( @@ -349,7 +350,7 @@ public class BrokerServerViewTest extends CuratorTestBase { return DataSegment.builder() .dataSource("test_broker_server_view") - .interval(new Interval(intervalStr)) + .interval(Intervals.of(intervalStr)) .loadSpec( ImmutableMap.of( "type", diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java index e6814f166ed..936e538a811 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientFunctionalityTest.java @@ -29,6 +29,7 @@ import io.druid.client.cache.MapCache; import io.druid.client.selector.QueryableDruidServer; import io.druid.client.selector.ServerSelector; import io.druid.client.selector.TierSelectorStrategy; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.guava.Sequence; import io.druid.query.DataSource; import io.druid.query.Druids; @@ -81,9 +82,9 @@ public class CachingClusteredClientFunctionalityTest @Test public void testUncoveredInterval() throws Exception { - addToTimeline(new Interval("2015-01-02/2015-01-03"), "1"); - addToTimeline(new Interval("2015-01-04/2015-01-05"), "1"); - addToTimeline(new Interval("2015-02-04/2015-02-05"), "1"); + addToTimeline(Intervals.of("2015-01-02/2015-01-03"), "1"); + addToTimeline(Intervals.of("2015-01-04/2015-01-05"), "1"); + addToTimeline(Intervals.of("2015-02-04/2015-02-05"), "1"); final Druids.TimeseriesQueryBuilder builder = Druids.newTimeseriesQueryBuilder() .dataSource("test") @@ -145,7 +146,7 @@ public class CachingClusteredClientFunctionalityTest { List expectedList = Lists.newArrayListWithExpectedSize(intervals.length); for (String interval : intervals) { - expectedList.add(new Interval(interval)); + expectedList.add(Intervals.of(interval)); } Assert.assertEquals((Object) expectedList, context.get("uncoveredIntervals")); Assert.assertEquals(uncoveredIntervalsOverflowed, context.get("uncoveredIntervalsOverflowed")); diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index b764421048d..a68e5c573fe 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java 
+++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -55,7 +55,9 @@ import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.hll.HyperLogLogCollector; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; @@ -465,19 +467,19 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTimeResults( - new DateTime("2011-01-05"), 85, 102, - new DateTime("2011-01-06"), 412, 521, - new DateTime("2011-01-07"), 122, 21894, - new DateTime("2011-01-08"), 5, 20, - new DateTime("2011-01-09"), 18, 521 + DateTimes.of("2011-01-05"), 85, 102, + DateTimes.of("2011-01-06"), 412, 521, + DateTimes.of("2011-01-07"), 122, 21894, + DateTimes.of("2011-01-08"), 5, 20, + DateTimes.of("2011-01-09"), 18, 521 ), - new Interval("2011-01-10/2011-01-13"), + Intervals.of("2011-01-10/2011-01-13"), makeTimeResults( - new DateTime("2011-01-10"), 85, 102, - new DateTime("2011-01-11"), 412, 521, - new DateTime("2011-01-12"), 122, 21894 + DateTimes.of("2011-01-10"), 85, 102, + DateTimes.of("2011-01-11"), 412, 521, + DateTimes.of("2011-01-12"), 122, 21894 ) ); } @@ -504,26 +506,26 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000), - new Interval("2011-01-02/2011-01-03"), makeTimeResults(new DateTime("2011-01-02"), 30, 6000), - new Interval("2011-01-04/2011-01-05"), makeTimeResults(new DateTime("2011-01-04"), 23, 85312), + Intervals.of("2011-01-01/2011-01-02"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000), + Intervals.of("2011-01-02/2011-01-03"), makeTimeResults(DateTimes.of("2011-01-02"), 30, 6000), + Intervals.of("2011-01-04/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-04"), 23, 85312), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTimeResults( - new DateTime("2011-01-05"), 85, 102, - new DateTime("2011-01-06"), 412, 521, - new DateTime("2011-01-07"), 122, 21894, - new DateTime("2011-01-08"), 5, 20, - new DateTime("2011-01-09"), 18, 521 + DateTimes.of("2011-01-05"), 85, 102, + DateTimes.of("2011-01-06"), 412, 521, + DateTimes.of("2011-01-07"), 122, 21894, + DateTimes.of("2011-01-08"), 5, 20, + DateTimes.of("2011-01-09"), 18, 521 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTimeResults( - new DateTime("2011-01-05T01"), 80, 100, - new DateTime("2011-01-06T01"), 420, 520, - new DateTime("2011-01-07T01"), 12, 2194, - new DateTime("2011-01-08T01"), 59, 201, - new DateTime("2011-01-09T01"), 181, 52 + DateTimes.of("2011-01-05T01"), 80, 100, + DateTimes.of("2011-01-06T01"), 420, 520, + DateTimes.of("2011-01-07T01"), 12, 2194, + DateTimes.of("2011-01-08T01"), 59, 201, + DateTimes.of("2011-01-09T01"), 181, 52 ) ); @@ -535,19 +537,19 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedResults( makeRenamedTimeResults( - new DateTime("2011-01-01"), 50, 5000, - new DateTime("2011-01-02"), 30, 6000, - new DateTime("2011-01-04"), 23, 85312, - new DateTime("2011-01-05"), 85, 102, - new DateTime("2011-01-05T01"), 80, 100, - new DateTime("2011-01-06"), 412, 521, - new 
DateTime("2011-01-06T01"), 420, 520, - new DateTime("2011-01-07"), 122, 21894, - new DateTime("2011-01-07T01"), 12, 2194, - new DateTime("2011-01-08"), 5, 20, - new DateTime("2011-01-08T01"), 59, 201, - new DateTime("2011-01-09"), 18, 521, - new DateTime("2011-01-09T01"), 181, 52 + DateTimes.of("2011-01-01"), 50, 5000, + DateTimes.of("2011-01-02"), 30, 6000, + DateTimes.of("2011-01-04"), 23, 85312, + DateTimes.of("2011-01-05"), 85, 102, + DateTimes.of("2011-01-05T01"), 80, 100, + DateTimes.of("2011-01-06"), 412, 521, + DateTimes.of("2011-01-06T01"), 420, 520, + DateTimes.of("2011-01-07"), 122, 21894, + DateTimes.of("2011-01-07T01"), 12, 2194, + DateTimes.of("2011-01-08"), 5, 20, + DateTimes.of("2011-01-08T01"), 59, 201, + DateTimes.of("2011-01-09"), 18, 521, + DateTimes.of("2011-01-09T01"), 181, 52 ), runner.run(QueryPlus.wrap(query), context) ); @@ -559,7 +561,7 @@ public class CachingClusteredClientTest public void testCachingOverBulkLimitEnforcesLimit() throws Exception { final int limit = 10; - final Interval interval = new Interval("2011-01-01/2011-01-02"); + final Interval interval = Intervals.of("2011-01-01/2011-01-02"); final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource(DATA_SOURCE) .intervals(new MultipleIntervalSegmentSpec(ImmutableList.of(interval))) @@ -631,21 +633,21 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTimeResults( - new DateTime("2011-01-05T02"), 80, 100, - new DateTime("2011-01-06T02"), 420, 520, - new DateTime("2011-01-07T02"), 12, 2194, - new DateTime("2011-01-08T02"), 59, 201, - new DateTime("2011-01-09T02"), 181, 52 + DateTimes.of("2011-01-05T02"), 80, 100, + DateTimes.of("2011-01-06T02"), 420, 520, + DateTimes.of("2011-01-07T02"), 12, 2194, + DateTimes.of("2011-01-08T02"), 59, 201, + DateTimes.of("2011-01-09T02"), 181, 52 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTimeResults( - new DateTime("2011-01-05T00"), 85, 102, - new DateTime("2011-01-06T00"), 412, 521, - new DateTime("2011-01-07T00"), 122, 21894, - new DateTime("2011-01-08T00"), 5, 20, - new DateTime("2011-01-09T00"), 18, 521 + DateTimes.of("2011-01-05T00"), 85, 102, + DateTimes.of("2011-01-06T00"), 412, 521, + DateTimes.of("2011-01-07T00"), 122, 21894, + DateTimes.of("2011-01-08T00"), 5, 20, + DateTimes.of("2011-01-09T00"), 18, 521 ) ); @@ -656,16 +658,16 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedResults( makeRenamedTimeResults( - new DateTime("2011-01-05T00"), 85, 102, - new DateTime("2011-01-05T02"), 80, 100, - new DateTime("2011-01-06T00"), 412, 521, - new DateTime("2011-01-06T02"), 420, 520, - new DateTime("2011-01-07T00"), 122, 21894, - new DateTime("2011-01-07T02"), 12, 2194, - new DateTime("2011-01-08T00"), 5, 20, - new DateTime("2011-01-08T02"), 59, 201, - new DateTime("2011-01-09T00"), 18, 521, - new DateTime("2011-01-09T02"), 181, 52 + DateTimes.of("2011-01-05T00"), 85, 102, + DateTimes.of("2011-01-05T02"), 80, 100, + DateTimes.of("2011-01-06T00"), 412, 521, + DateTimes.of("2011-01-06T02"), 420, 520, + DateTimes.of("2011-01-07T00"), 122, 21894, + DateTimes.of("2011-01-07T02"), 12, 2194, + DateTimes.of("2011-01-08T00"), 5, 20, + DateTimes.of("2011-01-08T02"), 59, 201, + DateTimes.of("2011-01-09T00"), 18, 521, + DateTimes.of("2011-01-09T02"), 181, 52 ), runner.run(QueryPlus.wrap(query), Maps.newHashMap()) ); @@ -693,7 +695,7 @@ public class 
CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-11-04/2011-11-08"), + Intervals.of("2011-11-04/2011-11-08"), makeTimeResults( new DateTime("2011-11-04", TIMEZONE), 50, 5000, new DateTime("2011-11-05", TIMEZONE), 30, 6000, @@ -744,7 +746,7 @@ public class CachingClusteredClientTest "populateCache", "true" ) ).build(), - new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000) + Intervals.of("2011-01-01/2011-01-02"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000) ); Assert.assertEquals(1, cache.getStats().getNumEntries()); @@ -763,7 +765,7 @@ public class CachingClusteredClientTest "populateCache", "false" ) ).build(), - new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000) + Intervals.of("2011-01-01/2011-01-02"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000) ); Assert.assertEquals(0, cache.getStats().getNumEntries()); @@ -780,7 +782,7 @@ public class CachingClusteredClientTest "populateCache", "false" ) ).build(), - new Interval("2011-01-01/2011-01-02"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000) + Intervals.of("2011-01-01/2011-01-02"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000) ); Assert.assertEquals(0, cache.getStats().getNumEntries()); @@ -815,28 +817,28 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-01/2011-01-02"), - makeTopNResultsWithoutRename(new DateTime("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998), + Intervals.of("2011-01-01/2011-01-02"), + makeTopNResultsWithoutRename(DateTimes.of("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998), - new Interval("2011-01-02/2011-01-03"), - makeTopNResultsWithoutRename(new DateTime("2011-01-02"), "a", 50, 4997, "b", 50, 4996, "c", 50, 4995), + Intervals.of("2011-01-02/2011-01-03"), + makeTopNResultsWithoutRename(DateTimes.of("2011-01-02"), "a", 50, 4997, "b", 50, 4996, "c", 50, 4995), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + 
DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ); HashMap context = new HashMap(); @@ -848,18 +850,18 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedResults( makeRenamedTopNResults( - new DateTime("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998, - new DateTime("2011-01-02"), "a", 50, 4997, "b", 50, 4996, "c", 50, 4995, - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983, - new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-01"), "a", 50, 5000, "b", 50, 4999, "c", 50, 4998, + DateTimes.of("2011-01-02"), "a", 50, 4997, "b", 50, 4996, "c", 50, 4995, + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983, + DateTimes.of("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), runner.run(QueryPlus.wrap(query), context) ); @@ -891,7 +893,7 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-11-04/2011-11-08"), + Intervals.of("2011-11-04/2011-11-08"), makeTopNResultsWithoutRename( new DateTime("2011-11-04", TIMEZONE), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, new DateTime("2011-11-05", TIMEZONE), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, @@ -925,30 +927,30 @@ public class CachingClusteredClientTest ImmutableList.of( Sequences.simple( makeTopNResultsWithoutRename( - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ), Sequences.simple( makeTopNResultsWithoutRename( - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new 
DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ) ); TestHelper.assertExpectedResults( makeTopNResultsWithoutRename( - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983, - new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983, + DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), mergeSequences( new TopNQueryBuilder() @@ -995,28 +997,28 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-01/2011-01-02"), + Intervals.of("2011-01-01/2011-01-02"), makeTopNResultsWithoutRename(), - new Interval("2011-01-02/2011-01-03"), + Intervals.of("2011-01-02/2011-01-03"), makeTopNResultsWithoutRename(), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 
4987, "c", 50, 4986, + DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ); @@ -1029,16 +1031,16 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedResults( makeRenamedTopNResults( - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983, - new DateTime("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983, + DateTimes.of("2011-01-09T01"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), runner.run(QueryPlus.wrap(query), context) ); @@ -1068,28 +1070,28 @@ public class CachingClusteredClientTest testQueryCaching( runner, builder.build(), - new Interval("2011-01-01/2011-01-02"), + Intervals.of("2011-01-01/2011-01-02"), makeTopNResultsWithoutRename(), - new Interval("2011-01-02/2011-01-03"), + Intervals.of("2011-01-02/2011-01-03"), makeTopNResultsWithoutRename(), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeTopNResultsWithoutRename( - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05T01"), "a", 
50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 ) ); @@ -1102,16 +1104,16 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedResults( makeTopNResultsWithoutRename( - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983, - new DateTime("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-05T01"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-06T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07T01"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-08T01"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "c1", 50, 4985, "b", 50, 4984, "c", 50, 4983, + DateTimes.of("2011-01-09T01"), "c2", 50, 4985, "b", 50, 4984, "c", 50, 4983 ), runner.run(QueryPlus.wrap(query), context) ); @@ -1133,30 +1135,30 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), - makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), + Intervals.of("2011-01-01/2011-01-02"), + makeSearchResults(TOP_DIM, DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), - new Interval("2011-01-02/2011-01-03"), - makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4), + Intervals.of("2011-01-02/2011-01-03"), + makeSearchResults(TOP_DIM, DateTimes.of("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSearchResults( TOP_DIM, - new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07"), "how4", 1, "howdy4", 2, 
"howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSearchResults( TOP_DIM, - new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ) ); @@ -1170,18 +1172,18 @@ public class CachingClusteredClientTest TestHelper.assertExpectedResults( makeSearchResults( TOP_DIM, - new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, - new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, - new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, - new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, + DateTimes.of("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, + DateTimes.of("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, + DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), runner.run(QueryPlus.wrap(builder.intervals("2011-01-01/2011-01-10").build()), context) ); @@ -1203,30 +1205,30 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new 
Interval("2011-01-01/2011-01-02"), - makeSearchResults(TOP_DIM, new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), + Intervals.of("2011-01-01/2011-01-02"), + makeSearchResults(TOP_DIM, DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4), - new Interval("2011-01-02/2011-01-03"), - makeSearchResults(TOP_DIM, new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4), + Intervals.of("2011-01-02/2011-01-03"), + makeSearchResults(TOP_DIM, DateTimes.of("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSearchResults( TOP_DIM, - new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSearchResults( TOP_DIM, - new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ) ); @@ -1240,18 +1242,18 @@ public class CachingClusteredClientTest TestHelper.assertExpectedResults( makeSearchResults( TOP_DIM, - new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, - new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, - new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, 
- new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, + DateTimes.of("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, + DateTimes.of("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, + DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), runner.run(QueryPlus.wrap(builder.intervals("2011-01-01/2011-01-10").build()), context) ); @@ -1262,18 +1264,18 @@ public class CachingClusteredClientTest TestHelper.assertExpectedResults( makeSearchResults( "new_dim", - new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, - new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, - new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, - new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, - new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, - new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, - new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, - new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 + DateTimes.of("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4, + DateTimes.of("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4, + DateTimes.of("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4, + DateTimes.of("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4, + DateTimes.of("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4, + DateTimes.of("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4, + DateTimes.of("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4, + DateTimes.of("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4 ), runner.run(QueryPlus.wrap(query), context) ); @@ -1298,26 +1300,26 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), - 
makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)), + Intervals.of("2011-01-01/2011-01-02"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)), - new Interval("2011-01-02/2011-01-03"), - makeSelectResults(dimensions, metrics, new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)), + Intervals.of("2011-01-02/2011-01-03"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)), - new Interval("2011-01-05/2011-01-10"), - makeSelectResults(dimensions, metrics, new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9) + Intervals.of("2011-01-05/2011-01-10"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9) ), - new Interval("2011-01-05/2011-01-10"), - makeSelectResults(dimensions, metrics, new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) + Intervals.of("2011-01-05/2011-01-10"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) ) ); @@ -1331,18 +1333,18 @@ public class CachingClusteredClientTest ); HashMap context = new HashMap(); TestHelper.assertExpectedResults( - makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1), - new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5), - new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9), - new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1), + DateTimes.of("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5), + DateTimes.of("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-05T01"), 
ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) ), runner.run(QueryPlus.wrap(builder.intervals("2011-01-01/2011-01-10").build()), context) ); @@ -1367,30 +1369,30 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), - makeSelectResults(dimensions, metrics, new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)), + Intervals.of("2011-01-01/2011-01-02"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1)), - new Interval("2011-01-02/2011-01-03"), - makeSelectResults(dimensions, metrics, new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)), + Intervals.of("2011-01-02/2011-01-03"), + makeSelectResults(dimensions, metrics, DateTimes.of("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5)), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSelectResults( dimensions, metrics, - new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9) + DateTimes.of("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9) ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeSelectResults( dimensions, metrics, - new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) ) ); @@ -1406,18 +1408,18 @@ public class CachingClusteredClientTest TestHelper.assertExpectedResults( makeSelectResults( dimensions, metrics, - new DateTime("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1), - new DateTime("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5), - new DateTime("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), - new DateTime("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), - new 
DateTime("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), - new DateTime("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), - new DateTime("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), - new DateTime("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9), - new DateTime("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) + DateTimes.of("2011-01-01"), ImmutableMap.of("a", "b", "rows", 1), + DateTimes.of("2011-01-02"), ImmutableMap.of("a", "c", "rows", 5), + DateTimes.of("2011-01-05"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "d", "rows", 5), + DateTimes.of("2011-01-06"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "e", "rows", 6), + DateTimes.of("2011-01-07"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "f", "rows", 7), + DateTimes.of("2011-01-08"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "g", "rows", 8), + DateTimes.of("2011-01-09"), ImmutableMap.of("a", "h", "rows", 9), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "h", "rows", 9) ), runner.run(QueryPlus.wrap(builder.intervals("2011-01-01/2011-01-10").build()), context) ); @@ -1429,18 +1431,18 @@ public class CachingClusteredClientTest TestHelper.assertExpectedResults( makeSelectResults( dimensions, metrics, - new DateTime("2011-01-01"), ImmutableMap.of("a2", "b", "rows", 1), - new DateTime("2011-01-02"), ImmutableMap.of("a2", "c", "rows", 5), - new DateTime("2011-01-05"), ImmutableMap.of("a2", "d", "rows", 5), - new DateTime("2011-01-05T01"), ImmutableMap.of("a2", "d", "rows", 5), - new DateTime("2011-01-06"), ImmutableMap.of("a2", "e", "rows", 6), - new DateTime("2011-01-06T01"), ImmutableMap.of("a2", "e", "rows", 6), - new DateTime("2011-01-07"), ImmutableMap.of("a2", "f", "rows", 7), - new DateTime("2011-01-07T01"), ImmutableMap.of("a2", "f", "rows", 7), - new DateTime("2011-01-08"), ImmutableMap.of("a2", "g", "rows", 8), - new DateTime("2011-01-08T01"), ImmutableMap.of("a2", "g", "rows", 8), - new DateTime("2011-01-09"), ImmutableMap.of("a2", "h", "rows", 9), - new DateTime("2011-01-09T01"), ImmutableMap.of("a2", "h", "rows", 9) + DateTimes.of("2011-01-01"), ImmutableMap.of("a2", "b", "rows", 1), + DateTimes.of("2011-01-02"), ImmutableMap.of("a2", "c", "rows", 5), + DateTimes.of("2011-01-05"), ImmutableMap.of("a2", "d", "rows", 5), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a2", "d", "rows", 5), + DateTimes.of("2011-01-06"), ImmutableMap.of("a2", "e", "rows", 6), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a2", "e", "rows", 6), + DateTimes.of("2011-01-07"), ImmutableMap.of("a2", "f", "rows", 7), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a2", "f", "rows", 7), + DateTimes.of("2011-01-08"), ImmutableMap.of("a2", "g", "rows", 8), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a2", "g", "rows", 8), + DateTimes.of("2011-01-09"), ImmutableMap.of("a2", "h", "rows", 9), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a2", "h", "rows", 9) ), runner.run(QueryPlus.wrap(query), context) ); @@ -1473,43 +1475,43 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), + Intervals.of("2011-01-01/2011-01-02"), makeGroupByResults( - new DateTime("2011-01-01"), + 
DateTimes.of("2011-01-01"), ImmutableMap.of("a", "a", "rows", 1, "imps", 1, "impers", 1, "uniques", collector) ), - new Interval("2011-01-02/2011-01-03"), + Intervals.of("2011-01-02/2011-01-03"), makeGroupByResults( - new DateTime("2011-01-02"), + DateTimes.of("2011-01-02"), ImmutableMap.of("a", "b", "rows", 2, "imps", 2, "impers", 2, "uniques", collector) ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeGroupByResults( - new DateTime("2011-01-05"), + DateTimes.of("2011-01-05"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector), - new DateTime("2011-01-06"), + DateTimes.of("2011-01-06"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector), - new DateTime("2011-01-07"), + DateTimes.of("2011-01-07"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector), - new DateTime("2011-01-08"), + DateTimes.of("2011-01-08"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector), - new DateTime("2011-01-09"), + DateTimes.of("2011-01-09"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector) ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeGroupByResults( - new DateTime("2011-01-05T01"), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector), - new DateTime("2011-01-06T01"), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector), - new DateTime("2011-01-07T01"), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector), - new DateTime("2011-01-08T01"), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector), - new DateTime("2011-01-09T01"), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector) ) ); @@ -1521,25 +1523,25 @@ public class CachingClusteredClientTest HashMap context = new HashMap(); TestHelper.assertExpectedObjects( makeGroupByResults( - new DateTime("2011-01-05T"), + DateTimes.of("2011-01-05T"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector), - new DateTime("2011-01-05T01"), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("a", "c", "rows", 3, "imps", 3, "impers", 3, "uniques", collector), - new DateTime("2011-01-06T"), + DateTimes.of("2011-01-06T"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector), - new DateTime("2011-01-06T01"), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("a", "d", "rows", 4, "imps", 4, "impers", 4, "uniques", collector), - new DateTime("2011-01-07T"), + DateTimes.of("2011-01-07T"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector), - new DateTime("2011-01-07T01"), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("a", "e", "rows", 5, "imps", 5, "impers", 5, "uniques", collector), - new DateTime("2011-01-08T"), + DateTimes.of("2011-01-08T"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector), - new DateTime("2011-01-08T01"), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("a", "f", "rows", 6, "imps", 6, "impers", 6, "uniques", collector), - new DateTime("2011-01-09T"), + DateTimes.of("2011-01-09T"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector), - new 
DateTime("2011-01-09T01"), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("a", "g", "rows", 7, "imps", 7, "impers", 7, "uniques", collector) ), runner.run(QueryPlus.wrap(builder.setInterval("2011-01-05/2011-01-10").build()), context), @@ -1557,17 +1559,17 @@ public class CachingClusteredClientTest .intervals(CachingClusteredClientTest.SEG_SPEC) .context(CachingClusteredClientTest.CONTEXT) .build(), - new Interval("2011-01-01/2011-01-02"), - makeTimeBoundaryResult(new DateTime("2011-01-01"), new DateTime("2011-01-01"), new DateTime("2011-01-02")), + Intervals.of("2011-01-01/2011-01-02"), + makeTimeBoundaryResult(DateTimes.of("2011-01-01"), DateTimes.of("2011-01-01"), DateTimes.of("2011-01-02")), - new Interval("2011-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("2011-01-02"), new DateTime("2011-01-02"), new DateTime("2011-01-03")), + Intervals.of("2011-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("2011-01-02"), DateTimes.of("2011-01-02"), DateTimes.of("2011-01-03")), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05"), new DateTime("2011-01-05"), new DateTime("2011-01-10")), + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05"), DateTimes.of("2011-01-05"), DateTimes.of("2011-01-10")), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05T01"), new DateTime("2011-01-05T01"), new DateTime("2011-01-10")) + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05T01"), DateTimes.of("2011-01-05T01"), DateTimes.of("2011-01-10")) ); testQueryCaching( @@ -1578,17 +1580,17 @@ public class CachingClusteredClientTest .context(CachingClusteredClientTest.CONTEXT) .bound(TimeBoundaryQuery.MAX_TIME) .build(), - new Interval("2011-01-01/2011-01-02"), - makeTimeBoundaryResult(new DateTime("2011-01-01"), null, new DateTime("2011-01-02")), + Intervals.of("2011-01-01/2011-01-02"), + makeTimeBoundaryResult(DateTimes.of("2011-01-01"), null, DateTimes.of("2011-01-02")), - new Interval("2011-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("2011-01-02"), null, new DateTime("2011-01-03")), + Intervals.of("2011-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("2011-01-02"), null, DateTimes.of("2011-01-03")), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05"), null, new DateTime("2011-01-10")), + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05"), null, DateTimes.of("2011-01-10")), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05T01"), null, new DateTime("2011-01-10")) + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05T01"), null, DateTimes.of("2011-01-10")) ); testQueryCaching( @@ -1599,17 +1601,17 @@ public class CachingClusteredClientTest .context(CachingClusteredClientTest.CONTEXT) .bound(TimeBoundaryQuery.MIN_TIME) .build(), - new Interval("2011-01-01/2011-01-02"), - makeTimeBoundaryResult(new DateTime("2011-01-01"), new DateTime("2011-01-01"), null), + Intervals.of("2011-01-01/2011-01-02"), + makeTimeBoundaryResult(DateTimes.of("2011-01-01"), DateTimes.of("2011-01-01"), null), - new Interval("2011-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("2011-01-02"), new DateTime("2011-01-02"), null), + Intervals.of("2011-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("2011-01-02"), DateTimes.of("2011-01-02"), null), - new 
Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05"), new DateTime("2011-01-05"), null), + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05"), DateTimes.of("2011-01-05"), null), - new Interval("2011-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("2011-01-05T01"), new DateTime("2011-01-05T01"), null) + Intervals.of("2011-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("2011-01-05T01"), DateTimes.of("2011-01-05T01"), null) ); } @@ -1664,12 +1666,12 @@ public class CachingClusteredClientTest filter result in {[2,2]}, so segments [1,2] and [2,3] is needed */ List>> expectedResult = Arrays.asList( - makeTimeResults(new DateTime("2011-01-01"), 50, 5000, - new DateTime("2011-01-02"), 10, 1252, - new DateTime("2011-01-03"), 20, 6213, - new DateTime("2011-01-04"), 30, 743), - makeTimeResults(new DateTime("2011-01-07"), 60, 6020, - new DateTime("2011-01-08"), 70, 250) + makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000, + DateTimes.of("2011-01-02"), 10, 1252, + DateTimes.of("2011-01-03"), 20, 6213, + DateTimes.of("2011-01-04"), 30, 743), + makeTimeResults(DateTimes.of("2011-01-07"), 60, 6020, + DateTimes.of("2011-01-08"), 70, 250) ); testQueryCachingWithFilter( @@ -1677,16 +1679,16 @@ public class CachingClusteredClientTest 3, builder.build(), expectedResult, - new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-01"), 50, 5000), - new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-02"), 10, 1252), - new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-03"), 20, 6213), - new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-04"), 30, 743), - new Interval("2011-01-01/2011-01-05"), makeTimeResults(new DateTime("2011-01-05"), 40, 6000), - new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-06"), 50, 425), - new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-07"), 60, 6020), - new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-08"), 70, 250), - new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-09"), 23, 85312), - new Interval("2011-01-06/2011-01-10"), makeTimeResults(new DateTime("2011-01-10"), 100, 512) + Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-01"), 50, 5000), + Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-02"), 10, 1252), + Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-03"), 20, 6213), + Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-04"), 30, 743), + Intervals.of("2011-01-01/2011-01-05"), makeTimeResults(DateTimes.of("2011-01-05"), 40, 6000), + Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-06"), 50, 425), + Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-07"), 60, 6020), + Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-08"), 70, 250), + Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-09"), 23, 85312), + Intervals.of("2011-01-06/2011-01-10"), makeTimeResults(DateTimes.of("2011-01-10"), 100, 512) ); } @@ -1733,9 +1735,9 @@ public class CachingClusteredClientTest TimeseriesQuery query = builder.build(); Map context = new HashMap<>(); - final Interval interval1 = new Interval("2011-01-06/2011-01-07"); - final Interval interval2 = new 
Interval("2011-01-07/2011-01-08"); - final Interval interval3 = new Interval("2011-01-08/2011-01-09"); + final Interval interval1 = Intervals.of("2011-01-06/2011-01-07"); + final Interval interval2 = Intervals.of("2011-01-07/2011-01-08"); + final Interval interval3 = Intervals.of("2011-01-08/2011-01-09"); QueryRunner runner = new FinalizeResultsQueryRunner( getDefaultQueryRunner(), new TimeseriesQueryQueryToolChest( @@ -2817,7 +2819,7 @@ public class CachingClusteredClientTest { super( "", - new Interval(0, 1), + Intervals.utc(0, 1), "", null, null, @@ -2983,17 +2985,17 @@ public class CachingClusteredClientTest .intervals(CachingClusteredClientTest.SEG_SPEC) .context(CachingClusteredClientTest.CONTEXT) .build(), - new Interval("1970-01-01/1970-01-02"), - makeTimeBoundaryResult(new DateTime("1970-01-01"), new DateTime("1970-01-01"), new DateTime("1970-01-02")), + Intervals.of("1970-01-01/1970-01-02"), + makeTimeBoundaryResult(DateTimes.of("1970-01-01"), DateTimes.of("1970-01-01"), DateTimes.of("1970-01-02")), - new Interval("1970-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("1970-01-02"), new DateTime("1970-01-02"), new DateTime("1970-01-03")), + Intervals.of("1970-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("1970-01-02"), DateTimes.of("1970-01-02"), DateTimes.of("1970-01-03")), - new Interval("1970-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05"), new DateTime("1970-01-05"), new DateTime("1970-01-10")), + Intervals.of("1970-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05"), DateTimes.of("1970-01-05"), DateTimes.of("1970-01-10")), - new Interval("1970-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05T01"), new DateTime("1970-01-05T01"), new DateTime("1970-01-10")) + Intervals.of("1970-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05T01"), DateTimes.of("1970-01-05T01"), DateTimes.of("1970-01-10")) ); testQueryCaching( @@ -3004,17 +3006,17 @@ public class CachingClusteredClientTest .context(CachingClusteredClientTest.CONTEXT) .bound(TimeBoundaryQuery.MAX_TIME) .build(), - new Interval("1970-01-01/2011-01-02"), - makeTimeBoundaryResult(new DateTime("1970-01-01"), null, new DateTime("1970-01-02")), + Intervals.of("1970-01-01/2011-01-02"), + makeTimeBoundaryResult(DateTimes.of("1970-01-01"), null, DateTimes.of("1970-01-02")), - new Interval("1970-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("1970-01-02"), null, new DateTime("1970-01-03")), + Intervals.of("1970-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("1970-01-02"), null, DateTimes.of("1970-01-03")), - new Interval("1970-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05"), null, new DateTime("1970-01-10")), + Intervals.of("1970-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05"), null, DateTimes.of("1970-01-10")), - new Interval("1970-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05T01"), null, new DateTime("1970-01-10")) + Intervals.of("1970-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05T01"), null, DateTimes.of("1970-01-10")) ); testQueryCaching( @@ -3025,17 +3027,17 @@ public class CachingClusteredClientTest .context(CachingClusteredClientTest.CONTEXT) .bound(TimeBoundaryQuery.MIN_TIME) .build(), - new Interval("1970-01-01/2011-01-02"), - makeTimeBoundaryResult(new DateTime("1970-01-01"), new DateTime("1970-01-01"), null), + Intervals.of("1970-01-01/2011-01-02"), + makeTimeBoundaryResult(DateTimes.of("1970-01-01"), 
DateTimes.of("1970-01-01"), null), - new Interval("1970-01-01/2011-01-03"), - makeTimeBoundaryResult(new DateTime("1970-01-02"), new DateTime("1970-01-02"), null), + Intervals.of("1970-01-01/2011-01-03"), + makeTimeBoundaryResult(DateTimes.of("1970-01-02"), DateTimes.of("1970-01-02"), null), - new Interval("1970-01-01/1970-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05"), new DateTime("1970-01-05"), null), + Intervals.of("1970-01-01/1970-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05"), DateTimes.of("1970-01-05"), null), - new Interval("1970-01-01/2011-01-10"), - makeTimeBoundaryResult(new DateTime("1970-01-05T01"), new DateTime("1970-01-05T01"), null) + Intervals.of("1970-01-01/2011-01-10"), + makeTimeBoundaryResult(DateTimes.of("1970-01-05T01"), DateTimes.of("1970-01-05T01"), null) ); } @@ -3054,34 +3056,34 @@ public class CachingClusteredClientTest testQueryCaching( getDefaultQueryRunner(), builder.build(), - new Interval("2011-01-01/2011-01-02"), + Intervals.of("2011-01-01/2011-01-02"), makeGroupByResults( - new DateTime("2011-01-01"), + DateTimes.of("2011-01-01"), ImmutableMap.of("output", "a", "rows", 1, "imps", 1, "impers", 1) ), - new Interval("2011-01-02/2011-01-03"), + Intervals.of("2011-01-02/2011-01-03"), makeGroupByResults( - new DateTime("2011-01-02"), + DateTimes.of("2011-01-02"), ImmutableMap.of("output", "b", "rows", 2, "imps", 2, "impers", 2) ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeGroupByResults( - new DateTime("2011-01-05"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), - new DateTime("2011-01-06"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), - new DateTime("2011-01-07"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), - new DateTime("2011-01-08"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), - new DateTime("2011-01-09"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) + DateTimes.of("2011-01-05"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), + DateTimes.of("2011-01-06"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), + DateTimes.of("2011-01-07"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), + DateTimes.of("2011-01-08"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), + DateTimes.of("2011-01-09"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) ), - new Interval("2011-01-05/2011-01-10"), + Intervals.of("2011-01-05/2011-01-10"), makeGroupByResults( - new DateTime("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), - new DateTime("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), - new DateTime("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), - new DateTime("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), - new DateTime("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) + DateTimes.of("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("output", "g", 
"rows", 7, "imps", 7, "impers", 7) ) ); @@ -3092,16 +3094,16 @@ public class CachingClusteredClientTest HashMap context = new HashMap(); TestHelper.assertExpectedObjects( makeGroupByResults( - new DateTime("2011-01-05T"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), - new DateTime("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), - new DateTime("2011-01-06T"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), - new DateTime("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), - new DateTime("2011-01-07T"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), - new DateTime("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), - new DateTime("2011-01-08T"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), - new DateTime("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), - new DateTime("2011-01-09T"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7), - new DateTime("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) + DateTimes.of("2011-01-05T"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), + DateTimes.of("2011-01-05T01"), ImmutableMap.of("output", "c", "rows", 3, "imps", 3, "impers", 3), + DateTimes.of("2011-01-06T"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("output", "d", "rows", 4, "imps", 4, "impers", 4), + DateTimes.of("2011-01-07T"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("output", "e", "rows", 5, "imps", 5, "impers", 5), + DateTimes.of("2011-01-08T"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("output", "f", "rows", 6, "imps", 6, "impers", 6), + DateTimes.of("2011-01-09T"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("output", "g", "rows", 7, "imps", 7, "impers", 7) ), runner.run(QueryPlus.wrap(builder.setInterval("2011-01-05/2011-01-10").build()), context), "" @@ -3114,16 +3116,16 @@ public class CachingClusteredClientTest .build(); TestHelper.assertExpectedObjects( makeGroupByResults( - new DateTime("2011-01-05T"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3), - new DateTime("2011-01-05T01"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3), - new DateTime("2011-01-06T"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4), - new DateTime("2011-01-06T01"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4), - new DateTime("2011-01-07T"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5), - new DateTime("2011-01-07T01"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5), - new DateTime("2011-01-08T"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6), - new DateTime("2011-01-08T01"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6), - new DateTime("2011-01-09T"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7), - new DateTime("2011-01-09T01"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7) + DateTimes.of("2011-01-05T"), ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3), + DateTimes.of("2011-01-05T01"), 
ImmutableMap.of("output2", "c", "rows", 3, "imps", 3, "impers2", 3), + DateTimes.of("2011-01-06T"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4), + DateTimes.of("2011-01-06T01"), ImmutableMap.of("output2", "d", "rows", 4, "imps", 4, "impers2", 4), + DateTimes.of("2011-01-07T"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5), + DateTimes.of("2011-01-07T01"), ImmutableMap.of("output2", "e", "rows", 5, "imps", 5, "impers2", 5), + DateTimes.of("2011-01-08T"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6), + DateTimes.of("2011-01-08T01"), ImmutableMap.of("output2", "f", "rows", 6, "imps", 6, "impers2", 6), + DateTimes.of("2011-01-09T"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7), + DateTimes.of("2011-01-09T01"), ImmutableMap.of("output2", "g", "rows", 7, "imps", 7, "impers2", 7) ), runner.run(QueryPlus.wrap(query), context), "renamed aggregators test" @@ -3133,7 +3135,7 @@ public class CachingClusteredClientTest @Test public void testIfNoneMatch() throws Exception { - Interval interval = new Interval("2016/2017"); + Interval interval = Intervals.of("2016/2017"); final DataSegment dataSegment = new DataSegment( "dataSource", interval, diff --git a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java index 3ee8c9ec37a..37c9734c528 100644 --- a/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java +++ b/server/src/test/java/io/druid/client/CachingQueryRunnerTest.java @@ -31,7 +31,9 @@ import io.druid.client.cache.CacheConfig; import io.druid.client.cache.CacheStats; import io.druid.client.cache.MapCache; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.SequenceWrapper; @@ -56,7 +58,6 @@ import io.druid.query.topn.TopNQueryConfig; import io.druid.query.topn.TopNQueryQueryToolChest; import io.druid.query.topn.TopNResultValue; import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -92,11 +93,11 @@ public class CachingQueryRunnerTest ); private static final Object[] objects = new Object[]{ - new DateTime("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, - new DateTime("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, - new DateTime("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, - new DateTime("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 + DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, + DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, + DateTimes.of("2011-01-08"), "a", 50, 4988, "b", 50, 4987, "c", 50, 4986, + DateTimes.of("2011-01-09"), "a", 50, 4985, "b", 50, 4984, "c", 50, 4983 }; private ExecutorService backgroundExecutorService; @@ -156,13 +157,13 @@ public class CachingQueryRunnerTest .build(); Result row1 = new Result( - new DateTime("2011-04-01"), + DateTimes.of("2011-04-01"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) ); Result row2 = new 
Result<>( - new DateTime("2011-04-02"), + DateTimes.of("2011-04-02"), new TimeseriesResultValue( ImmutableMap.of("rows", 13L, "idx", 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9) ) @@ -258,7 +259,7 @@ public class CachingQueryRunnerTest }; String segmentIdentifier = "segment"; - SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0); + SegmentDescriptor segmentDescriptor = new SegmentDescriptor(Intervals.of("2011/2012"), "version", 0); DefaultObjectMapper objectMapper = new DefaultObjectMapper(); CachingQueryRunner runner = new CachingQueryRunner( @@ -336,7 +337,7 @@ public class CachingQueryRunnerTest { DefaultObjectMapper objectMapper = new DefaultObjectMapper(); String segmentIdentifier = "segment"; - SegmentDescriptor segmentDescriptor = new SegmentDescriptor(new Interval("2011/2012"), "version", 0); + SegmentDescriptor segmentDescriptor = new SegmentDescriptor(Intervals.of("2011/2012"), "version", 0); CacheStrategy cacheStrategy = toolchest.getCacheStrategy(query); Cache.NamedKey cacheKey = CacheUtil.computeSegmentCacheKey( diff --git a/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java b/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java index 49dc29b71eb..8aa3fc9c556 100644 --- a/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java +++ b/server/src/test/java/io/druid/client/CoordinatorServerViewTest.java @@ -26,9 +26,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; - import io.druid.curator.CuratorTestBase; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.query.TableDataSource; import io.druid.server.coordination.DruidServerMetadata; @@ -107,14 +107,12 @@ public class CoordinatorServerViewTest extends CuratorTestBase TimelineLookup timeline = overlordServerView.getTimeline(new TableDataSource("test_overlord_server_view")); List serverLookupRes = (List) timeline.lookup( - new Interval( - "2014-10-20T00:00:00Z/P1D" - ) + Intervals.of("2014-10-20T00:00:00Z/P1D") ); Assert.assertEquals(1, serverLookupRes.size()); TimelineObjectHolder actualTimelineObjectHolder = serverLookupRes.get(0); - Assert.assertEquals(new Interval("2014-10-20T00:00:00Z/P1D"), actualTimelineObjectHolder.getInterval()); + Assert.assertEquals(Intervals.of("2014-10-20T00:00:00Z/P1D"), actualTimelineObjectHolder.getInterval()); Assert.assertEquals("v1", actualTimelineObjectHolder.getVersion()); PartitionHolder actualPartitionHolder = actualTimelineObjectHolder.getObject(); @@ -133,9 +131,9 @@ public class CoordinatorServerViewTest extends CuratorTestBase Assert.assertEquals( 0, - ((List) timeline.lookup(new Interval("2014-10-20T00:00:00Z/P1D"))).size() + ((List) timeline.lookup(Intervals.of("2014-10-20T00:00:00Z/P1D"))).size() ); - Assert.assertNull(timeline.findEntry(new Interval("2014-10-20T00:00:00Z/P1D"), "v1")); + Assert.assertNull(timeline.findEntry(Intervals.of("2014-10-20T00:00:00Z/P1D"), "v1")); } @Test @@ -204,7 +202,7 @@ public class CoordinatorServerViewTest extends CuratorTestBase createExpected("2011-04-06/2011-04-09", "v3", druidServers.get(3), segments.get(3)) ), (List) timeline.lookup( - new Interval( + Intervals.of( "2011-04-01/2011-04-09" ) ) @@ -225,11 +223,7 @@ public class CoordinatorServerViewTest extends CuratorTestBase createExpected("2011-04-03/2011-04-06", "v1", 
druidServers.get(1), segments.get(1)), createExpected("2011-04-06/2011-04-09", "v3", druidServers.get(3), segments.get(3)) ), - (List) timeline.lookup( - new Interval( - "2011-04-01/2011-04-09" - ) - ) + (List) timeline.lookup(Intervals.of("2011-04-01/2011-04-09")) ); // unannounce all the segments @@ -243,7 +237,7 @@ public class CoordinatorServerViewTest extends CuratorTestBase Assert.assertEquals( 0, - ((List) timeline.lookup(new Interval("2011-04-01/2011-04-09"))).size() + ((List) timeline.lookup(Intervals.of("2011-04-01/2011-04-09"))).size() ); } @@ -264,7 +258,7 @@ public class CoordinatorServerViewTest extends CuratorTestBase DataSegment segment ) { - return Pair.of(new Interval(intervalStr), Pair.of(version, Pair.of(druidServer, segment))); + return Pair.of(Intervals.of(intervalStr), Pair.of(version, Pair.of(druidServer, segment))); } private void assertValues( @@ -345,7 +339,7 @@ public class CoordinatorServerViewTest extends CuratorTestBase { return DataSegment.builder() .dataSource("test_overlord_server_view") - .interval(new Interval(intervalStr)) + .interval(Intervals.of(intervalStr)) .loadSpec( ImmutableMap.of( "type", diff --git a/server/src/test/java/io/druid/client/DataSegmentTest.java b/server/src/test/java/io/druid/client/DataSegmentTest.java index 74030bdeb8b..682498fd0e1 100644 --- a/server/src/test/java/io/druid/client/DataSegmentTest.java +++ b/server/src/test/java/io/druid/client/DataSegmentTest.java @@ -19,18 +19,19 @@ package io.druid.client; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.segment.IndexIO; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import io.druid.timeline.partition.SingleDimensionShardSpec; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -51,7 +52,7 @@ public class DataSegmentTest public void testV1Serialization() throws Exception { - final Interval interval = new Interval("2011-10-01/2011-10-02"); + final Interval interval = Intervals.of("2011-10-01/2011-10-02"); final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( @@ -66,7 +67,10 @@ public class DataSegmentTest 1 ); - final Map objectMap = mapper.readValue(mapper.writeValueAsString(segment), new TypeReference>(){}); + final Map objectMap = mapper.readValue( + mapper.writeValueAsString(segment), + JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT + ); Assert.assertEquals(10, objectMap.size()); Assert.assertEquals("something", objectMap.get("dataSource")); @@ -106,8 +110,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(NoneShardSpec.instance()) .build(); @@ -122,8 +126,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new 
Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(new SingleDimensionShardSpec("bar", null, "abc", 0)) .build(); @@ -138,8 +142,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .shardSpec(new SingleDimensionShardSpec("bar", "abc", "def", 1)) .build(); @@ -154,8 +158,8 @@ public class DataSegmentTest { final DataSegment segment = DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/2012-01-02")) - .version(new DateTime("2012-01-01T11:22:33.444Z").toString()) + .interval(Intervals.of("2012-01-01/2012-01-02")) + .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .build(); final DataSegment segment2 = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class); @@ -196,7 +200,7 @@ public class DataSegmentTest { return DataSegment.builder() .dataSource(dataSource) - .interval(new Interval(interval)) + .interval(Intervals.of(interval)) .version(version) .size(1) .build(); diff --git a/server/src/test/java/io/druid/client/DirectDruidClientTest.java b/server/src/test/java/io/druid/client/DirectDruidClientTest.java index 425b96fbd7e..ad59b648558 100644 --- a/server/src/test/java/io/druid/client/DirectDruidClientTest.java +++ b/server/src/test/java/io/druid/client/DirectDruidClientTest.java @@ -34,6 +34,8 @@ import io.druid.client.selector.HighestPriorityTierSelectorStrategy; import io.druid.client.selector.QueryableDruidServer; import io.druid.client.selector.ServerSelector; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -53,9 +55,7 @@ import org.easymock.EasyMock; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.jboss.netty.handler.timeout.ReadTimeoutException; -import org.joda.time.DateTime; import org.joda.time.Duration; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -122,8 +122,8 @@ public class DirectDruidClientTest final ServerSelector serverSelector = new ServerSelector( new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), - new DateTime("2013-01-01").toString(), + Intervals.of("2013-01-01/2013-01-02"), + DateTimes.of("2013-01-01").toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -193,7 +193,7 @@ public class DirectDruidClientTest ); List results = Sequences.toList(s1, Lists.newArrayList()); Assert.assertEquals(1, results.size()); - Assert.assertEquals(new DateTime("2014-01-01T01:02:03Z"), results.get(0).getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-01-01T01:02:03Z"), results.get(0).getTimestamp()); Assert.assertEquals(3, client1.getNumOpenConnections()); client2.run(QueryPlus.wrap(query), defaultContext); @@ -240,8 +240,8 @@ public class DirectDruidClientTest final ServerSelector serverSelector = new ServerSelector( new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), - new 
DateTime("2013-01-01").toString(), + Intervals.of("2013-01-01/2013-01-02"), + DateTimes.of("2013-01-01").toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -308,8 +308,8 @@ public class DirectDruidClientTest DataSegment dataSegment = new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), - new DateTime("2013-01-01").toString(), + Intervals.of("2013-01-01/2013-01-02"), + DateTimes.of("2013-01-01").toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), diff --git a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java index 3edd25b372a..7e6fad72b60 100644 --- a/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java +++ b/server/src/test/java/io/druid/client/client/BatchServerInventoryViewTest.java @@ -36,6 +36,7 @@ import io.druid.client.ServerView; import io.druid.curator.PotentiallyGzippedCompressionProvider; import io.druid.curator.announcement.Announcer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.guava.Comparators; @@ -80,7 +81,7 @@ import java.util.concurrent.atomic.AtomicInteger; public class BatchServerInventoryViewTest { private static final String testBasePath = "/test"; - public static final DateTime SEGMENT_INTERVAL_START = new DateTime("2013-01-01"); + public static final DateTime SEGMENT_INTERVAL_START = DateTimes.of("2013-01-01"); public static final int INITIAL_SEGMENTS = 100; private static final Timing timing = new Timing(); @@ -382,7 +383,7 @@ public class BatchServerInventoryViewTest SEGMENT_INTERVAL_START.plusDays(offset + 1) ) ) - .version(new DateTime().toString()) + .version(DateTimes.nowUtc().toString()) .build(); } diff --git a/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java b/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java index 50cf50df6ca..284c456d5f1 100644 --- a/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java +++ b/server/src/test/java/io/druid/client/client/ImmutableSegmentLoadInfoTest.java @@ -23,12 +23,12 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Sets; import io.druid.client.ImmutableSegmentLoadInfo; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import junit.framework.Assert; -import org.joda.time.Interval; import org.junit.Test; import java.io.IOException; @@ -43,9 +43,7 @@ public class ImmutableSegmentLoadInfoTest ImmutableSegmentLoadInfo segmentLoadInfo = new ImmutableSegmentLoadInfo( new DataSegment( "test_ds", - new Interval( - "2011-04-01/2011-04-02" - ), + Intervals.of("2011-04-01/2011-04-02"), "v1", null, null, diff --git a/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java b/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java index 3febded43d6..ca23ce74fec 100644 --- a/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java +++ b/server/src/test/java/io/druid/client/indexing/ClientAppendQueryTest.java @@ -20,6 +20,7 @@ package io.druid.client.indexing; import 
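// The DataSegmentTest hunk a little above also swaps the per-call anonymous TypeReference for the
// shared constant in io.druid.java.util.common.jackson.JacksonUtils; the same constants reappear
// in the SQLMetadataStorageActionHandlerTest and DataSchemaTest hunks further down. Sketch, with
// the mapper and segment assumed to be the objects that test already holds:
Map<String, Object> sketchToMap(ObjectMapper mapper, DataSegment segment) throws IOException
{
  return mapper.readValue(
      mapper.writeValueAsString(segment),
      JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT  // was: new TypeReference<Map<String, Object>>() {}
  );
}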
com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -33,9 +34,11 @@ public class ClientAppendQueryTest { private ClientAppendQuery clientAppendQuery; private static final String DATA_SOURCE = "data_source"; + private final DateTime start = DateTimes.nowUtc(); private List segments = Lists.newArrayList( - new DataSegment(DATA_SOURCE, new Interval(new DateTime(), new DateTime().plus(1)), new DateTime().toString(), null, - null, null, null, 0, 0)); + new DataSegment(DATA_SOURCE, new Interval(start, start.plus(1)), start.toString(), null, + null, null, null, 0, 0)); + @Before public void setUp() { diff --git a/server/src/test/java/io/druid/client/indexing/ClientConversionQueryTest.java b/server/src/test/java/io/druid/client/indexing/ClientConversionQueryTest.java index bb0a22139e5..db29d181994 100644 --- a/server/src/test/java/io/druid/client/indexing/ClientConversionQueryTest.java +++ b/server/src/test/java/io/druid/client/indexing/ClientConversionQueryTest.java @@ -19,6 +19,7 @@ package io.druid.client.indexing; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -29,9 +30,10 @@ public class ClientConversionQueryTest { private ClientConversionQuery clientConversionQuery; private static final String DATA_SOURCE = "data_source"; - private static final Interval INTERVAL = new Interval(new DateTime(), new DateTime().plus(1)); - private static final DataSegment DATA_SEGMENT = new DataSegment(DATA_SOURCE, INTERVAL, new DateTime().toString(), null, - null, null, null, 0, 0); + public static final DateTime START = DateTimes.nowUtc(); + private static final Interval INTERVAL = new Interval(START, START.plus(1)); + private static final DataSegment DATA_SEGMENT = + new DataSegment(DATA_SOURCE, INTERVAL, START.toString(), null, null, null, null, 0, 0); @Test public void testGetType() diff --git a/server/src/test/java/io/druid/client/indexing/ClientKillQueryTest.java b/server/src/test/java/io/druid/client/indexing/ClientKillQueryTest.java index a1dc2db0aaa..6f2b4e8aea3 100644 --- a/server/src/test/java/io/druid/client/indexing/ClientKillQueryTest.java +++ b/server/src/test/java/io/druid/client/indexing/ClientKillQueryTest.java @@ -19,6 +19,7 @@ package io.druid.client.indexing; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.After; @@ -29,7 +30,8 @@ import org.junit.Test; public class ClientKillQueryTest { private static final String DATA_SOURCE = "data_source"; - private static final Interval INTERVAL = new Interval(new DateTime(), new DateTime().plus(1)); + public static final DateTime START = DateTimes.nowUtc(); + private static final Interval INTERVAL = new Interval(START, START.plus(1)); ClientKillQuery clientKillQuery; @Before diff --git a/server/src/test/java/io/druid/client/indexing/ClientMergeQueryTest.java b/server/src/test/java/io/druid/client/indexing/ClientMergeQueryTest.java index d7791e76553..0019bd3c136 100644 --- a/server/src/test/java/io/druid/client/indexing/ClientMergeQueryTest.java +++ b/server/src/test/java/io/druid/client/indexing/ClientMergeQueryTest.java @@ -20,6 +20,7 @@ package io.druid.client.indexing; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.query.aggregation.AggregatorFactory; import io.druid.timeline.DataSegment; import 
org.joda.time.DateTime; @@ -32,8 +33,9 @@ import java.util.List; public class ClientMergeQueryTest { private static final String DATA_SOURCE = "data_source"; - private static final Interval INTERVAL = new Interval(new DateTime(), new DateTime().plus(1)); - private static final DataSegment DATA_SEGMENT = new DataSegment(DATA_SOURCE, INTERVAL, new DateTime().toString(), null, + public static final DateTime START = DateTimes.nowUtc(); + private static final Interval INTERVAL = new Interval(START, START.plus(1)); + private static final DataSegment DATA_SEGMENT = new DataSegment(DATA_SOURCE, INTERVAL, START.toString(), null, null, null, null, 0, 0); private static final List SEGMENT_LIST = Lists.newArrayList(DATA_SEGMENT); private static final List AGGREGATOR_LIST = Lists.newArrayList(); diff --git a/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java b/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java index bdeeb461d36..ecf1d9eaa70 100644 --- a/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java +++ b/server/src/test/java/io/druid/client/selector/ServerSelectorTest.java @@ -23,11 +23,11 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.client.DirectDruidClient; import io.druid.client.DruidServer; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -51,7 +51,7 @@ public class ServerSelectorTest final ServerSelector selector = new ServerSelector( DataSegment.builder() .dataSource("test_broker_server_view") - .interval(new Interval("2012/2013")) + .interval(Intervals.of("2012/2013")) .loadSpec( ImmutableMap.of( "type", @@ -78,8 +78,7 @@ public class ServerSelectorTest DataSegment.builder() .dataSource( "test_broker_server_view") - .interval(new Interval( - "2012/2013")) + .interval(Intervals.of("2012/2013")) .loadSpec( ImmutableMap.of( "type", diff --git a/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java b/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java index 0fb821ca0e1..41f1f6df214 100644 --- a/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java +++ b/server/src/test/java/io/druid/client/selector/TierSelectorStrategyTest.java @@ -23,13 +23,13 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.client.DirectDruidClient; import io.druid.client.DruidServer; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -118,8 +118,8 @@ public class TierSelectorStrategyTest final ServerSelector serverSelector = new ServerSelector( new DataSegment( "test", - new Interval("2013-01-01/2013-01-02"), - new DateTime("2013-01-01").toString(), + Intervals.of("2013-01-01/2013-01-02"), + DateTimes.of("2013-01-01").toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), diff --git 
a/server/src/test/java/io/druid/curator/discovery/CuratorDruidLeaderSelectorTest.java b/server/src/test/java/io/druid/curator/discovery/CuratorDruidLeaderSelectorTest.java new file mode 100644 index 00000000000..bb0c27912c5 --- /dev/null +++ b/server/src/test/java/io/druid/curator/discovery/CuratorDruidLeaderSelectorTest.java @@ -0,0 +1,172 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package io.druid.curator.discovery; + +import com.metamx.emitter.EmittingLogger; +import com.metamx.emitter.service.ServiceEmitter; +import io.druid.curator.CuratorTestBase; +import io.druid.discovery.DruidLeaderSelector; +import io.druid.java.util.common.logger.Logger; +import io.druid.server.DruidNode; +import io.druid.server.initialization.ServerConfig; +import org.easymock.EasyMock; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +/** + */ +public class CuratorDruidLeaderSelectorTest extends CuratorTestBase +{ + private static final Logger logger = new Logger(CuratorDruidLeaderSelectorTest.class); + + @Before + public void setUp() throws Exception + { + EmittingLogger.registerEmitter(EasyMock.createNiceMock(ServiceEmitter.class)); + setupServerAndCurator(); + } + + @Test(timeout = 15000) + public void testSimple() throws Exception + { + curator.start(); + curator.blockUntilConnected(); + + AtomicReference currLeader = new AtomicReference<>(); + + String latchPath = "/testlatchPath"; + + CuratorDruidLeaderSelector leaderSelector1 = new CuratorDruidLeaderSelector( + curator, + new DruidNode("s1", "h1", 8080, null, new ServerConfig()), + latchPath + ); + leaderSelector1.registerListener( + new DruidLeaderSelector.Listener() + { + @Override + public void becomeLeader() + { + logger.info("listener1.becomeLeader()."); + currLeader.set("h1:8080"); + throw new RuntimeException("I am Rogue."); + } + + @Override + public void stopBeingLeader() + { + logger.info("listener1.stopBeingLeader()."); + throw new RuntimeException("I said I am Rogue."); + } + } + ); + + while (!"h1:8080".equals(currLeader.get())) { + logger.info("current leader = [%s]", currLeader.get()); + Thread.sleep(100); + } + + Assert.assertTrue(leaderSelector1.localTerm() >= 1); + + CuratorDruidLeaderSelector leaderSelector2 = new CuratorDruidLeaderSelector( + curator, + new DruidNode("s2", "h2", 8080, null, new ServerConfig()), + latchPath + ); + leaderSelector2.registerListener( + new DruidLeaderSelector.Listener() + { + private AtomicInteger attemptCount = new AtomicInteger(0); + + @Override + public void becomeLeader() + { + logger.info("listener2.becomeLeader()."); + + if (attemptCount.getAndIncrement() < 1) { + throw 
new RuntimeException("will become leader on next attempt."); + } + + currLeader.set("h2:8080"); + } + + @Override + public void stopBeingLeader() + { + logger.info("listener2.stopBeingLeader()."); + throw new RuntimeException("I am broken."); + } + } + ); + + while (!"h2:8080".equals(currLeader.get())) { + logger.info("current leader = [%s]", currLeader.get()); + Thread.sleep(100); + } + + Assert.assertTrue(leaderSelector2.isLeader()); + Assert.assertEquals("h2:8080", leaderSelector1.getCurrentLeader()); + Assert.assertEquals(2, leaderSelector2.localTerm()); + + CuratorDruidLeaderSelector leaderSelector3 = new CuratorDruidLeaderSelector( + curator, + new DruidNode("s3", "h3", 8080, null, new ServerConfig()), + latchPath + ); + leaderSelector3.registerListener( + new DruidLeaderSelector.Listener() + { + @Override + public void becomeLeader() + { + logger.info("listener3.becomeLeader()."); + currLeader.set("h3:8080"); + } + + @Override + public void stopBeingLeader() + { + logger.info("listener3.stopBeingLeader()."); + } + } + ); + + leaderSelector2.unregisterListener(); + while (!"h3:8080".equals(currLeader.get())) { + logger.info("current leader = [%s]", currLeader.get()); + Thread.sleep(100); + } + + Assert.assertTrue(leaderSelector3.isLeader()); + Assert.assertEquals("h3:8080", leaderSelector1.getCurrentLeader()); + Assert.assertEquals(1, leaderSelector3.localTerm()); + } + + @After + public void tearDown() + { + tearDownServerAndCurator(); + } +} diff --git a/server/src/test/java/io/druid/curator/discovery/CuratorDruidNodeAnnouncerAndDiscoveryTest.java b/server/src/test/java/io/druid/curator/discovery/CuratorDruidNodeAnnouncerAndDiscoveryTest.java index 2ddce0843d9..1203e86e365 100644 --- a/server/src/test/java/io/druid/curator/discovery/CuratorDruidNodeAnnouncerAndDiscoveryTest.java +++ b/server/src/test/java/io/druid/curator/discovery/CuratorDruidNodeAnnouncerAndDiscoveryTest.java @@ -39,6 +39,7 @@ import org.junit.Test; import java.util.Collection; import java.util.HashSet; +import java.util.List; import java.util.Set; /** @@ -128,15 +129,15 @@ public class CuratorDruidNodeAnnouncerAndDiscoveryTest extends CuratorTestBase new DruidNodeDiscovery.Listener() { @Override - public void nodeAdded(DiscoveryDruidNode node) + public void nodesAdded(List nodes) { - coordNodes.add(node); + coordNodes.addAll(nodes); } @Override - public void nodeRemoved(DiscoveryDruidNode node) + public void nodesRemoved(List nodes) { - coordNodes.remove(node); + coordNodes.removeAll(nodes); } } ); @@ -146,15 +147,15 @@ public class CuratorDruidNodeAnnouncerAndDiscoveryTest extends CuratorTestBase new DruidNodeDiscovery.Listener() { @Override - public void nodeAdded(DiscoveryDruidNode node) + public void nodesAdded(List nodes) { - overlordNodes.add(node); + overlordNodes.addAll(nodes); } @Override - public void nodeRemoved(DiscoveryDruidNode node) + public void nodesRemoved(List nodes) { - overlordNodes.remove(node); + overlordNodes.removeAll(nodes); } } ); diff --git a/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java b/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java index bb55b1ee1bc..ca6032499b2 100644 --- a/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java +++ b/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java @@ -38,6 +38,7 @@ public class ServerDiscoverySelectorTest private ServerDiscoverySelector serverDiscoverySelector; private ServiceInstance instance; private static final int 
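// Sketch of the DruidLeaderSelector API exercised by the new CuratorDruidLeaderSelectorTest above;
// every call shown here appears in that test, and the CuratorFramework client is assumed to come
// from the usual CuratorTestBase fixture.
void sketchLeaderSelector(CuratorFramework curator)
{
  CuratorDruidLeaderSelector selector = new CuratorDruidLeaderSelector(
      curator,
      new DruidNode("s1", "h1", 8080, null, new ServerConfig()),
      "/testlatchPath"
  );
  selector.registerListener(
      new DruidLeaderSelector.Listener()
      {
        @Override
        public void becomeLeader()
        {
          // called once this node wins the latch; the test above shows a throwing becomeLeader()
          // being retried, with each attempt bumping localTerm()
        }

        @Override
        public void stopBeingLeader()
        {
          // called when leadership is lost or the listener is unregistered
        }
      }
  );
  // selector.isLeader(), selector.localTerm() and selector.getCurrentLeader() ("host:port") can
  // then be polled; selector.unregisterListener() hands the latch to the next candidate.
}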
PORT = 8080; + private static final int SSL_PORT = 8280; private static final String ADDRESS = "localhost"; @Before @@ -77,6 +78,63 @@ public class ServerDiscoverySelectorTest Assert.assertEquals("http", uri.getScheme()); } + @Test + public void testPickWithNullSslPort() throws Exception + { + EasyMock.expect(serviceProvider.getInstance()).andReturn(instance).anyTimes(); + EasyMock.expect(instance.getAddress()).andReturn(ADDRESS).anyTimes(); + EasyMock.expect(instance.getPort()).andReturn(PORT).anyTimes(); + EasyMock.expect(instance.getSslPort()).andReturn(null).anyTimes(); + EasyMock.replay(instance, serviceProvider); + Server server = serverDiscoverySelector.pick(); + Assert.assertEquals(PORT, server.getPort()); + Assert.assertEquals(ADDRESS, server.getAddress()); + Assert.assertTrue(server.getHost().contains(Integer.toString(PORT))); + Assert.assertTrue(server.getHost().contains(ADDRESS)); + Assert.assertEquals("http", server.getScheme()); + EasyMock.verify(instance, serviceProvider); + final URI uri = new URI( + server.getScheme(), + null, + server.getAddress(), + server.getPort(), + "/druid/indexer/v1/action", + null, + null + ); + Assert.assertEquals(PORT, uri.getPort()); + Assert.assertEquals(ADDRESS, uri.getHost()); + Assert.assertEquals("http", uri.getScheme()); + } + + @Test + public void testPickWithSslPort() throws Exception + { + EasyMock.expect(serviceProvider.getInstance()).andReturn(instance).anyTimes(); + EasyMock.expect(instance.getAddress()).andReturn(ADDRESS).anyTimes(); + EasyMock.expect(instance.getPort()).andReturn(PORT).anyTimes(); + EasyMock.expect(instance.getSslPort()).andReturn(SSL_PORT).anyTimes(); + EasyMock.replay(instance, serviceProvider); + Server server = serverDiscoverySelector.pick(); + Assert.assertEquals(SSL_PORT, server.getPort()); + Assert.assertEquals(ADDRESS, server.getAddress()); + Assert.assertTrue(server.getHost().contains(Integer.toString(SSL_PORT))); + Assert.assertTrue(server.getHost().contains(ADDRESS)); + Assert.assertEquals("https", server.getScheme()); + EasyMock.verify(instance, serviceProvider); + final URI uri = new URI( + server.getScheme(), + null, + server.getAddress(), + server.getPort(), + "/druid/indexer/v1/action", + null, + null + ); + Assert.assertEquals(SSL_PORT, uri.getPort()); + Assert.assertEquals(ADDRESS, uri.getHost()); + Assert.assertEquals("https", uri.getScheme()); + } @Test public void testPickIPv6() throws Exception diff --git a/server/src/test/java/io/druid/discovery/DruidNodeDiscoveryProviderTest.java b/server/src/test/java/io/druid/discovery/DruidNodeDiscoveryProviderTest.java index e9e5b0e689f..d7467b2d858 100644 --- a/server/src/test/java/io/druid/discovery/DruidNodeDiscoveryProviderTest.java +++ b/server/src/test/java/io/druid/discovery/DruidNodeDiscoveryProviderTest.java @@ -19,6 +19,7 @@ package io.druid.discovery; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.druid.server.DruidNode; @@ -47,15 +48,15 @@ public class DruidNodeDiscoveryProviderTest new DruidNodeDiscovery.Listener() { @Override - public void nodeAdded(DiscoveryDruidNode node) + public void nodesAdded(List nodes) { - dataNodes.add(node); + dataNodes.addAll(nodes); } @Override - public void nodeRemoved(DiscoveryDruidNode node) + public void nodesRemoved(List nodes) { - dataNodes.remove(node); + dataNodes.removeAll(nodes); } } ); @@ -66,15 +67,15 @@ public class DruidNodeDiscoveryProviderTest new DruidNodeDiscovery.Listener() { @Override - 
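// The two new ServerDiscoverySelectorTest cases above pin down how ServerDiscoverySelector.pick()
// handles TLS-enabled instances: if the Curator ServiceInstance advertises an SSL port, the
// returned Server uses that port with the "https" scheme, otherwise it falls back to the plain
// port and "http". Condensed, using the test's own constants and mocked instance:
void sketchSslPick()
{
  // scenario 1: the mocked instance advertises getSslPort() = SSL_PORT (8280)
  Server viaSsl = serverDiscoverySelector.pick();
  // -> viaSsl.getPort() == 8280 and "https".equals(viaSsl.getScheme())

  // scenario 2: the mocked instance advertises getSslPort() = null
  Server viaPlain = serverDiscoverySelector.pick();
  // -> viaPlain.getPort() == 8080 (PORT) and "http".equals(viaPlain.getScheme())
}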
public void nodeAdded(DiscoveryDruidNode node) + public void nodesAdded(List nodes) { - lookupNodes.add(node); + lookupNodes.addAll(nodes); } @Override - public void nodeRemoved(DiscoveryDruidNode node) + public void nodesRemoved(List nodes) { - lookupNodes.remove(node); + lookupNodes.removeAll(nodes); } } ); @@ -204,14 +205,14 @@ public class DruidNodeDiscoveryProviderTest void add(DiscoveryDruidNode node) { for (DruidNodeDiscovery.Listener listener : listeners) { - listener.nodeAdded(node); + listener.nodesAdded(ImmutableList.of(node)); } } void remove(DiscoveryDruidNode node) { for (DruidNodeDiscovery.Listener listener : listeners) { - listener.nodeRemoved(node); + listener.nodesRemoved(ImmutableList.of(node)); } } } diff --git a/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java b/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java index f493d07298b..19d8a665b55 100644 --- a/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java +++ b/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java @@ -27,6 +27,7 @@ import io.druid.indexing.overlord.DataSourceMetadata; import io.druid.indexing.overlord.ObjectMetadata; import io.druid.indexing.overlord.SegmentPublishResult; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; @@ -55,7 +56,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final ObjectMapper mapper = new DefaultObjectMapper(); private final DataSegment defaultSegment = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "version", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -67,7 +68,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment defaultSegment2 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "version", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -79,7 +80,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment defaultSegment3 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-03T00Z/2015-01-04T00Z"), + Intervals.of("2015-01-03T00Z/2015-01-04T00Z"), "version", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -92,7 +93,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest // Overshadows defaultSegment, defaultSegment2 private final DataSegment defaultSegment4 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -104,7 +105,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment numberedSegment0of0 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -116,7 +117,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment numberedSegment1of0 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -128,7 +129,7 @@ public class 
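// The DruidNodeDiscovery.Listener updates above batch the callbacks: nodesAdded()/nodesRemoved()
// now receive lists rather than single nodes. A minimal listener in that style, mirroring how the
// tests accumulate nodes; the List<DiscoveryDruidNode> element type and the registerListener()
// call are assumptions inferred from the surrounding test code rather than shown verbatim here.
void sketchDiscoveryListener(DruidNodeDiscovery discovery)
{
  final Set<DiscoveryDruidNode> knownNodes = new HashSet<>();
  discovery.registerListener(
      new DruidNodeDiscovery.Listener()
      {
        @Override
        public void nodesAdded(List<DiscoveryDruidNode> nodes)
        {
          knownNodes.addAll(nodes);
        }

        @Override
        public void nodesRemoved(List<DiscoveryDruidNode> nodes)
        {
          knownNodes.removeAll(nodes);
        }
      }
  );
}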
IndexerSQLMetadataStorageCoordinatorTest private final DataSegment numberedSegment2of0 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -140,7 +141,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment numberedSegment2of1 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -152,7 +153,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest private final DataSegment numberedSegment3of1 = new DataSegment( "fooDataSource", - Interval.parse("2015-01-01T00Z/2015-01-02T00Z"), + Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), "zversion", ImmutableMap.of(), ImmutableList.of("dim1"), @@ -523,8 +524,8 @@ public class IndexerSQLMetadataStorageCoordinatorTest coordinator.getUsedSegmentsForIntervals( defaultSegment.getDataSource(), ImmutableList.of( - Interval.parse("2015-01-03T00Z/2015-01-03T05Z"), - Interval.parse("2015-01-03T09Z/2015-01-04T00Z") + Intervals.of("2015-01-03T00Z/2015-01-03T05Z"), + Intervals.of("2015-01-03T09Z/2015-01-04T00Z") ) ) ); @@ -554,7 +555,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest Set actualSegments = ImmutableSet.copyOf( coordinator.getUsedSegmentsForInterval( defaultSegment.getDataSource(), - Interval.parse("2014-12-31T23:59:59.999Z/2015-01-01T00:00:00.001Z") // end is exclusive + Intervals.of("2014-12-31T23:59:59.999Z/2015-01-01T00:00:00.001Z") // end is exclusive ) ); Assert.assertEquals( @@ -573,7 +574,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest ImmutableSet.copyOf( coordinator.getUsedSegmentsForInterval( defaultSegment.getDataSource(), - Interval.parse("2015-1-1T23:59:59.999Z/2015-02-01T00Z") + Intervals.of("2015-1-1T23:59:59.999Z/2015-02-01T00Z") ) ) ); @@ -701,7 +702,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest ImmutableSet.copyOf( coordinator.getUnusedSegmentsForInterval( defaultSegment.getDataSource(), - Interval.parse("2000/2999") + Intervals.of("2000/2999") ) ) ); diff --git a/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java b/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java index 9f6074f0b85..3faad305e53 100644 --- a/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java +++ b/server/src/test/java/io/druid/metadata/MetadataSegmentManagerTest.java @@ -26,12 +26,12 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.metamx.emitter.EmittingLogger; -import io.druid.java.util.common.StringUtils; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.StringUtils; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -49,7 +49,7 @@ public class MetadataSegmentManagerTest private final DataSegment segment1 = new DataSegment( "wikipedia", - new Interval("2012-03-15T00:00:00.000/2012-03-16T00:00:00.000"), + Intervals.of("2012-03-15T00:00:00.000/2012-03-16T00:00:00.000"), "2012-03-16T00:36:30.848Z", ImmutableMap.of( "type", "s3_zip", @@ -65,7 +65,7 @@ public class MetadataSegmentManagerTest private 
final DataSegment segment2 = new DataSegment( "wikipedia", - new Interval("2012-01-05T00:00:00.000/2012-01-06T00:00:00.000"), + Intervals.of("2012-01-05T00:00:00.000/2012-01-06T00:00:00.000"), "2012-01-06T22:19:12.565Z", ImmutableMap.of( "type", "s3_zip", @@ -154,12 +154,12 @@ public class MetadataSegmentManagerTest Assert.assertEquals( ImmutableList.of(segment2.getInterval()), - manager.getUnusedSegmentIntervals("wikipedia", new Interval("1970/3000"), 1) + manager.getUnusedSegmentIntervals("wikipedia", Intervals.of("1970/3000"), 1) ); Assert.assertEquals( ImmutableList.of(segment2.getInterval(), segment1.getInterval()), - manager.getUnusedSegmentIntervals("wikipedia", new Interval("1970/3000"), 5) + manager.getUnusedSegmentIntervals("wikipedia", Intervals.of("1970/3000"), 5) ); } } diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java index 954388a5700..5df9c6b535e 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java @@ -29,13 +29,13 @@ import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.server.audit.SQLAuditManager; import io.druid.server.audit.SQLAuditManagerConfig; import io.druid.server.coordinator.rules.IntervalLoadRule; import io.druid.server.coordinator.rules.Rule; import io.druid.server.metrics.NoopServiceEmitter; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -87,7 +87,7 @@ public class SQLMetadataRuleManagerTest { List rules = Arrays.asList( new IntervalLoadRule( - new Interval("2015-01-01/2015-02-01"), ImmutableMap.of( + Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_NUM_REPLICANTS ) @@ -111,7 +111,7 @@ public class SQLMetadataRuleManagerTest { List rules = Arrays.asList( new IntervalLoadRule( - new Interval("2015-01-01/2015-02-01"), ImmutableMap.of( + Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_NUM_REPLICANTS ) @@ -149,7 +149,7 @@ public class SQLMetadataRuleManagerTest { List rules = Arrays.asList( new IntervalLoadRule( - new Interval("2015-01-01/2015-02-01"), ImmutableMap.of( + Intervals.of("2015-01-01/2015-02-01"), ImmutableMap.of( DruidServer.DEFAULT_TIER, DruidServer.DEFAULT_NUM_REPLICANTS ) diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java index 63f6e668ce9..f052737e660 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataStorageActionHandlerTest.java @@ -25,11 +25,10 @@ import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; - import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; - -import org.joda.time.DateTime; +import io.druid.java.util.common.jackson.JacksonUtils; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -90,9 +89,7 @@ public class 
SQLMetadataStorageActionHandlerTest @Override public TypeReference> getLogType() { - return new TypeReference>() - { - }; + return JacksonUtils.TYPE_REFERENCE_MAP_STRING_STRING; } @Override @@ -119,7 +116,7 @@ public class SQLMetadataStorageActionHandlerTest final String entryId = "1234"; - handler.insert(entryId, new DateTime("2014-01-02T00:00:00.123"), "testDataSource", entry, true, null); + handler.insert(entryId, DateTimes.of("2014-01-02T00:00:00.123"), "testDataSource", entry, true, null); Assert.assertEquals( Optional.of(entry), @@ -148,7 +145,7 @@ public class SQLMetadataStorageActionHandlerTest Assert.assertEquals( ImmutableList.of(), - handler.getInactiveStatusesSince(new DateTime("2014-01-01")) + handler.getInactiveStatusesSince(DateTimes.of("2014-01-01")) ); Assert.assertTrue(handler.setStatus(entryId, false, status1)); @@ -173,12 +170,12 @@ public class SQLMetadataStorageActionHandlerTest Assert.assertEquals( ImmutableList.of(), - handler.getInactiveStatusesSince(new DateTime("2014-01-03")) + handler.getInactiveStatusesSince(DateTimes.of("2014-01-03")) ); Assert.assertEquals( ImmutableList.of(status1), - handler.getInactiveStatusesSince(new DateTime("2014-01-01")) + handler.getInactiveStatusesSince(DateTimes.of("2014-01-01")) ); } @@ -189,10 +186,10 @@ public class SQLMetadataStorageActionHandlerTest Map entry = ImmutableMap.of("a", 1); Map status = ImmutableMap.of("count", 42); - handler.insert(entryId, new DateTime("2014-01-01"), "test", entry, true, status); + handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status); thrown.expect(EntryExistsException.class); - handler.insert(entryId, new DateTime("2014-01-01"), "test", entry, true, status); + handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status); } @Test @@ -202,7 +199,7 @@ public class SQLMetadataStorageActionHandlerTest Map entry = ImmutableMap.of("a", 1); Map status = ImmutableMap.of("count", 42); - handler.insert(entryId, new DateTime("2014-01-01"), "test", entry, true, status); + handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status); Assert.assertEquals( ImmutableList.of(), @@ -234,7 +231,7 @@ public class SQLMetadataStorageActionHandlerTest Map entry = ImmutableMap.of("a", 1); Map status = ImmutableMap.of("count", 42); - handler.insert(entryId, new DateTime("2014-01-01"), "test", entry, true, status); + handler.insert(entryId, DateTimes.of("2014-01-01"), "test", entry, true, status); Assert.assertEquals( ImmutableMap.>of(), diff --git a/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java b/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java index 970e4e9f3ea..9fd01143014 100644 --- a/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java +++ b/server/src/test/java/io/druid/query/LocatedSegmentDescriptorSerdeTest.java @@ -21,9 +21,9 @@ package io.druid.query; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -39,7 +39,7 @@ public class LocatedSegmentDescriptorSerdeTest public void testDimensionsSpecSerde() throws Exception { LocatedSegmentDescriptor expected = new LocatedSegmentDescriptor( - new SegmentDescriptor(new Interval(100, 200), "version", 100), + new SegmentDescriptor(Intervals.utc(100, 200), 
"version", 100), 65535, Arrays.asList( new DruidServerMetadata("server1", "host1", null, 30000L, ServerType.HISTORICAL, "tier1", 0), diff --git a/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java b/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java index 3185f9b3fe9..c44f7b80efe 100644 --- a/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java +++ b/server/src/test/java/io/druid/realtime/firehose/CombiningFirehoseFactoryTest.java @@ -20,12 +20,12 @@ package io.druid.realtime.firehose; import com.google.common.collect.Lists; - import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.InputRowParser; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.parsers.ParseException; import io.druid.segment.realtime.firehose.CombiningFirehoseFactory; import io.druid.utils.Runnables; @@ -82,7 +82,7 @@ public class CombiningFirehoseFactoryTest @Override public DateTime getTimestamp() { - return new DateTime(timestamp); + return DateTimes.utc(timestamp); } @Override diff --git a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java index 71f7a462e6d..d415875ad73 100644 --- a/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java +++ b/server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java @@ -19,7 +19,6 @@ package io.druid.segment.indexing; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -28,13 +27,14 @@ import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.DurationGranularity; import io.druid.java.util.common.granularity.Granularities; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.segment.TestHelper; import io.druid.segment.indexing.granularity.ArbitraryGranularitySpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -57,7 +57,7 @@ public class DataSchemaTest null ), null - ), new TypeReference>() {} + ), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); DataSchema schema = new DataSchema( @@ -66,8 +66,8 @@ public class DataSchemaTest new AggregatorFactory[]{ new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2"), - }, - new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + }, + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))), jsonMapper ); @@ -84,12 +84,16 @@ public class DataSchemaTest new StringInputRowParser( new JSONParseSpec( new TimestampSpec("time", "auto", null), - new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2")), ImmutableList.of("dimC"), null), + new DimensionsSpec( + DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2")), + ImmutableList.of("dimC"), + null + ), null, null ), null - ), new 
TypeReference>() {} + ), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); DataSchema schema = new DataSchema( @@ -98,8 +102,8 @@ public class DataSchemaTest new AggregatorFactory[]{ new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2"), - }, - new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + }, + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))), jsonMapper ); @@ -116,12 +120,17 @@ public class DataSchemaTest new StringInputRowParser( new JSONParseSpec( new TimestampSpec("time", "auto", null), - new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "metric1")), ImmutableList.of("dimC"), null), + new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of( + "time", + "dimA", + "dimB", + "metric1" + )), ImmutableList.of("dimC"), null), null, null ), null - ), new TypeReference>() {} + ), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); DataSchema schema = new DataSchema( @@ -130,8 +139,8 @@ public class DataSchemaTest new AggregatorFactory[]{ new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2"), - }, - new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + }, + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))), jsonMapper ); schema.getParser(); @@ -144,12 +153,16 @@ public class DataSchemaTest new StringInputRowParser( new JSONParseSpec( new TimestampSpec("time", "auto", null), - new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time")), ImmutableList.of("dimC"), null), + new DimensionsSpec( + DimensionsSpec.getDefaultSchemas(ImmutableList.of("time")), + ImmutableList.of("dimC"), + null + ), null, null ), null - ), new TypeReference>() {} + ), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); DataSchema schema = new DataSchema( @@ -159,8 +172,8 @@ public class DataSchemaTest new DoubleSumAggregatorFactory("metric1", "col1"), new DoubleSumAggregatorFactory("metric2", "col2"), new DoubleSumAggregatorFactory("metric1", "col3"), - }, - new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Interval.parse("2014/2015"))), + }, + new ArbitraryGranularitySpec(Granularities.DAY, ImmutableList.of(Intervals.of("2014/2015"))), jsonMapper ); schema.getParser(); @@ -242,7 +255,10 @@ public class DataSchemaTest ); Assert.assertEquals( actual.getGranularitySpec(), - new ArbitraryGranularitySpec(new DurationGranularity(86400000, null), ImmutableList.of(Interval.parse("2014/2015"))) + new ArbitraryGranularitySpec( + new DurationGranularity(86400000, null), + ImmutableList.of(Intervals.of("2014/2015")) + ) ); } } diff --git a/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java b/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java index 957d04a8e0d..c77241487bf 100644 --- a/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java +++ b/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java @@ -24,8 +24,9 @@ import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; -import 
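// The same substitution covers millisecond-based construction, as in the
// LocatedSegmentDescriptorSerdeTest and CombiningFirehoseFactoryTest hunks a little above
// (`timestamp` below stands for the long value those tests carry):
Interval descriptorInterval = Intervals.utc(100, 200);  // was: new Interval(100, 200)
DateTime rowTimestamp = DateTimes.utc(timestamp);       // was: new DateTime(timestamp)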
org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -42,11 +43,11 @@ public class ArbitraryGranularityTest final GranularitySpec spec = new ArbitraryGranularitySpec( null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-02-01T00Z/2012-03-01T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-02-01T00Z/2012-03-01T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") )); Assert.assertNotNull(spec.getQueryGranularity()); } @@ -57,66 +58,66 @@ public class ArbitraryGranularityTest final GranularitySpec spec = new ArbitraryGranularitySpec( Granularities.NONE, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-02-01T00Z/2012-03-01T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-02-01T00Z/2012-03-01T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") )); Assert.assertTrue(spec.isRollup()); Assert.assertEquals( Lists.newArrayList( - new Interval("2012-01-01T00Z/2012-01-03T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-02-01T00Z/2012-03-01T00Z") + Intervals.of("2012-01-01T00Z/2012-01-03T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-02-01T00Z/2012-03-01T00Z") ), Lists.newArrayList(spec.bucketIntervals().get()) ); Assert.assertEquals( "2012-01-03T00Z", - Optional.of(new Interval("2012-01-03T00Z/2012-01-04T00Z")), - spec.bucketInterval(new DateTime("2012-01-03T00Z")) + Optional.of(Intervals.of("2012-01-03T00Z/2012-01-04T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-03T00Z")) ); Assert.assertEquals( "2012-01-03T01Z", - Optional.of(new Interval("2012-01-03T00Z/2012-01-04T00Z")), - spec.bucketInterval(new DateTime("2012-01-03T01Z")) + Optional.of(Intervals.of("2012-01-03T00Z/2012-01-04T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-03T01Z")) ); Assert.assertEquals( "2012-01-04T01Z", Optional.absent(), - spec.bucketInterval(new DateTime("2012-01-04T01Z")) + spec.bucketInterval(DateTimes.of("2012-01-04T01Z")) ); Assert.assertEquals( "2012-01-07T23:59:59.999Z", - Optional.of(new Interval("2012-01-07T00Z/2012-01-08T00Z")), - spec.bucketInterval(new DateTime("2012-01-07T23:59:59.999Z")) + Optional.of(Intervals.of("2012-01-07T00Z/2012-01-08T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-07T23:59:59.999Z")) ); Assert.assertEquals( "2012-01-08T01Z", - Optional.of(new Interval("2012-01-08T00Z/2012-01-11T00Z")), - spec.bucketInterval(new DateTime("2012-01-08T01Z")) + Optional.of(Intervals.of("2012-01-08T00Z/2012-01-11T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-08T01Z")) ); Assert.assertEquals( "2012-01-04T00Z", Optional.absent(), - spec.bucketInterval(new DateTime("2012-01-04T00Z")) + spec.bucketInterval(DateTimes.of("2012-01-04T00Z")) ); Assert.assertEquals( 
"2012-01-05T00Z", Optional.absent(), - spec.bucketInterval(new DateTime("2012-01-05T00Z")) + spec.bucketInterval(DateTimes.of("2012-01-05T00Z")) ); } @@ -124,9 +125,9 @@ public class ArbitraryGranularityTest public void testOverlapViolation() { List intervals = Lists.newArrayList( - new Interval("2012-01-02T00Z/2012-01-04T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-02T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ); boolean thrown = false; @@ -144,11 +145,11 @@ public class ArbitraryGranularityTest public void testRollupSetting() { List intervals = Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-02-01T00Z/2012-03-01T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-02-01T00Z/2012-03-01T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ); final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, false, intervals); @@ -159,8 +160,8 @@ public class ArbitraryGranularityTest public void testOverlapViolationSameStartInstant() { List intervals = Lists.newArrayList( - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-03T00Z/2012-01-05T00Z") + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-05T00Z") ); boolean thrown = false; @@ -178,11 +179,11 @@ public class ArbitraryGranularityTest public void testJson() { final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-02-01T00Z/2012-03-01T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-02-01T00Z/2012-03-01T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") )); try { diff --git a/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java b/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java index cec80b3e7b0..2ed837c0c29 100644 --- a/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java +++ b/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java @@ -24,9 +24,10 @@ import com.google.common.base.Optional; import com.google.common.base.Throwables; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; -import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Interval; import org.joda.time.Period; @@ -49,10 +50,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new 
Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ); @@ -60,45 +61,45 @@ public class UniformGranularityTest Assert.assertEquals( Lists.newArrayList( - new Interval("2012-01-01T00Z/P1D"), - new Interval("2012-01-02T00Z/P1D"), - new Interval("2012-01-03T00Z/P1D"), - new Interval("2012-01-07T00Z/P1D"), - new Interval("2012-01-08T00Z/P1D"), - new Interval("2012-01-09T00Z/P1D"), - new Interval("2012-01-10T00Z/P1D") + Intervals.of("2012-01-01T00Z/P1D"), + Intervals.of("2012-01-02T00Z/P1D"), + Intervals.of("2012-01-03T00Z/P1D"), + Intervals.of("2012-01-07T00Z/P1D"), + Intervals.of("2012-01-08T00Z/P1D"), + Intervals.of("2012-01-09T00Z/P1D"), + Intervals.of("2012-01-10T00Z/P1D") ), Lists.newArrayList(spec.bucketIntervals().get()) ); Assert.assertEquals( "2012-01-03T00Z", - Optional.of(new Interval("2012-01-03T00Z/2012-01-04T00Z")), - spec.bucketInterval(new DateTime("2012-01-03T00Z")) + Optional.of(Intervals.of("2012-01-03T00Z/2012-01-04T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-03T00Z")) ); Assert.assertEquals( "2012-01-03T01Z", - Optional.of(new Interval("2012-01-03T00Z/2012-01-04T00Z")), - spec.bucketInterval(new DateTime("2012-01-03T01Z")) + Optional.of(Intervals.of("2012-01-03T00Z/2012-01-04T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-03T01Z")) ); Assert.assertEquals( "2012-01-04T01Z", Optional.absent(), - spec.bucketInterval(new DateTime("2012-01-04T01Z")) + spec.bucketInterval(DateTimes.of("2012-01-04T01Z")) ); Assert.assertEquals( "2012-01-07T23:59:59.999Z", - Optional.of(new Interval("2012-01-07T00Z/2012-01-08T00Z")), - spec.bucketInterval(new DateTime("2012-01-07T23:59:59.999Z")) + Optional.of(Intervals.of("2012-01-07T00Z/2012-01-08T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-07T23:59:59.999Z")) ); Assert.assertEquals( "2012-01-08T01Z", - Optional.of(new Interval("2012-01-08T00Z/2012-01-09T00Z")), - spec.bucketInterval(new DateTime("2012-01-08T01Z")) + Optional.of(Intervals.of("2012-01-08T00Z/2012-01-09T00Z")), + spec.bucketInterval(DateTimes.of("2012-01-08T01Z")) ); } @@ -106,10 +107,10 @@ public class UniformGranularityTest public void testRollupSetting() { List intervals = Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ); final GranularitySpec spec = new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, false, intervals); @@ -123,10 +124,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ); @@ -156,10 +157,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new 
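// Condensed form of the bucketInterval() contract that the rewritten assertions above exercise
// (in both ArbitraryGranularityTest and UniformGranularityTest): a timestamp inside one of the
// configured buckets maps to that bucket's Interval, anything outside maps to Optional.absent()
// (Guava Optional, as the tests import it).
Optional<Interval> sketchBucket()
{
  GranularitySpec spec = new ArbitraryGranularitySpec(
      Granularities.NONE,
      Lists.newArrayList(Intervals.of("2012-01-03T00Z/2012-01-04T00Z"))
  );
  // spec.bucketInterval(DateTimes.of("2012-01-05T00Z")) -> Optional.absent()
  return spec.bucketInterval(DateTimes.of("2012-01-03T01Z"));  // Optional.of(2012-01-03/2012-01-04)
}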
Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ); @@ -168,10 +169,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ) ); @@ -190,10 +191,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ); @@ -202,10 +203,10 @@ public class UniformGranularityTest Granularities.YEAR, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ) ); @@ -214,10 +215,10 @@ public class UniformGranularityTest Granularities.DAY, null, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-12T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-12T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ) ); @@ -226,10 +227,10 @@ public class UniformGranularityTest Granularities.DAY, Granularities.ALL, Lists.newArrayList( - new Interval("2012-01-08T00Z/2012-01-11T00Z"), - new Interval("2012-01-07T00Z/2012-01-08T00Z"), - new Interval("2012-01-03T00Z/2012-01-04T00Z"), - new Interval("2012-01-01T00Z/2012-01-03T00Z") + Intervals.of("2012-01-08T00Z/2012-01-11T00Z"), + Intervals.of("2012-01-07T00Z/2012-01-08T00Z"), + Intervals.of("2012-01-03T00Z/2012-01-04T00Z"), + Intervals.of("2012-01-01T00Z/2012-01-03T00Z") ) ) ); @@ -242,11 +243,11 @@ public class UniformGranularityTest new PeriodGranularity(new Period("P1D"), null, DateTimeZone.forID("America/Los_Angeles")), null, Lists.newArrayList( - new Interval("2012-01-08T00-08:00/2012-01-11T00-08:00"), - new Interval("2012-01-07T00-08:00/2012-01-08T00-08:00"), - new Interval("2012-01-03T00-08:00/2012-01-04T00-08:00"), - new Interval("2012-01-01T00-08:00/2012-01-03T00-08:00"), - new Interval("2012-09-01T00-07:00/2012-09-03T00-07:00") + Intervals.of("2012-01-08T00-08:00/2012-01-11T00-08:00"), + Intervals.of("2012-01-07T00-08:00/2012-01-08T00-08:00"), + Intervals.of("2012-01-03T00-08:00/2012-01-04T00-08:00"), + 
Intervals.of("2012-01-01T00-08:00/2012-01-03T00-08:00"), + Intervals.of("2012-09-01T00-07:00/2012-09-03T00-07:00") ) ); diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java index 485e6b95d28..d8eb28c10a7 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentFinderTest.java @@ -24,10 +24,10 @@ import com.fasterxml.jackson.databind.jsontype.NamedType; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NumberedShardSpec; import org.apache.commons.io.FileUtils; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; @@ -50,7 +50,7 @@ public class LocalDataSegmentFinderTest private static final DataSegment SEGMENT_1 = DataSegment.builder() .dataSource("wikipedia") .interval( - new Interval( + Intervals.of( "2013-08-31T00:00:00.000Z/2013-09-01T00:00:00.000Z" ) ) @@ -69,7 +69,7 @@ public class LocalDataSegmentFinderTest private static final DataSegment SEGMENT_2 = DataSegment.builder(SEGMENT_1) .interval( - new Interval( + Intervals.of( "2013-09-01T00:00:00.000Z/2013-09-02T00:00:00.000Z" ) ) @@ -77,7 +77,7 @@ public class LocalDataSegmentFinderTest private static final DataSegment SEGMENT_3 = DataSegment.builder(SEGMENT_1) .interval( - new Interval( + Intervals.of( "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" ) ) @@ -86,7 +86,7 @@ public class LocalDataSegmentFinderTest private static final DataSegment SEGMENT_4_0 = DataSegment.builder(SEGMENT_1) .interval( - new Interval( + Intervals.of( "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" ) ) @@ -95,7 +95,7 @@ public class LocalDataSegmentFinderTest private static final DataSegment SEGMENT_4_1 = DataSegment.builder(SEGMENT_1) .interval( - new Interval( + Intervals.of( "2013-09-02T00:00:00.000Z/2013-09-03T00:00:00.000Z" ) ) diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java index 8240adf2e29..b5eaad7fc9c 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentKillerTest.java @@ -21,9 +21,9 @@ package io.druid.segment.loading; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -103,7 +103,7 @@ public class LocalDataSegmentKillerTest { return new DataSegment( "dataSource", - Interval.parse("2000/3000"), + Intervals.of("2000/3000"), "ver", ImmutableMap.of( "type", "local", diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java index 19fb108ec81..efd5633747a 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java @@ -24,10 +24,10 @@ import 
com.google.common.collect.ImmutableList; import com.google.common.io.Files; import com.google.common.primitives.Ints; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -50,15 +50,15 @@ public class LocalDataSegmentPusherTest LocalDataSegmentPusherConfig config; File dataSegmentFiles; DataSegment dataSegment = new DataSegment( - "ds", - new Interval(0, 1), - "v1", - null, - null, - null, - NoneShardSpec.instance(), - null, - -1 + "ds", + Intervals.utc(0, 1), + "v1", + null, + null, + null, + NoneShardSpec.instance(), + null, + -1 ); @Before diff --git a/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java b/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java index cdb72409fa1..ed10455f337 100644 --- a/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java +++ b/server/src/test/java/io/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java @@ -27,11 +27,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.metamx.emitter.EmittingLogger; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.segment.TestHelper; import io.druid.server.metrics.NoopServiceEmitter; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -375,7 +375,7 @@ public class SegmentLoaderLocalCacheManagerTest { return DataSegment.builder() .dataSource("test_segment_loader") - .interval(new Interval(intervalStr)) + .interval(Intervals.of(intervalStr)) .loadSpec( ImmutableMap.of( "type", diff --git a/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java b/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java index a3a77f67b8f..241edd8f1f6 100644 --- a/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java +++ b/server/src/test/java/io/druid/segment/loading/StorageLocationTest.java @@ -20,8 +20,8 @@ package io.druid.segment.loading; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -77,7 +77,7 @@ public class StorageLocationTest { return new DataSegment( "test", - new Interval(intervalString), + Intervals.of(intervalString), "1", ImmutableMap.of(), Arrays.asList("d"), diff --git a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java index 49c11be5b07..90b1e66f4b2 100644 --- a/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java +++ b/server/src/test/java/io/druid/segment/realtime/RealtimeManagerTest.java @@ -38,7 +38,9 @@ import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.InputRowParser; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import 
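The recurring substitution in these test hunks trades Joda's zone-sensitive constructors (new Interval(...), new DateTime(...)) for Intervals.of/Intervals.utc and DateTimes.of/DateTimes.nowUtc. Those utility classes are not part of this diff, so the sketch below only illustrates what such UTC-pinning wrappers typically look like; the real io.druid.java.util.common.Intervals and DateTimes may differ in naming and detail.

// Illustrative sketch only (assumes Joda-Time); not the actual Druid utility classes.
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

final class UtcTimes
{
  private static final ISOChronology UTC = ISOChronology.getInstanceUTC();

  private UtcTimes() {}

  // Parse an ISO-8601 interval string such as "2013-01-01/2013-01-02" in UTC,
  // independent of the JVM's default time zone.
  static Interval intervalOf(String iso)
  {
    return new Interval(iso, UTC);
  }

  // Build an interval from epoch millis, likewise pinned to UTC.
  static Interval intervalUtc(long startMillis, long endMillis)
  {
    return new Interval(startMillis, endMillis, UTC);
  }

  static DateTime dateTimeOf(String iso)
  {
    return new DateTime(iso, UTC);
  }

  static DateTime nowUtc()
  {
    return DateTime.now(DateTimeZone.UTC);
  }
}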
io.druid.java.util.common.parsers.ParseException; import io.druid.query.BaseQuery; @@ -102,10 +104,10 @@ public class RealtimeManagerTest private static QueryRunnerFactoryConglomerate conglomerate; private static final List rows = Arrays.asList( - makeRow(new DateTime("9000-01-01").getMillis()), + makeRow(DateTimes.of("9000-01-01").getMillis()), makeRow(new ParseException("parse error")), null, - makeRow(new DateTime().getMillis()) + makeRow(System.currentTimeMillis()) ); private RealtimeManager realtimeManager; @@ -212,10 +214,10 @@ public class RealtimeManagerTest null ); plumber = new TestPlumber(new Sink( - new Interval("0/P5000Y"), + Intervals.of("0/P5000Y"), schema, tuningConfig.getShardSpec(), - new DateTime().toString(), + DateTimes.nowUtc().toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions() )); @@ -232,10 +234,10 @@ public class RealtimeManagerTest EasyMock.createNiceMock(DataSegmentServerAnnouncer.class) ); plumber2 = new TestPlumber(new Sink( - new Interval("0/P5000Y"), + Intervals.of("0/P5000Y"), schema2, tuningConfig.getShardSpec(), - new DateTime().toString(), + DateTimes.nowUtc().toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions() )); @@ -565,7 +567,7 @@ public class RealtimeManagerTest query, ImmutableList.of( new SegmentDescriptor( - new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), "ver", 0 )) @@ -580,7 +582,7 @@ public class RealtimeManagerTest query, ImmutableList.of( new SegmentDescriptor( - new Interval("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), + Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z"), "ver", 1 )) @@ -650,8 +652,8 @@ public class RealtimeManagerTest Thread.sleep(10); } - final Interval interval_26_28 = new Interval("2011-03-26T00:00:00.000Z/2011-03-28T00:00:00.000Z"); - final Interval interval_28_29 = new Interval("2011-03-28T00:00:00.000Z/2011-03-29T00:00:00.000Z"); + final Interval interval_26_28 = Intervals.of("2011-03-26T00:00:00.000Z/2011-03-28T00:00:00.000Z"); + final Interval interval_28_29 = Intervals.of("2011-03-28T00:00:00.000Z/2011-03-29T00:00:00.000Z"); final SegmentDescriptor descriptor_26_28_0 = new SegmentDescriptor(interval_26_28, "ver0", 0); final SegmentDescriptor descriptor_28_29_0 = new SegmentDescriptor(interval_28_29, "ver1", 0); final SegmentDescriptor descriptor_26_28_1 = new SegmentDescriptor(interval_26_28, "ver0", 1); @@ -799,7 +801,7 @@ public class RealtimeManagerTest @Override public DateTime getTimestamp() { - return new DateTime(timestamp); + return DateTimes.utc(timestamp); } @Override diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverFailTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverFailTest.java index af2aedecd44..e1237ef1696 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverFailTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverFailTest.java @@ -32,6 +32,7 @@ import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Query; @@ -44,7 +45,6 @@ import 
io.druid.segment.realtime.appenderator.AppenderatorDriverTest.TestSegment import io.druid.segment.realtime.appenderator.AppenderatorDriverTest.TestSegmentHandoffNotifierFactory; import io.druid.timeline.DataSegment; import org.hamcrest.CoreMatchers; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; @@ -73,17 +73,17 @@ public class AppenderatorDriverFailTest private static final List ROWS = ImmutableList.of( new MapBasedInputRow( - new DateTime("2000"), + DateTimes.of("2000"), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "foo", "met1", "1") ), new MapBasedInputRow( - new DateTime("2000T01"), + DateTimes.of("2000T01"), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "foo", "met1", 2.0) ), new MapBasedInputRow( - new DateTime("2000T01"), + DateTimes.of("2000T01"), ImmutableList.of("dim2"), ImmutableMap.of("dim2", "bar", "met1", 2.0) ) diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java index 46c75009483..d6430b6acff 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java @@ -34,6 +34,8 @@ import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -47,7 +49,6 @@ import io.druid.timeline.VersionedIntervalTimeline; import io.druid.timeline.partition.NumberedShardSpec; import io.druid.timeline.partition.PartitionChunk; import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -77,17 +78,17 @@ public class AppenderatorDriverTest private static final List ROWS = Arrays.asList( new MapBasedInputRow( - new DateTime("2000"), + DateTimes.of("2000"), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "foo", "met1", "1") ), new MapBasedInputRow( - new DateTime("2000T01"), + DateTimes.of("2000T01"), ImmutableList.of("dim1"), ImmutableMap.of("dim1", "foo", "met1", 2.0) ), new MapBasedInputRow( - new DateTime("2000T01"), + DateTimes.of("2000T01"), ImmutableList.of("dim2"), ImmutableMap.of("dim2", "bar", "met1", 2.0) ) @@ -145,8 +146,8 @@ public class AppenderatorDriverTest Assert.assertEquals( ImmutableSet.of( - new SegmentIdentifier(DATA_SOURCE, new Interval("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)), - new SegmentIdentifier(DATA_SOURCE, new Interval("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0)) + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)), + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(segmentsAndMetadata.getSegments()) ); @@ -164,7 +165,7 @@ public class AppenderatorDriverTest for (int i = 0; i < numSegments * MAX_ROWS_PER_SEGMENT; i++) { committerSupplier.setMetadata(i + 1); InputRow row = new MapBasedInputRow( - new DateTime("2000T01"), + DateTimes.of("2000T01"), ImmutableList.of("dim2"), ImmutableMap.of( "dim2", @@ -236,7 +237,7 @@ public class AppenderatorDriverTest 
Assert.assertEquals( ImmutableSet.of( - new SegmentIdentifier(DATA_SOURCE, new Interval("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)) + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(segmentsAndMetadata.getSegments()) ); @@ -259,7 +260,7 @@ public class AppenderatorDriverTest ImmutableSet.of( // The second and third rows have the same dataSource, interval, and version, but different shardSpec of // different partitionNum - new SegmentIdentifier(DATA_SOURCE, new Interval("2000T01/PT1H"), VERSION, new NumberedShardSpec(i - 1, 0)) + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(i - 1, 0)) ), asIdentifiers(segmentsAndMetadata.getSegments()) ); @@ -322,14 +323,14 @@ public class AppenderatorDriverTest Assert.assertEquals( ImmutableSet.of( - new SegmentIdentifier(DATA_SOURCE, new Interval("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)) + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(handedoffFromSequence0.getSegments()) ); Assert.assertEquals( ImmutableSet.of( - new SegmentIdentifier(DATA_SOURCE, new Interval("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0)) + new SegmentIdentifier(DATA_SOURCE, Intervals.of("2000T01/PT1H"), VERSION, new NumberedShardSpec(0, 0)) ), asIdentifiers(handedoffFromSequence1.getSegments()) ); @@ -417,14 +418,15 @@ public class AppenderatorDriverTest ) throws IOException { synchronized (counters) { - final long timestampTruncated = granularity.bucketStart(row.getTimestamp()).getMillis(); + DateTime dateTimeTruncated = granularity.bucketStart(row.getTimestamp()); + final long timestampTruncated = dateTimeTruncated.getMillis(); if (!counters.containsKey(timestampTruncated)) { counters.put(timestampTruncated, new AtomicInteger()); } final int partitionNum = counters.get(timestampTruncated).getAndIncrement(); return new SegmentIdentifier( dataSource, - granularity.bucket(new DateTime(timestampTruncated)), + granularity.bucket(dateTimeTruncated), VERSION, new NumberedShardSpec(partitionNum, 0) ); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java index f678f64b955..10d314eed88 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorTest.java @@ -28,6 +28,8 @@ import com.google.common.collect.Lists; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; @@ -43,8 +45,6 @@ import io.druid.segment.indexing.RealtimeTuningConfig; import io.druid.segment.realtime.plumber.Committers; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -303,7 +303,7 @@ public class AppenderatorTest // Query1: 2000/2001 final TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder() .dataSource(AppenderatorTester.DATASOURCE) - .intervals(ImmutableList.of(new Interval("2000/2001"))) + 
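The getSegmentIdentifier change in AppenderatorDriverTest above now calls granularity.bucketStart(row.getTimestamp()) once and reuses the truncated DateTime for both the counter key and granularity.bucket, instead of rebuilding a DateTime from the millis. A minimal sketch of that bucket-plus-counter allocation idea follows; the class and method names are hypothetical, not Druid APIs.

// Hypothetical sketch: one monotonically increasing partition counter per time bucket.
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

final class BucketPartitionCounter
{
  private final Map<Long, AtomicInteger> counters = new HashMap<>();

  // Returns the next partition number for the bucket starting at bucketStartMillis.
  synchronized int nextPartition(long bucketStartMillis)
  {
    return counters
        .computeIfAbsent(bucketStartMillis, k -> new AtomicInteger())
        .getAndIncrement();
  }
}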
.intervals(ImmutableList.of(Intervals.of("2000/2001"))) .aggregators( Arrays.asList( new LongSumAggregatorFactory("count", "count"), @@ -319,7 +319,7 @@ public class AppenderatorTest "query1", ImmutableList.of( new Result<>( - new DateTime("2000"), + DateTimes.of("2000"), new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ) ), @@ -329,7 +329,7 @@ public class AppenderatorTest // Query2: 2000/2002 final TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder() .dataSource(AppenderatorTester.DATASOURCE) - .intervals(ImmutableList.of(new Interval("2000/2002"))) + .intervals(ImmutableList.of(Intervals.of("2000/2002"))) .aggregators( Arrays.asList( new LongSumAggregatorFactory("count", "count"), @@ -345,11 +345,11 @@ public class AppenderatorTest "query2", ImmutableList.of( new Result<>( - new DateTime("2000"), + DateTimes.of("2000"), new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ), new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 4L, "met", 120L)) ) ), @@ -359,7 +359,7 @@ public class AppenderatorTest // Query3: 2000/2001T01 final TimeseriesQuery query3 = Druids.newTimeseriesQueryBuilder() .dataSource(AppenderatorTester.DATASOURCE) - .intervals(ImmutableList.of(new Interval("2000/2001T01"))) + .intervals(ImmutableList.of(Intervals.of("2000/2001T01"))) .aggregators( Arrays.asList( new LongSumAggregatorFactory("count", "count"), @@ -374,11 +374,11 @@ public class AppenderatorTest Assert.assertEquals( ImmutableList.of( new Result<>( - new DateTime("2000"), + DateTimes.of("2000"), new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ), new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 1L, "met", 8L)) ) ), @@ -390,8 +390,8 @@ public class AppenderatorTest .dataSource(AppenderatorTester.DATASOURCE) .intervals( ImmutableList.of( - new Interval("2000/2001T01"), - new Interval("2001T03/2001T04") + Intervals.of("2000/2001T01"), + Intervals.of("2001T03/2001T04") ) ) .aggregators( @@ -408,11 +408,11 @@ public class AppenderatorTest Assert.assertEquals( ImmutableList.of( new Result<>( - new DateTime("2000"), + DateTimes.of("2000"), new TimeseriesResultValue(ImmutableMap.of("count", 3L, "met", 7L)) ), new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 2L, "met", 72L)) ) ), @@ -465,7 +465,7 @@ public class AppenderatorTest "query1", ImmutableList.of( new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 4L, "met", 120L)) ) ), @@ -486,7 +486,7 @@ public class AppenderatorTest new MultipleSpecificSegmentSpec( ImmutableList.of( new SegmentDescriptor( - new Interval("2001/PT1H"), + Intervals.of("2001/PT1H"), IDENTIFIERS.get(2).getVersion(), IDENTIFIERS.get(2).getShardSpec().getPartitionNum() ) @@ -501,7 +501,7 @@ public class AppenderatorTest "query2", ImmutableList.of( new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 1L, "met", 8L)) ) ), @@ -522,12 +522,12 @@ public class AppenderatorTest new MultipleSpecificSegmentSpec( ImmutableList.of( new SegmentDescriptor( - new Interval("2001/PT1H"), + Intervals.of("2001/PT1H"), IDENTIFIERS.get(2).getVersion(), IDENTIFIERS.get(2).getShardSpec().getPartitionNum() ), new SegmentDescriptor( - new Interval("2001T03/PT1H"), + Intervals.of("2001T03/PT1H"), IDENTIFIERS.get(2).getVersion(), 
IDENTIFIERS.get(2).getShardSpec().getPartitionNum() ) @@ -542,7 +542,7 @@ public class AppenderatorTest "query2", ImmutableList.of( new Result<>( - new DateTime("2001"), + DateTimes.of("2001"), new TimeseriesResultValue(ImmutableMap.of("count", 2L, "met", 72L)) ) ), @@ -555,7 +555,7 @@ public class AppenderatorTest { return new SegmentIdentifier( AppenderatorTester.DATASOURCE, - new Interval(interval), + Intervals.of(interval), version, new LinearShardSpec(partitionNum) ); @@ -564,7 +564,7 @@ public class AppenderatorTest static InputRow IR(String ts, String dim, long met) { return new MapBasedInputRow( - new DateTime(ts).getMillis(), + DateTimes.of(ts).getMillis(), ImmutableList.of("dim"), ImmutableMap.of( "dim", diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java index 29c5f3ba4a3..abb086b295c 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/CommittedTest.java @@ -23,8 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.partition.LinearShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -36,21 +36,21 @@ public class CommittedTest private static final SegmentIdentifier IDENTIFIER_OBJECT1 = new SegmentIdentifier( "foo", - new Interval("2000/2001"), + Intervals.of("2000/2001"), "2000", new LinearShardSpec(1) ); private static final SegmentIdentifier IDENTIFIER_OBJECT2 = new SegmentIdentifier( "foo", - new Interval("2001/2002"), + Intervals.of("2001/2002"), "2001", new LinearShardSpec(1) ); private static final SegmentIdentifier IDENTIFIER_OBJECT3 = new SegmentIdentifier( "foo", - new Interval("2001/2002"), + Intervals.of("2001/2002"), "2001", new LinearShardSpec(2) ); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java index a286515a3f9..f2ebc839d64 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactoryTest.java @@ -32,6 +32,7 @@ import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.guice.GuiceInjectors; import io.druid.initialization.Initialization; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.DruidProcessingConfig; import io.druid.query.aggregation.AggregatorFactory; @@ -44,7 +45,6 @@ import io.druid.segment.indexing.granularity.UniformGranularitySpec; import io.druid.segment.realtime.FireDepartmentMetrics; import io.druid.segment.realtime.plumber.Committers; import io.druid.timeline.partition.LinearShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -157,7 +157,7 @@ public class DefaultOfflineAppenderatorFactoryTest Assert.assertEquals(null, appenderator.startJob()); SegmentIdentifier identifier = new SegmentIdentifier( "dataSourceName", - new Interval("2000/2001"), + 
Intervals.of("2000/2001"), "A", new LinearShardSpec(0) ); diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/SegmentIdentifierTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/SegmentIdentifierTest.java index 79571ef6f7e..d3bc406c0e9 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/SegmentIdentifierTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/SegmentIdentifierTest.java @@ -21,6 +21,7 @@ package io.druid.segment.realtime.appenderator; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.timeline.partition.NumberedShardSpec; import org.joda.time.Interval; import org.junit.Assert; @@ -29,7 +30,7 @@ import org.junit.Test; public class SegmentIdentifierTest { private static final String DATA_SOURCE = "foo"; - private static final Interval INTERVAL = new Interval("2000/PT1H"); + private static final Interval INTERVAL = Intervals.of("2000/PT1H"); private static final String VERSION = "v1"; private static final NumberedShardSpec SHARD_SPEC_0 = new NumberedShardSpec(0, 2); private static final NumberedShardSpec SHARD_SPEC_1 = new NumberedShardSpec(1, 2); diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java index 4b2b106d8f2..94c87c953c3 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/EventReceiverFirehoseTest.java @@ -27,12 +27,12 @@ import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.MapInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.server.metrics.EventReceiverFirehoseMetric; import io.druid.server.metrics.EventReceiverFirehoseRegister; import org.apache.commons.io.IOUtils; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -228,7 +228,7 @@ public class EventReceiverFirehoseTest @Test(timeout = 40_000L) public void testShutdownWithPrevTime() throws Exception { - firehose.shutdown(DateTime.now().minusMinutes(2).toString()); + firehose.shutdown(DateTimes.nowUtc().minusMinutes(2).toString()); while (!firehose.isClosed()) { Thread.sleep(50); } @@ -237,7 +237,7 @@ public class EventReceiverFirehoseTest @Test(timeout = 40_000L) public void testShutdown() throws Exception { - firehose.shutdown(DateTime.now().plusMillis(100).toString()); + firehose.shutdown(DateTimes.nowUtc().plusMillis(100).toString()); while (!firehose.isClosed()) { Thread.sleep(50); } diff --git a/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java b/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java index bf3a4780567..0d983788f39 100644 --- a/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java +++ b/server/src/test/java/io/druid/segment/realtime/firehose/IngestSegmentFirehoseTest.java @@ -31,6 +31,8 @@ import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.hll.HyperLogLogCollector; +import 
io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; @@ -45,8 +47,6 @@ import io.druid.segment.TestHelper; import io.druid.segment.incremental.IncrementalIndex; import io.druid.segment.incremental.IncrementalIndexSchema; import io.druid.segment.incremental.IncrementalIndexStorageAdapter; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -127,7 +127,7 @@ public class IngestSegmentFirehoseTest final InputRow row = firehose.nextRow(); Assert.assertNotNull(row); if (count == 0) { - Assert.assertEquals(new DateTime("2014-10-22T00Z"), row.getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-10-22T00Z"), row.getTimestamp()); Assert.assertEquals("host1", row.getRaw("host")); Assert.assertEquals("0,1", row.getRaw("spatial")); Assert.assertEquals(10L, row.getRaw("visited_sum")); @@ -148,14 +148,14 @@ public class IngestSegmentFirehoseTest // Do a spatial filter final IngestSegmentFirehose firehose2 = new IngestSegmentFirehose( - ImmutableList.of(new WindowedStorageAdapter(queryable, new Interval("2000/3000"))), + ImmutableList.of(new WindowedStorageAdapter(queryable, Intervals.of("2000/3000"))), ImmutableList.of("host", "spatial"), ImmutableList.of("visited_sum", "unique_hosts"), new SpatialDimFilter("spatial", new RadiusBound(new float[]{1, 0}, 0.1f)) ); final InputRow row = firehose2.nextRow(); Assert.assertFalse(firehose2.hasMore()); - Assert.assertEquals(new DateTime("2014-10-22T00Z"), row.getTimestamp()); + Assert.assertEquals(DateTimes.of("2014-10-22T00Z"), row.getTimestamp()); Assert.assertEquals("host2", row.getRaw("host")); Assert.assertEquals("1,0", row.getRaw("spatial")); Assert.assertEquals(40L, row.getRaw("visited_sum")); diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java index bc40f3dcb41..b328add2de7 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/CoordinatorBasedSegmentHandoffNotifierTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.Sets; import com.google.common.util.concurrent.MoreExecutors; import io.druid.client.ImmutableSegmentLoadInfo; import io.druid.client.coordinator.CoordinatorClient; +import io.druid.java.util.common.Intervals; import io.druid.query.SegmentDescriptor; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; @@ -53,7 +54,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest @Test public void testHandoffCallbackNotCalled() throws IOException, InterruptedException { - Interval interval = new Interval( + Interval interval = Intervals.of( "2011-04-01/2011-04-02" ); SegmentDescriptor descriptor = new SegmentDescriptor( @@ -110,7 +111,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest @Test public void testHandoffCallbackCalled() throws IOException, InterruptedException { - Interval interval = new Interval( + Interval interval = Intervals.of( "2011-04-01/2011-04-02" ); SegmentDescriptor descriptor = new SegmentDescriptor( @@ -168,7 +169,7 @@ public class 
CoordinatorBasedSegmentHandoffNotifierTest @Test public void testHandoffChecksForVersion() { - Interval interval = new Interval( + Interval interval = Intervals.of( "2011-04-01/2011-04-02" ); Assert.assertFalse( @@ -212,7 +213,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest @Test public void testHandoffChecksForAssignableServer() { - Interval interval = new Interval( + Interval interval = Intervals.of( "2011-04-01/2011-04-02" ); Assert.assertTrue( @@ -243,7 +244,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest @Test public void testHandoffChecksForPartitionNumber() { - Interval interval = new Interval( + Interval interval = Intervals.of( "2011-04-01/2011-04-02" ); Assert.assertTrue( @@ -281,7 +282,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest Lists.newArrayList( new ImmutableSegmentLoadInfo( createSegment( - new Interval( + Intervals.of( "2011-04-01/2011-04-02" ), "v1", 1 ), @@ -289,7 +290,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest ) ), new SegmentDescriptor( - new Interval( + Intervals.of( "2011-04-01/2011-04-03" ), "v1", 1 ) @@ -301,7 +302,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest Lists.newArrayList( new ImmutableSegmentLoadInfo( createSegment( - new Interval( + Intervals.of( "2011-04-01/2011-04-04" ), "v1", 1 ), @@ -309,7 +310,7 @@ public class CoordinatorBasedSegmentHandoffNotifierTest ) ), new SegmentDescriptor( - new Interval( + Intervals.of( "2011-04-02/2011-04-03" ), "v1", 1 ) diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/CustomVersioningPolicyTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/CustomVersioningPolicyTest.java new file mode 100644 index 00000000000..4a909d36a80 --- /dev/null +++ b/server/src/test/java/io/druid/segment/realtime/plumber/CustomVersioningPolicyTest.java @@ -0,0 +1,48 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package io.druid.segment.realtime.plumber; + +import io.druid.TestUtil; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.Interval; +import org.junit.Assert; +import org.junit.Test; + +public class CustomVersioningPolicyTest +{ + + @Test + public void testSerialization() throws Exception + { + Interval interval = new Interval(DateTime.now(DateTimeZone.UTC), DateTime.now(DateTimeZone.UTC)); + String version = "someversion"; + + CustomVersioningPolicy policy = new CustomVersioningPolicy(version); + + CustomVersioningPolicy serialized = TestUtil.MAPPER.readValue( + TestUtil.MAPPER.writeValueAsBytes(policy), + CustomVersioningPolicy.class + ); + + Assert.assertEquals(version, policy.getVersion(interval)); + Assert.assertEquals(version, serialized.getVersion(interval)); + } +} diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java index 74feccfb9e1..1a3d8ac65ad 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/IntervalStartVersioningPolicyTest.java @@ -19,7 +19,7 @@ package io.druid.segment.realtime.plumber; -import org.joda.time.Interval; +import io.druid.java.util.common.Intervals; import org.junit.Assert; import org.junit.Test; @@ -31,7 +31,7 @@ public class IntervalStartVersioningPolicyTest public void testGetVersion() throws Exception { IntervalStartVersioningPolicy policy = new IntervalStartVersioningPolicy(); - String version = policy.getVersion(new Interval("2013-01-01/2013-01-02")); + String version = policy.getVersion(Intervals.of("2013-01-01/2013-01-02")); Assert.assertEquals("2013-01-01T00:00:00.000Z", version); } } diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java index 8fa35b7b12d..f4dfc672346 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactoryTest.java @@ -19,6 +19,7 @@ package io.druid.segment.realtime.plumber; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Period; import org.junit.Assert; @@ -34,7 +35,7 @@ public class MessageTimeRejectionPolicyFactoryTest Period period = new Period("PT10M"); RejectionPolicy rejectionPolicy = new MessageTimeRejectionPolicyFactory().create(period); - DateTime now = new DateTime(); + DateTime now = DateTimes.nowUtc(); DateTime past = now.minus(period).minus(1); DateTime future = now.plus(period).plus(1); diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java index 640f16b1188..414dd897738 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java @@ -36,6 +36,8 @@ import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import 
io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.DefaultQueryRunnerFactoryConglomerate; import io.druid.query.Query; @@ -256,10 +258,10 @@ public class RealtimePlumberSchoolTest .put( 0L, new Sink( - new Interval(0, TimeUnit.HOURS.toMillis(1)), + Intervals.utc(0, TimeUnit.HOURS.toMillis(1)), schema, tuningConfig.getShardSpec(), - new DateTime("2014-12-01T12:34:56.789").toString(), + DateTimes.of("2014-12-01T12:34:56.789").toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions() ) @@ -303,10 +305,10 @@ public class RealtimePlumberSchoolTest .put( 0L, new Sink( - new Interval(0, TimeUnit.HOURS.toMillis(1)), + Intervals.utc(0, TimeUnit.HOURS.toMillis(1)), schema, tuningConfig.getShardSpec(), - new DateTime("2014-12-01T12:34:56.789").toString(), + DateTimes.of("2014-12-01T12:34:56.789").toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions() ) @@ -353,7 +355,7 @@ public class RealtimePlumberSchoolTest private void testPersistHydrantGapsHelper(final Object commitMetadata) throws Exception { - Interval testInterval = new Interval(new DateTime("1970-01-01"), new DateTime("1971-01-01")); + Interval testInterval = new Interval(DateTimes.of("1970-01-01"), DateTimes.of("1971-01-01")); RealtimePlumber plumber2 = (RealtimePlumber) realtimePlumberSchool.findPlumber(schema2, tuningConfig, metrics); plumber2.getSinks() @@ -363,7 +365,7 @@ public class RealtimePlumberSchoolTest testInterval, schema2, tuningConfig.getShardSpec(), - new DateTime("2014-12-01T12:34:56.789").toString(), + DateTimes.of("2014-12-01T12:34:56.789").toString(), tuningConfig.getMaxRowsInMemory(), tuningConfig.isReportParseExceptions() ) @@ -418,8 +420,8 @@ public class RealtimePlumberSchoolTest List hydrants = Lists.newArrayList(sinks.get(new Long(0))); - DateTime startTime = new DateTime("1970-01-01T00:00:00.000Z"); - Interval expectedInterval = new Interval(startTime, new DateTime("1971-01-01T00:00:00.000Z")); + DateTime startTime = DateTimes.of("1970-01-01T00:00:00.000Z"); + Interval expectedInterval = new Interval(startTime, DateTimes.of("1971-01-01T00:00:00.000Z")); Assert.assertEquals(0, hydrants.get(0).getCount()); Assert.assertEquals( expectedInterval, @@ -580,13 +582,13 @@ public class RealtimePlumberSchoolTest @Override public long getTimestampFromEpoch() { - return new DateTime(timeStr).getMillis(); + return DateTimes.of(timeStr).getMillis(); } @Override public DateTime getTimestamp() { - return new DateTime(timeStr); + return DateTimes.of(timeStr); } @Override @@ -640,13 +642,13 @@ public class RealtimePlumberSchoolTest @Override public long getTimestampFromEpoch() { - return new DateTime(timeStr).getMillis(); + return DateTimes.of(timeStr).getMillis(); } @Override public DateTime getTimestamp() { - return new DateTime(timeStr); + return DateTimes.of(timeStr); } @Override diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java index c7324f64306..7eab7ed587a 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactoryTest.java @@ -19,6 +19,7 @@ package io.druid.segment.realtime.plumber; +import io.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import org.joda.time.Period; import 
org.junit.Assert; @@ -35,7 +36,7 @@ public class ServerTimeRejectionPolicyFactoryTest RejectionPolicy rejectionPolicy = new ServerTimeRejectionPolicyFactory().create(period); - DateTime now = new DateTime(); + DateTime now = DateTimes.nowUtc(); DateTime past = now.minus(period).minus(100); DateTime future = now.plus(period).plus(100); diff --git a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java index c93323da67e..b84d154b2f0 100644 --- a/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java +++ b/server/src/test/java/io/druid/segment/realtime/plumber/SinkTest.java @@ -24,6 +24,8 @@ import com.google.common.collect.Lists; import io.druid.data.input.InputRow; import io.druid.data.input.Row; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; @@ -54,8 +56,8 @@ public class SinkTest new DefaultObjectMapper() ); - final Interval interval = new Interval("2013-01-01/2013-01-02"); - final String version = new DateTime().toString(); + final Interval interval = Intervals.of("2013-01-01/2013-01-02"); + final String version = DateTimes.nowUtc().toString(); RealtimeTuningConfig tuningConfig = new RealtimeTuningConfig( 100, new Period("P1Y"), @@ -94,13 +96,13 @@ public class SinkTest @Override public long getTimestampFromEpoch() { - return new DateTime("2013-01-01").getMillis(); + return DateTimes.of("2013-01-01").getMillis(); } @Override public DateTime getTimestamp() { - return new DateTime("2013-01-01"); + return DateTimes.of("2013-01-01"); } @Override @@ -142,7 +144,7 @@ public class SinkTest ); FireHydrant currHydrant = sink.getCurrHydrant(); - Assert.assertEquals(new Interval("2013-01-01/PT1M"), currHydrant.getIndex().getInterval()); + Assert.assertEquals(Intervals.of("2013-01-01/PT1M"), currHydrant.getIndex().getInterval()); FireHydrant swapHydrant = sink.swap(); @@ -159,13 +161,13 @@ public class SinkTest @Override public long getTimestampFromEpoch() { - return new DateTime("2013-01-01").getMillis(); + return DateTimes.of("2013-01-01").getMillis(); } @Override public DateTime getTimestamp() { - return new DateTime("2013-01-01"); + return DateTimes.of("2013-01-01"); } @Override @@ -208,7 +210,7 @@ public class SinkTest Assert.assertEquals(currHydrant, swapHydrant); Assert.assertNotSame(currHydrant, sink.getCurrHydrant()); - Assert.assertEquals(new Interval("2013-01-01/PT1M"), sink.getCurrHydrant().getIndex().getInterval()); + Assert.assertEquals(Intervals.of("2013-01-01/PT1M"), sink.getCurrHydrant().getIndex().getInterval()); Assert.assertEquals(2, Iterators.size(sink.iterator())); } diff --git a/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java b/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java index 60b5ffd28b8..a3ca079f9cc 100644 --- a/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java +++ b/server/src/test/java/io/druid/server/AsyncQueryForwardingServletTest.java @@ -29,7 +29,6 @@ import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.servlet.GuiceFilter; - import io.druid.common.utils.SocketUtil; import io.druid.guice.GuiceInjectors; import io.druid.guice.Jerseys; @@ -108,7 +107,9 @@ public class 
AsyncQueryForwardingServletTest extends BaseJettyTest public void configure(Binder binder) { JsonConfigProvider.bindInstance( - binder, Key.get(DruidNode.class, Self.class), new DruidNode("test", "localhost", null, null, new ServerConfig()) + binder, + Key.get(DruidNode.class, Self.class), + new DruidNode("test", "localhost", null, null, new ServerConfig()) ); binder.bind(JettyServerInitializer.class).to(ProxyJettyServerInit.class).in(LazySingleton.class); Jerseys.addResource(binder, SlowResource.class); @@ -197,24 +198,24 @@ public class AsyncQueryForwardingServletTest extends BaseJettyTest final QueryHostFinder hostFinder = new QueryHostFinder(null) { @Override - public String getHost(Query query) + public io.druid.client.selector.Server getServer(Query query) { - return "localhost:" + node.getPlaintextPort(); + return new TestServer("http", "localhost", node.getPlaintextPort()); } @Override - public String getDefaultHost() + public io.druid.client.selector.Server getDefaultServer() { - return "localhost:" + node.getPlaintextPort(); + return new TestServer("http", "localhost", node.getPlaintextPort()); } @Override - public Collection getAllHosts() + public Collection getAllServers() { return ImmutableList.of( - "localhost:" + node.getPlaintextPort(), - "localhost:" + port1, - "localhost:" + port2 + new TestServer("http", "localhost", node.getPlaintextPort()), + new TestServer("http", "localhost", port1), + new TestServer("http", "localhost", port2) ); } }; @@ -241,9 +242,9 @@ public class AsyncQueryForwardingServletTest extends BaseJettyTest ) { @Override - protected URI rewriteURI(HttpServletRequest request, String host) + protected URI rewriteURI(HttpServletRequest request, String scheme, String host) { - String uri = super.rewriteURI(request, host).toString(); + String uri = super.rewriteURI(request, scheme, host).toString(); if (uri.contains("/druid/v2")) { return URI.create(uri.replace("/druid/v2", "/default")); } @@ -272,7 +273,7 @@ public class AsyncQueryForwardingServletTest extends BaseJettyTest // test params Assert.assertEquals( new URI("http://localhost:1234/some/path?param=1"), - AsyncQueryForwardingServlet.makeURI("localhost:1234", "/some/path", "param=1") + AsyncQueryForwardingServlet.makeURI("http", "localhost:1234", "/some/path", "param=1") ); // HttpServletRequest.getQueryString returns encoded form @@ -280,6 +281,7 @@ public class AsyncQueryForwardingServletTest extends BaseJettyTest Assert.assertEquals( "http://[2a00:1450:4007:805::1007]:1234/some/path?param=1¶m2=%E2%82%AC", AsyncQueryForwardingServlet.makeURI( + "http", HostAndPort.fromParts("2a00:1450:4007:805::1007", 1234).toString(), "/some/path", "param=1¶m2=%E2%82%AC" @@ -289,7 +291,46 @@ public class AsyncQueryForwardingServletTest extends BaseJettyTest // test null query Assert.assertEquals( new URI("http://localhost/"), - AsyncQueryForwardingServlet.makeURI("localhost", "/", null) + AsyncQueryForwardingServlet.makeURI("http", "localhost", "/", null) ); } + + private static class TestServer implements io.druid.client.selector.Server + { + + private final String scheme; + private final String address; + private final int port; + + public TestServer(String scheme, String address, int port) + { + this.scheme = scheme; + this.address = address; + this.port = port; + } + + @Override + public String getScheme() + { + return scheme; + } + + @Override + public String getHost() + { + return address + ":" + port; + } + + @Override + public String getAddress() + { + return address; + } + + @Override + public int 
getPort() + { + return port; + } + } } diff --git a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java index aacf8a3ac94..a397c749171 100644 --- a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java +++ b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java @@ -46,6 +46,7 @@ import io.druid.client.TimelineServerView; import io.druid.client.selector.HighestPriorityTierSelectorStrategy; import io.druid.client.selector.RandomServerSelectorStrategy; import io.druid.client.selector.ServerSelector; +import io.druid.java.util.common.Intervals; import io.druid.query.TableDataSource; import io.druid.query.metadata.SegmentMetadataQueryConfig; import io.druid.server.coordination.ServerType; @@ -57,7 +58,7 @@ import io.druid.timeline.partition.ShardSpec; import io.druid.timeline.partition.SingleElementPartitionChunk; import org.easymock.EasyMock; import org.joda.time.DateTime; -import org.joda.time.Interval; +import org.joda.time.chrono.ISOChronology; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -69,7 +70,7 @@ public class ClientInfoResourceTest { private static final String KEY_DIMENSIONS = "dimensions"; private static final String KEY_METRICS = "metrics"; - private static final DateTime FIXED_TEST_TIME = new DateTime(2015, 9, 14, 0, 0); /* always use the same current time for unit tests */ + private static final DateTime FIXED_TEST_TIME = new DateTime(2015, 9, 14, 0, 0, ISOChronology.getInstanceUTC()); /* always use the same current time for unit tests */ private final String dataSource = "test-data-source"; @@ -373,7 +374,7 @@ public class ClientInfoResourceTest { DataSegment segment = DataSegment.builder() .dataSource(dataSource) - .interval(new Interval(interval)) + .interval(Intervals.of(interval)) .version(version) .dimensions(dims) .metrics(metrics) @@ -381,7 +382,7 @@ public class ClientInfoResourceTest .build(); server.addDataSegment(segment.getIdentifier(), segment); ServerSelector ss = new ServerSelector(segment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())); - timeline.add(new Interval(interval), version, new SingleElementPartitionChunk(ss)); + timeline.add(Intervals.of(interval), version, new SingleElementPartitionChunk(ss)); } private void addSegmentWithShardSpec( @@ -396,7 +397,7 @@ public class ClientInfoResourceTest { DataSegment segment = DataSegment.builder() .dataSource(dataSource) - .interval(new Interval(interval)) + .interval(Intervals.of(interval)) .version(version) .dimensions(dims) .metrics(metrics) @@ -405,7 +406,7 @@ public class ClientInfoResourceTest .build(); server.addDataSegment(segment.getIdentifier(), segment); ServerSelector ss = new ServerSelector(segment, new HighestPriorityTierSelectorStrategy(new RandomServerSelectorStrategy())); - timeline.add(new Interval(interval), version, shardSpec.createChunk(ss)); + timeline.add(Intervals.of(interval), version, shardSpec.createChunk(ss)); } private ClientInfoResource getResourceTestHelper( diff --git a/server/src/test/java/io/druid/server/SegmentManagerTest.java b/server/src/test/java/io/druid/server/SegmentManagerTest.java index 3176b00368a..ef6aab8b55d 100644 --- a/server/src/test/java/io/druid/server/SegmentManagerTest.java +++ b/server/src/test/java/io/druid/server/SegmentManagerTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import 
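The ClientInfoResourceTest hunk above pins FIXED_TEST_TIME to ISOChronology.getInstanceUTC() so the constant resolves to the same instant on every machine. The following comparison is illustrative only, not part of the patch:

// Illustrative: zone-sensitive vs. zone-pinned construction of the same wall-clock value.
import org.joda.time.DateTime;
import org.joda.time.chrono.ISOChronology;

class FixedTestTimeExample
{
  // Interpreted in the JVM's default time zone; the underlying millis vary per machine.
  static final DateTime ZONE_DEPENDENT = new DateTime(2015, 9, 14, 0, 0);

  // Interpreted in UTC everywhere, so time-based assertions stay reproducible.
  static final DateTime ZONE_PINNED =
      new DateTime(2015, 9, 14, 0, 0, ISOChronology.getInstanceUTC());
}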
com.google.common.collect.Ordering; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.segment.AbstractSegment; import io.druid.segment.QueryableIndex; @@ -148,9 +149,9 @@ public class SegmentManagerTest private static final List segments = ImmutableList.of( new DataSegment( "small_source", - new Interval("0/1000"), + Intervals.of("0/1000"), "0", - ImmutableMap.of("interval", new Interval("0/1000"), "version", 0), + ImmutableMap.of("interval", Intervals.of("0/1000"), "version", 0), Lists.newArrayList(), Lists.newArrayList(), NoneShardSpec.instance(), @@ -159,9 +160,9 @@ public class SegmentManagerTest ), new DataSegment( "small_source", - new Interval("1000/2000"), + Intervals.of("1000/2000"), "0", - ImmutableMap.of("interval", new Interval("1000/2000"), "version", 0), + ImmutableMap.of("interval", Intervals.of("1000/2000"), "version", 0), Lists.newArrayList(), Lists.newArrayList(), NoneShardSpec.instance(), @@ -170,9 +171,9 @@ public class SegmentManagerTest ), new DataSegment( "large_source", - new Interval("0/1000"), + Intervals.of("0/1000"), "0", - ImmutableMap.of("interval", new Interval("0/1000"), "version", 0), + ImmutableMap.of("interval", Intervals.of("0/1000"), "version", 0), Lists.newArrayList(), Lists.newArrayList(), NoneShardSpec.instance(), @@ -181,9 +182,9 @@ public class SegmentManagerTest ), new DataSegment( "large_source", - new Interval("1000/2000"), + Intervals.of("1000/2000"), "0", - ImmutableMap.of("interval", new Interval("1000/2000"), "version", 0), + ImmutableMap.of("interval", Intervals.of("1000/2000"), "version", 0), Lists.newArrayList(), Lists.newArrayList(), NoneShardSpec.instance(), @@ -193,9 +194,9 @@ public class SegmentManagerTest // overshadowing the ahead segment new DataSegment( "large_source", - new Interval("1000/2000"), + Intervals.of("1000/2000"), "1", - ImmutableMap.of("interval", new Interval("1000/2000"), "version", 1), + ImmutableMap.of("interval", Intervals.of("1000/2000"), "version", 1), Lists.newArrayList(), Lists.newArrayList(), NoneShardSpec.instance(), diff --git a/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java b/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java index f0edf608751..896ada8780e 100644 --- a/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java +++ b/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java @@ -24,11 +24,11 @@ import io.druid.audit.AuditEntry; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.metadata.TestDerbyConnector; import io.druid.server.metrics.NoopServiceEmitter; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -76,7 +76,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); ObjectMapper mapper = new DefaultObjectMapper(); AuditEntry serde = mapper.readValue(mapper.writeValueAsString(entry), AuditEntry.class); @@ -95,7 +95,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); auditManager.doAudit(entry); byte[] payload = connector.lookup( @@ -121,16 +121,14 @@ public class SQLAuditManagerTest 
"127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); auditManager.doAudit(entry); auditManager.doAudit(entry); List auditEntries = auditManager.fetchAuditHistory( "testKey", "testType", - new Interval( - "2012-01-01T00:00:00Z/2013-01-03T00:00:00Z" - ) + Intervals.of("2012-01-01T00:00:00Z/2013-01-03T00:00:00Z") ); Assert.assertEquals(2, auditEntries.size()); Assert.assertEquals(entry, auditEntries.get(0)); @@ -149,7 +147,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey2", @@ -160,7 +158,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); auditManager.doAudit(entry1); auditManager.doAudit(entry2); @@ -185,7 +183,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey", @@ -196,7 +194,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); AuditEntry entry3 = new AuditEntry( "testKey", @@ -207,7 +205,7 @@ public class SQLAuditManagerTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-03T00:00:00Z") + DateTimes.of("2013-01-03T00:00:00Z") ); auditManager.doAudit(entry1); auditManager.doAudit(entry2); diff --git a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java index 76c59048c1a..f7c970fc2d1 100644 --- a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java +++ b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestDropTest.java @@ -19,10 +19,11 @@ package io.druid.server.coordination; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.segment.IndexIO; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; @@ -42,7 +43,7 @@ public class SegmentChangeRequestDropTest { ObjectMapper mapper = new DefaultObjectMapper(); - final Interval interval = new Interval("2011-10-01/2011-10-02"); + final Interval interval = Intervals.of("2011-10-01/2011-10-02"); final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( @@ -60,7 +61,7 @@ public class SegmentChangeRequestDropTest final SegmentChangeRequestDrop segmentDrop = new SegmentChangeRequestDrop(segment); Map objectMap = mapper.readValue( - mapper.writeValueAsString(segmentDrop), new TypeReference>(){} + mapper.writeValueAsString(segmentDrop), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); Assert.assertEquals(11, objectMap.size()); diff --git a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java index ac0340f873c..3c8a68db6a1 100644 --- a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java +++ 
b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestLoadTest.java @@ -19,10 +19,11 @@ package io.druid.server.coordination; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.jackson.JacksonUtils; +import io.druid.java.util.common.Intervals; import io.druid.segment.IndexIO; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; @@ -42,7 +43,7 @@ public class SegmentChangeRequestLoadTest { ObjectMapper mapper = new DefaultObjectMapper(); - final Interval interval = new Interval("2011-10-01/2011-10-02"); + final Interval interval = Intervals.of("2011-10-01/2011-10-02"); final ImmutableMap loadSpec = ImmutableMap.of("something", "or_other"); DataSegment segment = new DataSegment( @@ -60,7 +61,7 @@ public class SegmentChangeRequestLoadTest final SegmentChangeRequestLoad segmentDrop = new SegmentChangeRequestLoad(segment); Map objectMap = mapper.readValue( - mapper.writeValueAsString(segmentDrop), new TypeReference>(){} + mapper.writeValueAsString(segmentDrop), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); Assert.assertEquals(11, objectMap.size()); diff --git a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java index 9412232afcc..33cf26e9dd9 100644 --- a/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/io/druid/server/coordination/ServerManagerTest.java @@ -32,6 +32,7 @@ import io.druid.client.cache.CacheConfig; import io.druid.client.cache.LocalCacheProvider; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.Pair; import io.druid.java.util.common.granularity.Granularities; @@ -157,19 +158,19 @@ public class ServerManagerTest segmentManager ); - loadQueryable("test", "1", new Interval("P1d/2011-04-01")); - loadQueryable("test", "1", new Interval("P1d/2011-04-02")); - loadQueryable("test", "2", new Interval("P1d/2011-04-02")); - loadQueryable("test", "1", new Interval("P1d/2011-04-03")); - loadQueryable("test", "1", new Interval("P1d/2011-04-04")); - loadQueryable("test", "1", new Interval("P1d/2011-04-05")); - loadQueryable("test", "2", new Interval("PT1h/2011-04-04T01")); - loadQueryable("test", "2", new Interval("PT1h/2011-04-04T02")); - loadQueryable("test", "2", new Interval("PT1h/2011-04-04T03")); - loadQueryable("test", "2", new Interval("PT1h/2011-04-04T05")); - loadQueryable("test", "2", new Interval("PT1h/2011-04-04T06")); - loadQueryable("test2", "1", new Interval("P1d/2011-04-01")); - loadQueryable("test2", "1", new Interval("P1d/2011-04-02")); + loadQueryable("test", "1", Intervals.of("P1d/2011-04-01")); + loadQueryable("test", "1", Intervals.of("P1d/2011-04-02")); + loadQueryable("test", "2", Intervals.of("P1d/2011-04-02")); + loadQueryable("test", "1", Intervals.of("P1d/2011-04-03")); + loadQueryable("test", "1", Intervals.of("P1d/2011-04-04")); + loadQueryable("test", "1", Intervals.of("P1d/2011-04-05")); + loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T01")); + loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T02")); + loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T03")); + loadQueryable("test", "2", 
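The SegmentChangeRequest serde tests above replace the anonymous TypeReference for Map<String, Object> with the shared JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT constant; because a TypeReference only captures a generic type, a single instance can safely be reused by every caller. A hedged sketch of that pattern (class and method names here are illustrative):

// Sketch of a shared TypeReference constant in the spirit of
// JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT used by the hunks above.
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.util.Map;

final class JacksonTypeRefs
{
  static final TypeReference<Map<String, Object>> MAP_STRING_OBJECT =
      new TypeReference<Map<String, Object>>() {};

  // Serialize a value and read it back as a generic map, reusing the shared TypeReference.
  static Map<String, Object> roundTrip(ObjectMapper mapper, Object value) throws IOException
  {
    return mapper.readValue(mapper.writeValueAsString(value), MAP_STRING_OBJECT);
  }
}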
Intervals.of("PT1h/2011-04-04T05")); + loadQueryable("test", "2", Intervals.of("PT1h/2011-04-04T06")); + loadQueryable("test2", "1", Intervals.of("P1d/2011-04-01")); + loadQueryable("test2", "1", Intervals.of("P1d/2011-04-02")); } @Test @@ -178,9 +179,9 @@ public class ServerManagerTest Future future = assertQueryable( Granularities.DAY, "test", - new Interval("P1d/2011-04-01"), + Intervals.of("P1d/2011-04-01"), ImmutableList.>of( - new Pair("1", new Interval("P1d/2011-04-01")) + new Pair("1", Intervals.of("P1d/2011-04-01")) ) ); waitForTestVerificationAndCleanup(future); @@ -188,10 +189,10 @@ public class ServerManagerTest future = assertQueryable( Granularities.DAY, - "test", new Interval("P2d/2011-04-02"), + "test", Intervals.of("P2d/2011-04-02"), ImmutableList.>of( - new Pair("1", new Interval("P1d/2011-04-01")), - new Pair("2", new Interval("P1d/2011-04-02")) + new Pair("1", Intervals.of("P1d/2011-04-01")), + new Pair("2", Intervals.of("P1d/2011-04-02")) ) ); waitForTestVerificationAndCleanup(future); @@ -201,7 +202,7 @@ public class ServerManagerTest public void testDelete1() throws Exception { final String dataSouce = "test"; - final Interval interval = new Interval("2011-04-01/2011-04-02"); + final Interval interval = Intervals.of("2011-04-01/2011-04-02"); Future future = assertQueryable( Granularities.DAY, @@ -226,50 +227,50 @@ public class ServerManagerTest @Test public void testDelete2() throws Exception { - loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); Future future = assertQueryable( Granularities.DAY, - "test", new Interval("2011-04-04/2011-04-06"), + "test", Intervals.of("2011-04-04/2011-04-06"), ImmutableList.>of( - new Pair("3", new Interval("2011-04-04/2011-04-05")) + new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); waitForTestVerificationAndCleanup(future); - dropQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); - dropQueryable("test", "1", new Interval("2011-04-04/2011-04-05")); + dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); + dropQueryable("test", "1", Intervals.of("2011-04-04/2011-04-05")); future = assertQueryable( Granularities.HOUR, - "test", new Interval("2011-04-04/2011-04-04T06"), + "test", Intervals.of("2011-04-04/2011-04-04T06"), ImmutableList.>of( - new Pair("2", new Interval("2011-04-04T00/2011-04-04T01")), - new Pair("2", new Interval("2011-04-04T01/2011-04-04T02")), - new Pair("2", new Interval("2011-04-04T02/2011-04-04T03")), - new Pair("2", new Interval("2011-04-04T04/2011-04-04T05")), - new Pair("2", new Interval("2011-04-04T05/2011-04-04T06")) + new Pair("2", Intervals.of("2011-04-04T00/2011-04-04T01")), + new Pair("2", Intervals.of("2011-04-04T01/2011-04-04T02")), + new Pair("2", Intervals.of("2011-04-04T02/2011-04-04T03")), + new Pair("2", Intervals.of("2011-04-04T04/2011-04-04T05")), + new Pair("2", Intervals.of("2011-04-04T05/2011-04-04T06")) ) ); waitForTestVerificationAndCleanup(future); future = assertQueryable( Granularities.HOUR, - "test", new Interval("2011-04-04/2011-04-04T03"), + "test", Intervals.of("2011-04-04/2011-04-04T03"), ImmutableList.>of( - new Pair("2", new Interval("2011-04-04T00/2011-04-04T01")), - new Pair("2", new Interval("2011-04-04T01/2011-04-04T02")), - new Pair("2", new Interval("2011-04-04T02/2011-04-04T03")) + new Pair("2", Intervals.of("2011-04-04T00/2011-04-04T01")), + new Pair("2", Intervals.of("2011-04-04T01/2011-04-04T02")), + new Pair("2", 
Intervals.of("2011-04-04T02/2011-04-04T03")) ) ); waitForTestVerificationAndCleanup(future); future = assertQueryable( Granularities.HOUR, - "test", new Interval("2011-04-04T04/2011-04-04T06"), + "test", Intervals.of("2011-04-04T04/2011-04-04T06"), ImmutableList.>of( - new Pair("2", new Interval("2011-04-04T04/2011-04-04T05")), - new Pair("2", new Interval("2011-04-04T05/2011-04-04T06")) + new Pair("2", Intervals.of("2011-04-04T04/2011-04-04T05")), + new Pair("2", Intervals.of("2011-04-04T05/2011-04-04T06")) ) ); waitForTestVerificationAndCleanup(future); @@ -278,13 +279,13 @@ public class ServerManagerTest @Test public void testReferenceCounting() throws Exception { - loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); Future future = assertQueryable( Granularities.DAY, - "test", new Interval("2011-04-04/2011-04-06"), + "test", Intervals.of("2011-04-04/2011-04-06"), ImmutableList.>of( - new Pair("3", new Interval("2011-04-04/2011-04-05")) + new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); @@ -307,7 +308,7 @@ public class ServerManagerTest queryWaitLatch.countDown(); future.get(); - dropQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); for (SegmentForTesting segmentForTesting : factory.getAdapters()) { Assert.assertTrue(segmentForTesting.isClosed()); @@ -317,13 +318,13 @@ public class ServerManagerTest @Test public void testReferenceCountingWhileQueryExecuting() throws Exception { - loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); Future future = assertQueryable( Granularities.DAY, - "test", new Interval("2011-04-04/2011-04-06"), + "test", Intervals.of("2011-04-04/2011-04-06"), ImmutableList.>of( - new Pair("3", new Interval("2011-04-04/2011-04-05")) + new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); @@ -343,7 +344,7 @@ public class ServerManagerTest Assert.assertFalse(segmentForTesting.isClosed()); } - dropQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); for (SegmentForTesting segmentForTesting : factory.getAdapters()) { Assert.assertFalse(segmentForTesting.isClosed()); @@ -360,13 +361,13 @@ public class ServerManagerTest @Test public void testMultipleDrops() throws Exception { - loadQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + loadQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); Future future = assertQueryable( Granularities.DAY, - "test", new Interval("2011-04-04/2011-04-06"), + "test", Intervals.of("2011-04-04/2011-04-06"), ImmutableList.>of( - new Pair("3", new Interval("2011-04-04/2011-04-05")) + new Pair("3", Intervals.of("2011-04-04/2011-04-05")) ) ); @@ -386,8 +387,8 @@ public class ServerManagerTest Assert.assertFalse(segmentForTesting.isClosed()); } - dropQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); - dropQueryable("test", "3", new Interval("2011-04-04/2011-04-05")); + dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); + dropQueryable("test", "3", Intervals.of("2011-04-04/2011-04-05")); for (SegmentForTesting segmentForTesting : factory.getAdapters()) { Assert.assertFalse(segmentForTesting.isClosed()); diff --git a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java 
index a1456819540..dc14592b820 100644 --- a/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordination/ZkCoordinatorTest.java @@ -35,6 +35,7 @@ import io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.curator.announcement.Announcer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.lifecycle.Lifecycle; @@ -269,7 +270,7 @@ public class ZkCoordinatorTest extends CuratorTestBase { zkCoordinator.start(); - final DataSegment segment = makeSegment("test", "1", new Interval("P1d/2011-04-01")); + final DataSegment segment = makeSegment("test", "1", Intervals.of("P1d/2011-04-01")); zkCoordinator.removeSegment(segment, new DataSegmentChangeCallback() { @@ -318,7 +319,7 @@ public class ZkCoordinatorTest extends CuratorTestBase { zkCoordinator.start(); - final DataSegment segment = makeSegment("test", "1", new Interval("P1d/2011-04-01")); + final DataSegment segment = makeSegment("test", "1", Intervals.of("P1d/2011-04-01")); zkCoordinator.addSegment(segment, new DataSegmentChangeCallback() { @@ -371,19 +372,19 @@ public class ZkCoordinatorTest extends CuratorTestBase { List segments = Lists.newLinkedList(); for (int i = 0; i < COUNT; ++i) { - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-01"))); - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-02"))); - segments.add(makeSegment("test" + i, "2", new Interval("P1d/2011-04-02"))); - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-03"))); - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-04"))); - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-05"))); - segments.add(makeSegment("test" + i, "2", new Interval("PT1h/2011-04-04T01"))); - segments.add(makeSegment("test" + i, "2", new Interval("PT1h/2011-04-04T02"))); - segments.add(makeSegment("test" + i, "2", new Interval("PT1h/2011-04-04T03"))); - segments.add(makeSegment("test" + i, "2", new Interval("PT1h/2011-04-04T05"))); - segments.add(makeSegment("test" + i, "2", new Interval("PT1h/2011-04-04T06"))); - segments.add(makeSegment("test_two" + i, "1", new Interval("P1d/2011-04-01"))); - segments.add(makeSegment("test_two" + i, "1", new Interval("P1d/2011-04-02"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-01"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-02"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("P1d/2011-04-02"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-03"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-04"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-05"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("PT1h/2011-04-04T01"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("PT1h/2011-04-04T02"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("PT1h/2011-04-04T03"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("PT1h/2011-04-04T05"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("PT1h/2011-04-04T06"))); + segments.add(makeSegment("test_two" + i, "1", Intervals.of("P1d/2011-04-01"))); + segments.add(makeSegment("test_two" + i, "1", Intervals.of("P1d/2011-04-02"))); } 
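The segment fixtures above, and essentially every remaining hunk in this patch, apply the same one-for-one substitution from direct Joda-Time construction to the Intervals factory. A minimal sketch of the pattern; the class and field names are illustrative, not from the patch:

import io.druid.java.util.common.Intervals;
import org.joda.time.Interval;

class IntervalsSketch
{
  // Before: parsing the ISO-8601 string through Joda's converters, which can
  // pick up the JVM default time zone.
  Interval legacy = new Interval("P1d/2011-04-01");

  // After: the shared factory parses the same string form against UTC.
  Interval utc = Intervals.of("P1d/2011-04-01");
}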
Collections.sort(segments); @@ -532,11 +533,11 @@ public class ZkCoordinatorTest extends CuratorTestBase List segments = Lists.newLinkedList(); for (int i = 0; i < COUNT; ++i) { - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-01"))); - segments.add(makeSegment("test" + i, "1", new Interval("P1d/2011-04-02"))); - segments.add(makeSegment("test" + i, "2", new Interval("P1d/2011-04-02"))); - segments.add(makeSegment("test_two" + i, "1", new Interval("P1d/2011-04-01"))); - segments.add(makeSegment("test_two" + i, "1", new Interval("P1d/2011-04-02"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-01"))); + segments.add(makeSegment("test" + i, "1", Intervals.of("P1d/2011-04-02"))); + segments.add(makeSegment("test" + i, "2", Intervals.of("P1d/2011-04-02"))); + segments.add(makeSegment("test_two" + i, "1", Intervals.of("P1d/2011-04-01"))); + segments.add(makeSegment("test_two" + i, "1", Intervals.of("P1d/2011-04-02"))); } Collections.sort(segments); diff --git a/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java b/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java index 6286cbaf650..736c13bf748 100644 --- a/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java +++ b/server/src/test/java/io/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java @@ -31,6 +31,7 @@ import com.google.common.util.concurrent.MoreExecutors; import io.druid.curator.PotentiallyGzippedCompressionProvider; import io.druid.curator.announcement.Announcer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; import io.druid.server.coordination.BatchDataSegmentAnnouncer; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.SegmentChangeRequestHistory; @@ -43,7 +44,6 @@ import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.curator.test.TestingCluster; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; @@ -353,11 +353,11 @@ public class BatchDataSegmentAnnouncerTest .dataSource("foo") .interval( new Interval( - new DateTime("2013-01-01").plusDays(offset), - new DateTime("2013-01-02").plusDays(offset) + DateTimes.of("2013-01-01").plusDays(offset), + DateTimes.of("2013-01-02").plusDays(offset) ) ) - .version(new DateTime().toString()) + .version(DateTimes.nowUtc().toString()) .dimensions(ImmutableList.of("dim1", "dim2")) .metrics(ImmutableList.of("met1", "met2")) .loadSpec(ImmutableMap.of("type", "local")) diff --git a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java index 874b6a745f8..c592a7bc47e 100644 --- a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java +++ b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyBenchmark.java @@ -22,6 +22,7 @@ package io.druid.server.coordinator; import com.carrotsearch.junitbenchmarks.AbstractBenchmark; import com.carrotsearch.junitbenchmarks.BenchmarkOptions; import com.google.common.util.concurrent.MoreExecutors; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import org.joda.time.Interval; import org.junit.AfterClass; 
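The BatchDataSegmentAnnouncerTest hunk above shows the companion DateTimes factory: fixed instants come from DateTimes.of, "now" comes from DateTimes.nowUtc, and intervals with explicit endpoints keep the two-argument Joda constructor. A short sketch under the same caveat that the names are illustrative:

import io.druid.java.util.common.DateTimes;
import org.joda.time.DateTime;
import org.joda.time.Interval;

class DateTimesSketch
{
  // Fixed instants are parsed in UTC instead of the JVM default zone.
  DateTime start = DateTimes.of("2013-01-01");

  // Endpoint-based intervals still use the plain Joda constructor.
  Interval firstDay = new Interval(start, start.plusDays(1));

  // Version strings use DateTimes.nowUtc() where the tests previously called new DateTime().
  String version = DateTimes.nowUtc().toString();
}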
@@ -86,8 +87,8 @@ public class CostBalancerStrategyBenchmark extends AbstractBenchmark // Benchmark Joda Interval Gap impl vs CostBalancer.gapMillis - private final Interval interval1 = new Interval("2015-01-01T01:00:00Z/2015-01-01T02:00:00Z"); - private final Interval interval2 = new Interval("2015-02-01T01:00:00Z/2015-02-01T02:00:00Z"); + private final Interval interval1 = Intervals.of("2015-01-01T01:00:00Z/2015-01-01T02:00:00Z"); + private final Interval interval2 = Intervals.of("2015-02-01T01:00:00Z/2015-02-01T02:00:00Z"); volatile Long sum; @BenchmarkOptions(warmupRounds = 1000, benchmarkRounds = 1000000) diff --git a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java index 521a33da504..781ddceffd1 100644 --- a/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/CostBalancerStrategyTest.java @@ -25,6 +25,8 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; @@ -40,7 +42,7 @@ import java.util.concurrent.Executors; public class CostBalancerStrategyTest { - private static final Interval day = new Interval("2015-01-01T00/2015-01-01T01"); + private static final Interval day = Intervals.of("2015-01-01T00/2015-01-01T01"); /** * Create Druid cluster with serverCount servers having maxSegments segments each, and 1 server with 98 segment @@ -134,7 +136,7 @@ public class CostBalancerStrategyTest List serverHolderList = setupDummyCluster(10, 20); DataSegment segment = getSegment(1000); - final DateTime referenceTimestamp = new DateTime("2014-01-01"); + final DateTime referenceTimestamp = DateTimes.of("2014-01-01"); BalancerStrategy strategy = new CostBalancerStrategy( MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(1)) ); @@ -146,7 +148,7 @@ public class CostBalancerStrategyTest @Test public void testComputeJointSegmentCost() { - DateTime referenceTime = new DateTime("2014-01-01T00:00:00"); + DateTime referenceTime = DateTimes.of("2014-01-01T00:00:00"); CostBalancerStrategy strategy = new CostBalancerStrategy( MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(4)) ); diff --git a/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java b/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java index 1faf9f3eed8..b6e89f69d6b 100644 --- a/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; @@ -39,7 +40,7 @@ import java.util.concurrent.Executors; public class DiskNormalizedCostBalancerStrategyTest { - private static final 
Interval day = new Interval("2015-01-01T00/2015-01-01T01"); + private static final Interval day = Intervals.of("2015-01-01T00/2015-01-01T01"); /** * Create Druid cluster with serverCount servers having maxSegments segments each, and 1 server with 98 segment diff --git a/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java b/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java index 97b5dbb9906..18acacaa782 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidClusterTest.java @@ -26,11 +26,11 @@ import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.Ordering; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -46,7 +46,7 @@ public class DruidClusterTest private static final List segments = ImmutableList.of( new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", "container1", "blobPath", "blobPath1"), null, @@ -57,7 +57,7 @@ public class DruidClusterTest ), new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", "container2", "blobPath", "blobPath2"), null, diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java index da55a9dd017..8a67377377f 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java @@ -29,6 +29,7 @@ import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.DruidServer; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.DateTimes; import io.druid.metadata.MetadataRuleManager; import io.druid.server.coordinator.helper.DruidCoordinatorRuleRunner; import io.druid.server.coordinator.rules.PeriodLoadRule; @@ -36,7 +37,6 @@ import io.druid.server.coordinator.rules.Rule; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.Period; import org.junit.Before; @@ -97,8 +97,8 @@ public class DruidCoordinatorBalancerProfiler "segment" + i, new DataSegment( "datasource" + i, - new Interval(new DateTime("2012-01-01"), (new DateTime("2012-01-01")).plusHours(1)), - (new DateTime("2012-03-01")).toString(), + new Interval(DateTimes.of("2012-01-01"), (DateTimes.of("2012-01-01")).plusHours(1)), + (DateTimes.of("2012-03-01")).toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -155,7 +155,7 @@ public class DruidCoordinatorBalancerProfiler .withReplicationThrottleLimit(5) .build() ) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withEmitter(emitter) .withDatabaseRuleManager(manager) 
.withReplicationManager(new ReplicationThrottler(2, 500)) @@ -245,7 +245,7 @@ public class DruidCoordinatorBalancerProfiler MAX_SEGMENTS_TO_MOVE ).build() ) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorBalancerTester tester = new DruidCoordinatorBalancerTester(coordinator); watch.start(); diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java index ea6ca8b7921..66a041744ec 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTest.java @@ -27,6 +27,7 @@ import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; @@ -84,9 +85,9 @@ public class DruidCoordinatorBalancerTest segment3 = EasyMock.createMock(DataSegment.class); segment4 = EasyMock.createMock(DataSegment.class); - DateTime start1 = new DateTime("2012-01-01"); - DateTime start2 = new DateTime("2012-02-01"); - DateTime version = new DateTime("2012-03-01"); + DateTime start1 = DateTimes.of("2012-01-01"); + DateTime start2 = DateTimes.of("2012-02-01"); + DateTime version = DateTimes.of("2012-03-01"); segment1 = new DataSegment( "datasource1", new Interval(start1, start1.plusHours(1)), @@ -174,14 +175,25 @@ public class DruidCoordinatorBalancerTest // Mock stuff that the coordinator needs mockCoordinator(coordinator); + BalancerStrategy predefinedPickOrderStrategy = new PredefinedPickOrderBalancerStrategy( + balancerStrategy, + ImmutableList.of( + new BalancerSegmentHolder(druidServer1, segment1), + new BalancerSegmentHolder(druidServer1, segment2), + new BalancerSegmentHolder(druidServer1, segment3), + new BalancerSegmentHolder(druidServer1, segment4) + ) + ); + DruidCoordinatorRuntimeParams params = defaullRuntimeParamsBuilder( ImmutableList.of(druidServer1, druidServer2), ImmutableList.of(peon1, peon2) - ).build(); + ) + .withBalancerStrategy(predefinedPickOrderStrategy) + .build(); params = new DruidCoordinatorBalancerTester(coordinator).run(params); - Assert.assertTrue(params.getCoordinatorStats().getTieredStat("movedCount", "normal") > 0); - Assert.assertTrue(params.getCoordinatorStats().getTieredStat("movedCount", "normal") < segments.size()); + Assert.assertEquals(2, params.getCoordinatorStats().getTieredStat("movedCount", "normal")); } @Test @@ -295,7 +307,7 @@ public class DruidCoordinatorBalancerTest ).build() ) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")); + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")); } private void mockDruidServer( @@ -312,7 +324,13 @@ public class DruidCoordinatorBalancerTest EasyMock.expect(druidServer.getCurrSize()).andReturn(currentSize).atLeastOnce(); EasyMock.expect(druidServer.getMaxSize()).andReturn(maxSize).atLeastOnce(); EasyMock.expect(druidServer.getSegments()).andReturn(segments).anyTimes(); - EasyMock.expect(druidServer.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + if (!segments.isEmpty()) { + segments.values().forEach( + s -> 
EasyMock.expect(druidServer.getSegment(s.getIdentifier())).andReturn(s).anyTimes() + ); + } else { + EasyMock.expect(druidServer.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + } EasyMock.replay(druidServer); } diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTester.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTester.java index d5a9547c7e2..c57e24af16e 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTester.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTester.java @@ -46,7 +46,7 @@ public class DruidCoordinatorBalancerTester extends DruidCoordinatorBalancer final String segmentName = segmentToMove.getIdentifier(); if (!toPeon.getSegmentsToLoad().contains(segmentToMove) && - !currentlyMovingSegments.get("normal").containsKey(segmentName) && + (toServer.getSegment(segmentName) == null) && new ServerHolder(toServer, toPeon).getAvailableSize() > segmentToMove.getSize()) { log.info( "Moving [%s] from [%s] to [%s]", @@ -65,6 +65,9 @@ public class DruidCoordinatorBalancerTester extends DruidCoordinatorBalancer } }); + final LoadQueuePeon dropPeon = params.getLoadManagementPeons().get(fromServerName); + dropPeon.markSegmentToDrop(segment.getSegment()); + currentlyMovingSegments.get("normal").put(segmentName, segment); } catch (Exception e) { diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java index 2ad1723fdc3..a53924c7f47 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java @@ -31,6 +31,8 @@ import com.metamx.emitter.EmittingLogger; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceEventBuilder; import io.druid.client.DruidServer; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.metadata.MetadataRuleManager; import io.druid.segment.IndexIO; import io.druid.server.coordination.ServerType; @@ -75,14 +77,14 @@ public class DruidCoordinatorRuleRunnerTest EmittingLogger.registerEmitter(emitter); databaseRuleManager = EasyMock.createMock(MetadataRuleManager.class); - DateTime start = new DateTime("2012-01-01"); + DateTime start = DateTimes.of("2012-01-01"); availableSegments = Lists.newArrayList(); for (int i = 0; i < 24; i++) { availableSegments.add( new DataSegment( "test", new Interval(start, start.plusHours(1)), - new DateTime().toString(), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -123,15 +125,15 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T06:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T06:00:00.000Z"), ImmutableMap.of("hot", 1) ), new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("normal", 1) ), new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), ImmutableMap.of("cold", 1) ) 
)).atLeastOnce(); @@ -206,7 +208,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withDynamicConfigs(new CoordinatorDynamicConfig.Builder().withMaxSegmentsToMove(5).build()) .build(); @@ -241,11 +243,11 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T06:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T06:00:00.000Z"), ImmutableMap.of("hot", 2) ), new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), ImmutableMap.of("cold", 1) ) ) @@ -316,7 +318,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -349,11 +351,11 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("hot", 1) ), new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), ImmutableMap.of("normal", 1) ) ) @@ -419,7 +421,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -449,11 +451,11 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("hot", 1) ), new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z"), ImmutableMap.of("normal", 1) ) ) @@ -496,7 +498,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); ruleRunner.run(params); @@ -517,7 +519,7 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new 
IntervalLoadRule( - new Interval("2012-01-02T00:00:00.000Z/2012-01-03T00:00:00.000Z"), + Intervals.of("2012-01-02T00:00:00.000Z/2012-01-03T00:00:00.000Z"), ImmutableMap.of("normal", 1) ) ) @@ -578,10 +580,10 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("normal", 1) ), - new IntervalDropRule(new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) + new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) ).atLeastOnce(); EasyMock.replay(databaseRuleManager); @@ -628,7 +630,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -651,10 +653,10 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("normal", 1) ), - new IntervalDropRule(new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) + new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) ).atLeastOnce(); EasyMock.replay(databaseRuleManager); @@ -716,7 +718,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -742,10 +744,10 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ImmutableMap.of("hot", 1) ), - new IntervalDropRule(new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) + new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) ).atLeastOnce(); EasyMock.replay(databaseRuleManager); @@ -811,7 +813,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -835,10 +837,10 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), 
ImmutableMap.of("hot", 1) ), - new IntervalDropRule(new Interval("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) + new IntervalDropRule(Intervals.of("2012-01-01T00:00:00.000Z/2012-01-02T00:00:00.000Z")) ) ).atLeastOnce(); EasyMock.replay(databaseRuleManager); @@ -902,7 +904,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -922,7 +924,7 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2012-01-01T01:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T01:00:00.000Z"), ImmutableMap.of("normal", 0) ) ) @@ -1007,7 +1009,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -1037,7 +1039,7 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), ImmutableMap.of("hot", 2) ) ) @@ -1091,7 +1093,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -1103,8 +1105,8 @@ public class DruidCoordinatorRuleRunnerTest DataSegment overFlowSegment = new DataSegment( "test", - new Interval("2012-02-01/2012-02-02"), - new DateTime().toString(), + Intervals.of("2012-02-01/2012-02-02"), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -1120,7 +1122,7 @@ public class DruidCoordinatorRuleRunnerTest .withAvailableSegments(Arrays.asList(overFlowSegment)) .withDatabaseRuleManager(databaseRuleManager) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .build() ); @@ -1162,7 +1164,7 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2013-01-01T00:00:00.000Z"), ImmutableMap.of( "hot", 1, DruidServer.DEFAULT_TIER, 1 @@ -1223,7 +1225,7 @@ public class DruidCoordinatorRuleRunnerTest .withAvailableSegments(availableSegments) .withDatabaseRuleManager(databaseRuleManager) 
.withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .build(); @@ -1251,7 +1253,7 @@ public class DruidCoordinatorRuleRunnerTest EasyMock.expect(databaseRuleManager.getRulesWithDefault(EasyMock.anyObject())).andReturn( Lists.newArrayList( new IntervalLoadRule( - new Interval("2012-01-01T00:00:00.000Z/2013-01-02T00:00:00.000Z"), + Intervals.of("2012-01-01T00:00:00.000Z/2013-01-02T00:00:00.000Z"), ImmutableMap.of("normal", 1) ) ) @@ -1260,8 +1262,8 @@ public class DruidCoordinatorRuleRunnerTest DataSegment overFlowSegment = new DataSegment( "test", - new Interval("2012-02-01/2012-02-02"), - new DateTime().toString(), + Intervals.of("2012-02-01/2012-02-02"), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -1330,7 +1332,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(segmentReplicantLookup) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .build(); DruidCoordinatorRuntimeParams afterParams = ruleRunner.run(params); @@ -1348,7 +1350,7 @@ public class DruidCoordinatorRuleRunnerTest Set availableSegments = new HashSet<>(); DataSegment v1 = new DataSegment( "test", - new Interval("2012-01-01/2012-01-02"), + Intervals.of("2012-01-01/2012-01-02"), "1", Maps.newHashMap(), Lists.newArrayList(), @@ -1359,7 +1361,7 @@ public class DruidCoordinatorRuleRunnerTest ); DataSegment v2 = new DataSegment( "test", - new Interval("2012-01-01/2012-01-02"), + Intervals.of("2012-01-01/2012-01-02"), "2", Maps.newHashMap(), Lists.newArrayList(), @@ -1417,7 +1419,7 @@ public class DruidCoordinatorRuleRunnerTest .withDatabaseRuleManager(databaseRuleManager) .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withDynamicConfigs(new CoordinatorDynamicConfig.Builder().withMaxSegmentsToMove(5).build()) .build(); @@ -1439,9 +1441,7 @@ public class DruidCoordinatorRuleRunnerTest private void mockCoordinator() { - EasyMock.expect(coordinator.getDynamicConfigs()).andReturn( - createCoordinatorDynamicConfig() - ).anyTimes(); + EasyMock.expect(coordinator.getDynamicConfigs()).andReturn(createCoordinatorDynamicConfig()).anyTimes(); coordinator.removeSegment(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); EasyMock.replay(coordinator); @@ -1450,6 +1450,7 @@ public class DruidCoordinatorRuleRunnerTest private void mockEmptyPeon() { EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).atLeastOnce(); + EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(Sets.newHashSet()).anyTimes(); EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(0L).atLeastOnce(); EasyMock.expect(mockPeon.getNumberOfSegmentsInQueue()).andReturn(0).anyTimes(); EasyMock.replay(mockPeon); diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java index fc541c58e02..eec7a87d88d 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java +++ 
b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java @@ -25,11 +25,11 @@ import com.google.common.collect.Lists; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.indexing.IndexingServiceClient; import io.druid.common.config.JacksonConfigManager; +import io.druid.java.util.common.Intervals; import io.druid.server.coordinator.helper.DruidCoordinatorSegmentMerger; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -46,10 +46,10 @@ public class DruidCoordinatorSegmentMergerTest public void testNoMerges() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -61,10 +61,10 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeAtStart() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(90).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(90).build() ); Assert.assertEquals( @@ -78,10 +78,10 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeAtEnd() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(20).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + 
DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(20).build() ); Assert.assertEquals( @@ -95,10 +95,10 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeInMiddle() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(10).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(20).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(10).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(20).build() ); Assert.assertEquals( @@ -112,9 +112,9 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeNoncontiguous() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(10).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(10).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(10).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(10).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(10).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(10).build() ); Assert.assertEquals( @@ -128,12 +128,12 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeSeriesByteLimited() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(40).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(40).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(40).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(40).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("2").size(40).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(40).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(40).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(40).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(40).build(), + 
DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(40).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("2").size(40).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(40).build() ); Assert.assertEquals( @@ -149,16 +149,16 @@ public class DruidCoordinatorSegmentMergerTest public void testMergeSeriesSegmentLimited() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-07/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-08/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-09/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-10/P1D")).version("2").size(1).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-07/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-08/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-09/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-10/P1D")).version("2").size(1).build() ); Assert.assertEquals( @@ -182,13 +182,13 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMergeWithBacktracking() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("2").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P4D")).version("2").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("3").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("4").size(20).build(), - 
DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("3").size(20).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-07/P1D")).version("2").size(20).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P4D")).version("2").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("3").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("4").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("3").size(20).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-07/P1D")).version("2").size(20).build() ); Assert.assertEquals( @@ -203,10 +203,10 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMergeWithGapsAlignedStart() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P8D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("3").size(8).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("3").size(8).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-09/P1D")).version("3").size(8).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P8D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("3").size(8).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("3").size(8).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-09/P1D")).version("3").size(8).build() ); Assert.assertEquals( @@ -220,10 +220,10 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMergeWithGapsNonalignedStart() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P8D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P1D")).version("3").size(8).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("3").size(8).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-09/P1D")).version("3").size(8).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P8D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("3").size(8).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("3").size(8).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-09/P1D")).version("3").size(8).build() ); Assert.assertEquals( @@ -237,12 +237,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge1() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new 
Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("1").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -254,12 +254,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge2() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(15).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("4").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(15).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -273,12 +273,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge3() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("1").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new 
Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -292,12 +292,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge4() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("4").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -311,12 +311,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge5() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("1").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), + 
DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -328,12 +328,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge6() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("4").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(25).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -347,12 +347,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge7() { final List segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(120).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("4").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(120).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( @@ -366,12 +366,12 @@ public class DruidCoordinatorSegmentMergerTest public void testOverlappingMerge8() { final List 
segments = ImmutableList.of( - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-01/P1D")).version("2").size(80).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-02/P4D")).version("2").size(120).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-03/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-04/P1D")).version("1").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-05/P1D")).version("3").size(1).build(), - DataSegment.builder().dataSource("foo").interval(new Interval("2012-01-06/P1D")).version("2").size(80).build() + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(120).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), + DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals(ImmutableList.of(ImmutableList.of(segments.get(4), segments.get(5))), merge(segments)); @@ -383,18 +383,18 @@ public class DruidCoordinatorSegmentMergerTest final List segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/P1D")) + .interval(Intervals.of("2012-01-01/P1D")) .version("1") .shardSpec(new LinearShardSpec(1)) .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-02/P1D")) + .interval(Intervals.of("2012-01-02/P1D")) .version("1") .shardSpec(new LinearShardSpec(7)) .build(), DataSegment.builder().dataSource("foo") - .interval(new Interval("2012-01-03/P1D")) + .interval(Intervals.of("2012-01-03/P1D")) .version("1") .shardSpec(new LinearShardSpec(1500)) .build() @@ -412,25 +412,25 @@ public class DruidCoordinatorSegmentMergerTest final List segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-01/P1D")) + .interval(Intervals.of("2012-01-01/P1D")) .version("1") .build(), DataSegment.builder() .dataSource("foo") - .interval(new Interval("2012-01-02/P1D")) + .interval(Intervals.of("2012-01-02/P1D")) .version("1") .build(), DataSegment.builder().dataSource("foo") - .interval(new Interval("2012-01-03/P1D")) + .interval(Intervals.of("2012-01-03/P1D")) .version("1") .shardSpec(new LinearShardSpec(1500)) .build(), DataSegment.builder().dataSource("foo") - .interval(new Interval("2012-01-04/P1D")) + .interval(Intervals.of("2012-01-04/P1D")) .version("1") .build(), DataSegment.builder().dataSource("foo") - .interval(new Interval("2012-01-05/P1D")) + .interval(Intervals.of("2012-01-05/P1D")) .version("1") .build() ); diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java index ea963962428..0946c362b15 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java @@ -35,7 +35,9 @@ import io.druid.common.config.JacksonConfigManager; import 
io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.curator.discovery.NoopServiceAnnouncer; +import io.druid.discovery.DruidLeaderSelector; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.concurrent.ScheduledExecutorFactory; import io.druid.metadata.MetadataRuleManager; import io.druid.metadata.MetadataSegmentManager; @@ -55,6 +57,7 @@ import org.apache.curator.framework.recipes.cache.PathChildrenCache; import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; import org.apache.curator.utils.ZKPaths; +import org.easymock.Capture; import org.easymock.EasyMock; import org.joda.time.Duration; import org.joda.time.Interval; @@ -65,6 +68,7 @@ import org.junit.Test; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; @@ -192,7 +196,8 @@ public class DruidCoordinatorTest extends CuratorTestBase loadManagementPeons, null, new CostBalancerStrategyFactory(), - EasyMock.createNiceMock(LookupCoordinatorManager.class) + EasyMock.createNiceMock(LookupCoordinatorManager.class), + new TestDruidLeaderSelector() ); } @@ -207,18 +212,31 @@ public class DruidCoordinatorTest extends CuratorTestBase @Test public void testMoveSegment() throws Exception { - loadQueuePeon = EasyMock.createNiceMock(LoadQueuePeon.class); segment = EasyMock.createNiceMock(DataSegment.class); EasyMock.expect(segment.getIdentifier()).andReturn("dummySegment"); EasyMock.expect(segment.getDataSource()).andReturn("dummyDataSource"); EasyMock.replay(segment); + + loadQueuePeon = EasyMock.createNiceMock(LoadQueuePeon.class); EasyMock.expect(loadQueuePeon.getLoadQueueSize()).andReturn(new Long(1)); + loadQueuePeon.markSegmentToDrop(segment); + EasyMock.expectLastCall().once(); + Capture loadCallbackCapture = Capture.newInstance(); + Capture dropCallbackCapture = Capture.newInstance(); + loadQueuePeon.loadSegment(EasyMock.anyObject(DataSegment.class), EasyMock.capture(loadCallbackCapture)); + EasyMock.expectLastCall().once(); + loadQueuePeon.dropSegment(EasyMock.anyObject(DataSegment.class), EasyMock.capture(dropCallbackCapture)); + EasyMock.expectLastCall().once(); + loadQueuePeon.unmarkSegmentToDrop(segment); + EasyMock.expectLastCall().once(); + EasyMock.expect(loadQueuePeon.getSegmentsToDrop()).andReturn(new HashSet<>()).once(); + EasyMock.replay(loadQueuePeon); + DruidDataSource druidDataSource = EasyMock.createNiceMock(DruidDataSource.class); EasyMock.expect(druidDataSource.getSegment(EasyMock.anyString())).andReturn(segment); EasyMock.replay(druidDataSource); EasyMock.expect(databaseSegmentManager.getInventoryValue(EasyMock.anyString())).andReturn(druidDataSource); EasyMock.replay(databaseSegmentManager); - EasyMock.replay(loadQueuePeon); scheduledExecutorFactory = EasyMock.createNiceMock(ScheduledExecutorFactory.class); EasyMock.replay(scheduledExecutorFactory); EasyMock.replay(metadataRuleManager); @@ -246,6 +264,7 @@ public class DruidCoordinatorTest extends CuratorTestBase loadManagementPeons.put("from", loadQueuePeon); loadManagementPeons.put("to", loadQueuePeon); + EasyMock.expect(serverInventoryView.isSegmentLoadedByServer("to", segment)).andReturn(true).once(); EasyMock.replay(serverInventoryView); coordinator.moveSegment( @@ -253,6 +272,13 @@ public class DruidCoordinatorTest extends CuratorTestBase 
druidServer2.toImmutableDruidServer(), segment, null ); + + LoadPeonCallback loadCallback = loadCallbackCapture.getValue(); + loadCallback.execute(); + + LoadPeonCallback dropCallback = dropCallbackCapture.getValue(); + dropCallback.execute(); + EasyMock.verify(druidServer); EasyMock.verify(druidServer2); EasyMock.verify(loadQueuePeon); @@ -280,7 +306,7 @@ public class DruidCoordinatorTest extends CuratorTestBase DruidDataSource[] druidDataSources = { new DruidDataSource(dataSource, Collections.emptyMap()) }; - final DataSegment dataSegment = new DataSegment(dataSource, new Interval("2010-01-01/P1D"), "v1", null, null, null, null, 0x9, 0); + final DataSegment dataSegment = new DataSegment(dataSource, Intervals.of("2010-01-01/P1D"), "v1", null, null, null, null, 0x9, 0); druidDataSources[0].addSegment("0", dataSegment); EasyMock.expect(databaseSegmentManager.isStarted()).andReturn(true).anyTimes(); @@ -384,10 +410,10 @@ public class DruidCoordinatorTest extends CuratorTestBase { DruidDataSource dataSource = new DruidDataSource("test", new HashMap()); DataSegment[] segments = new DataSegment[]{ - getSegment("test", new Interval("2016-01-10T03:00:00Z/2016-01-10T04:00:00Z")), - getSegment("test", new Interval("2016-01-11T01:00:00Z/2016-01-11T02:00:00Z")), - getSegment("test", new Interval("2016-01-09T10:00:00Z/2016-01-09T11:00:00Z")), - getSegment("test", new Interval("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z")) + getSegment("test", Intervals.of("2016-01-10T03:00:00Z/2016-01-10T04:00:00Z")), + getSegment("test", Intervals.of("2016-01-11T01:00:00Z/2016-01-11T02:00:00Z")), + getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T11:00:00Z")), + getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z")) }; for (DataSegment segment : segments) { dataSource.addSegment(segment.getIdentifier(), segment); @@ -399,10 +425,10 @@ public class DruidCoordinatorTest extends CuratorTestBase EasyMock.replay(databaseSegmentManager); Set availableSegments = coordinator.getOrderedAvailableDataSegments(); DataSegment[] expected = new DataSegment[]{ - getSegment("test", new Interval("2016-01-11T01:00:00Z/2016-01-11T02:00:00Z")), - getSegment("test", new Interval("2016-01-10T03:00:00Z/2016-01-10T04:00:00Z")), - getSegment("test", new Interval("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z")), - getSegment("test", new Interval("2016-01-09T10:00:00Z/2016-01-09T11:00:00Z")) + getSegment("test", Intervals.of("2016-01-11T01:00:00Z/2016-01-11T02:00:00Z")), + getSegment("test", Intervals.of("2016-01-10T03:00:00Z/2016-01-10T04:00:00Z")), + getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T12:00:00Z")), + getSegment("test", Intervals.of("2016-01-09T10:00:00Z/2016-01-09T11:00:00Z")) }; Assert.assertEquals(expected.length, availableSegments.size()); Assert.assertEquals(expected, availableSegments.toArray()); @@ -419,4 +445,43 @@ public class DruidCoordinatorTest extends CuratorTestBase ); return segment; } + + private static class TestDruidLeaderSelector implements DruidLeaderSelector + { + private volatile Listener listener; + private volatile String leader; + + @Override + public String getCurrentLeader() + { + return leader; + } + + @Override + public boolean isLeader() + { + return leader != null; + } + + @Override + public int localTerm() + { + return 0; + } + + @Override + public void registerListener(Listener listener) + { + this.listener = listener; + leader = "what:1234"; + listener.becomeLeader(); + } + + @Override + public void unregisterListener() + { + leader = null; + 
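// unregistering simulates losing leadership in this test stub: clear the current leader, then notify the listener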
listener.stopBeingLeader(); + } + } } diff --git a/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java b/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java index 68e5d8cd895..108e1ac5c97 100644 --- a/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java +++ b/server/src/test/java/io/druid/server/coordinator/LoadQueuePeonTest.java @@ -27,6 +27,7 @@ import com.google.common.collect.Lists; import io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DataSegmentChangeCallback; import io.druid.server.coordination.DataSegmentChangeHandler; import io.druid.server.coordination.DataSegmentChangeRequest; @@ -40,7 +41,6 @@ import org.apache.curator.framework.recipes.cache.PathChildrenCacheEvent; import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; import org.apache.curator.utils.ZKPaths; import org.joda.time.Duration; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -350,7 +350,7 @@ public class LoadQueuePeonTest extends CuratorTestBase { return DataSegment.builder() .dataSource("test_load_queue_peon") - .interval(new Interval(intervalStr)) + .interval(Intervals.of(intervalStr)) .loadSpec(ImmutableMap.of()) .version("2015-05-27T03:38:35.683Z") .dimensions(ImmutableList.of()) diff --git a/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java b/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java index 4b33c928a8b..700972d25bc 100644 --- a/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/ReservoirSegmentSamplerTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; @@ -73,9 +74,9 @@ public class ReservoirSegmentSamplerTest segment3 = EasyMock.createMock(DataSegment.class); segment4 = EasyMock.createMock(DataSegment.class); - DateTime start1 = new DateTime("2012-01-01"); - DateTime start2 = new DateTime("2012-02-01"); - DateTime version = new DateTime("2012-03-01"); + DateTime start1 = DateTimes.of("2012-01-01"); + DateTime start2 = DateTimes.of("2012-02-01"); + DateTime version = DateTimes.of("2012-03-01"); segment1 = new DataSegment( "datasource1", new Interval(start1, start1.plusHours(1)), diff --git a/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java b/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java index 7f1bc806c65..30a97d0a9c6 100644 --- a/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java +++ b/server/src/test/java/io/druid/server/coordinator/ServerHolderTest.java @@ -24,11 +24,11 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import 
org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -40,7 +40,7 @@ public class ServerHolderTest private static final List segments = ImmutableList.of( new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", "container1", "blobPath", "blobPath1"), null, @@ -51,7 +51,7 @@ public class ServerHolderTest ), new DataSegment( "test", - new Interval("2015-04-12/2015-04-13"), + Intervals.of("2015-04-12/2015-04-13"), "1", ImmutableMap.of("containerName", "container2", "blobPath", "blobPath2"), null, diff --git a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java index 9bd095be152..cdcdf436d90 100644 --- a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java +++ b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowedTest.java @@ -26,6 +26,7 @@ import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.Ordering; import io.druid.client.ImmutableDruidDataSource; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.DateTimes; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DruidCluster; import io.druid.server.coordinator.DruidCoordinator; @@ -46,7 +47,7 @@ public class DruidCoordinatorCleanupOvershadowedTest DruidCoordinatorCleanupOvershadowed druidCoordinatorCleanupOvershadowed; DruidCoordinator coordinator = EasyMock.createStrictMock(DruidCoordinator.class); private List availableSegments; - DateTime start = new DateTime("2012-01-01"); + DateTime start = DateTimes.of("2012-01-01"); DruidCluster druidCluster; private LoadQueuePeon mockPeon = EasyMock.createMock(LoadQueuePeon.class); private ImmutableDruidServer druidServer = EasyMock.createMock(ImmutableDruidServer.class); diff --git a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java index 89e1d6c9887..ae7ed642d48 100644 --- a/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java +++ b/server/src/test/java/io/druid/server/coordinator/helper/DruidCoordinatorSegmentKillerTest.java @@ -21,6 +21,7 @@ package io.druid.server.coordinator.helper; import com.google.common.collect.ImmutableList; import io.druid.client.indexing.IndexingServiceClient; +import io.druid.java.util.common.Intervals; import io.druid.metadata.MetadataSegmentManager; import io.druid.server.coordinator.TestDruidCoordinatorConfig; import org.easymock.EasyMock; @@ -41,45 +42,45 @@ public class DruidCoordinatorSegmentKillerTest testFindIntervalForKillTask(null, null); testFindIntervalForKillTask(ImmutableList.of(), null); - testFindIntervalForKillTask(ImmutableList.of(Interval.parse("2014/2015")), Interval.parse("2014/2015")); + testFindIntervalForKillTask(ImmutableList.of(Intervals.of("2014/2015")), Intervals.of("2014/2015")); testFindIntervalForKillTask( - ImmutableList.of(Interval.parse("2014/2015"), Interval.parse("2016/2017")), - Interval.parse("2014/2017") + ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2016/2017")), + Intervals.of("2014/2017") ); testFindIntervalForKillTask( - ImmutableList.of(Interval.parse("2014/2015"), Interval.parse("2015/2016")), - 
Interval.parse("2014/2016") + ImmutableList.of(Intervals.of("2014/2015"), Intervals.of("2015/2016")), + Intervals.of("2014/2016") ); testFindIntervalForKillTask( - ImmutableList.of(Interval.parse("2015/2016"), Interval.parse("2014/2015")), - Interval.parse("2014/2016") + ImmutableList.of(Intervals.of("2015/2016"), Intervals.of("2014/2015")), + Intervals.of("2014/2016") ); testFindIntervalForKillTask( - ImmutableList.of(Interval.parse("2015/2017"), Interval.parse("2014/2016")), - Interval.parse("2014/2017") + ImmutableList.of(Intervals.of("2015/2017"), Intervals.of("2014/2016")), + Intervals.of("2014/2017") ); testFindIntervalForKillTask( ImmutableList.of( - Interval.parse("2015/2019"), - Interval.parse("2014/2016"), - Interval.parse("2018/2020") + Intervals.of("2015/2019"), + Intervals.of("2014/2016"), + Intervals.of("2018/2020") ), - Interval.parse("2014/2020") + Intervals.of("2014/2020") ); testFindIntervalForKillTask( ImmutableList.of( - Interval.parse("2015/2019"), - Interval.parse("2014/2016"), - Interval.parse("2018/2020"), - Interval.parse("2021/2022") + Intervals.of("2015/2019"), + Intervals.of("2014/2016"), + Intervals.of("2018/2020"), + Intervals.of("2021/2022") ), - Interval.parse("2014/2022") + Intervals.of("2014/2022") ); } diff --git a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java index ac76a368de5..93196fca4da 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java @@ -24,7 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; -import org.joda.time.Interval; +import io.druid.java.util.common.Intervals; import org.joda.time.Period; import org.junit.Test; import org.junit.runner.RunWith; @@ -47,9 +47,9 @@ public class BroadcastDistributionRuleSerdeTest new Object[]{new ForeverBroadcastDistributionRule(ImmutableList.of("large_source1", "large_source2"))}, new Object[]{new ForeverBroadcastDistributionRule(ImmutableList.of())}, new Object[]{new ForeverBroadcastDistributionRule(null)}, - new Object[]{new IntervalBroadcastDistributionRule(new Interval("0/1000"), ImmutableList.of("large_source"))}, - new Object[]{new IntervalBroadcastDistributionRule(new Interval("0/1000"), ImmutableList.of())}, - new Object[]{new IntervalBroadcastDistributionRule(new Interval("0/1000"), null)}, + new Object[]{new IntervalBroadcastDistributionRule(Intervals.of("0/1000"), ImmutableList.of("large_source"))}, + new Object[]{new IntervalBroadcastDistributionRule(Intervals.of("0/1000"), ImmutableList.of())}, + new Object[]{new IntervalBroadcastDistributionRule(Intervals.of("0/1000"), null)}, new Object[]{new PeriodBroadcastDistributionRule(new Period(1000), ImmutableList.of("large_source"))}, new Object[]{new PeriodBroadcastDistributionRule(new Period(1000), ImmutableList.of())}, new Object[]{new PeriodBroadcastDistributionRule(new Period(1000), null)} diff --git a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java index 9aa2ff77931..f4719a141b9 100644 --- 
a/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java @@ -26,6 +26,8 @@ import com.google.common.collect.Maps; import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.Ordering; import io.druid.client.DruidServer; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.DruidCluster; @@ -35,8 +37,6 @@ import io.druid.server.coordinator.SegmentReplicantLookup; import io.druid.server.coordinator.ServerHolder; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; -import org.joda.time.DateTime; -import org.joda.time.Interval; import org.junit.Before; import org.junit.Test; @@ -61,8 +61,8 @@ public class BroadcastDistributionRuleTest { smallSegment = new DataSegment( "small_source", - new Interval("0/1000"), - new DateTime().toString(), + Intervals.of("0/1000"), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -75,8 +75,8 @@ public class BroadcastDistributionRuleTest largeSegments.add( new DataSegment( "large_source", - new Interval((i * 1000) + "/" + ((i + 1) * 1000)), - new DateTime().toString(), + Intervals.of((i * 1000) + "/" + ((i + 1) * 1000)), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -91,8 +91,8 @@ public class BroadcastDistributionRuleTest largeSegments2.add( new DataSegment( "large_source2", - new Interval((i * 1000) + "/" + ((i + 1) * 1000)), - new DateTime().toString(), + Intervals.of((i * 1000) + "/" + ((i + 1) * 1000)), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), @@ -227,7 +227,7 @@ public class BroadcastDistributionRuleTest DruidCoordinatorRuntimeParams.newBuilder() .withDruidCluster(druidCluster) .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Lists.newArrayList( smallSegment, largeSegments.get(0), @@ -267,7 +267,7 @@ public class BroadcastDistributionRuleTest DruidCoordinatorRuntimeParams.newBuilder() .withDruidCluster(druidCluster) .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Lists.newArrayList( smallSegment, largeSegments.get(0), @@ -305,7 +305,7 @@ public class BroadcastDistributionRuleTest DruidCoordinatorRuntimeParams.newBuilder() .withDruidCluster(druidCluster) .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Lists.newArrayList( smallSegment, largeSegments.get(0), diff --git a/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java index c3362cebcd2..a13209316a4 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java @@ 
-23,7 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; -import org.joda.time.Interval; +import io.druid.java.util.common.Intervals; import org.junit.Assert; import org.junit.Test; @@ -35,7 +35,7 @@ import org.junit.Test; public void testSerde() throws Exception { IntervalLoadRule rule = new IntervalLoadRule( - new Interval("0/3000"), + Intervals.of("0/3000"), ImmutableMap.of(DruidServer.DEFAULT_TIER, 2) ); @@ -49,7 +49,7 @@ import org.junit.Test; public void testSerdeNullTieredReplicants() throws Exception { IntervalLoadRule rule = new IntervalLoadRule( - new Interval("0/3000"), null + Intervals.of("0/3000"), null ); ObjectMapper jsonMapper = new DefaultObjectMapper(); diff --git a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java index c107400b7d0..f62574c7ec0 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/LoadRuleTest.java @@ -36,6 +36,8 @@ import com.metamx.emitter.core.LoggingEmitter; import com.metamx.emitter.service.ServiceEmitter; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.server.coordinator.BalancerStrategy; import io.druid.server.coordinator.CoordinatorDynamicConfig; @@ -43,7 +45,6 @@ import io.druid.server.coordinator.CoordinatorStats; import io.druid.server.coordinator.CostBalancerStrategyFactory; import io.druid.server.coordinator.DruidCluster; import io.druid.server.coordinator.DruidCoordinatorRuntimeParams; -import io.druid.server.coordinator.LoadPeonCallback; import io.druid.server.coordinator.LoadQueuePeon; import io.druid.server.coordinator.LoadQueuePeonTester; import io.druid.server.coordinator.ReplicationThrottler; @@ -108,9 +109,10 @@ public class LoadRuleTest @Test public void testLoad() throws Exception { - mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); - EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).atLeastOnce(); + EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).atLeastOnce(); + EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(Sets.newHashSet()).anyTimes(); EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(0L).atLeastOnce(); EasyMock.expect(mockPeon.getNumberOfSegmentsInQueue()).andReturn(0).anyTimes(); EasyMock.replay(mockPeon); @@ -205,7 +207,7 @@ public class LoadRuleTest .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) .withReplicationManager(throttler) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Arrays.asList(segment)).build(), segment ); @@ -218,9 +220,10 @@ public class LoadRuleTest @Test public void testDrop() throws Exception { - mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); - 
EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).atLeastOnce(); + EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).atLeastOnce(); + EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(Sets.newHashSet()).anyTimes(); EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(0L).anyTimes(); EasyMock.expect(mockPeon.getNumberOfSegmentsInQueue()).andReturn(0).anyTimes(); EasyMock.replay(mockPeon); @@ -319,7 +322,7 @@ public class LoadRuleTest .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) .withReplicationManager(throttler) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Arrays.asList(segment)).build(), segment ); @@ -332,9 +335,10 @@ public class LoadRuleTest @Test public void testLoadWithNonExistentTier() throws Exception { - mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.loadSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); - EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).atLeastOnce(); + EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).atLeastOnce(); + EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(Sets.newHashSet()).anyTimes(); EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(0L).atLeastOnce(); EasyMock.expect(mockPeon.getNumberOfSegmentsInQueue()).andReturn(0).anyTimes(); EasyMock.replay(mockPeon); @@ -412,7 +416,7 @@ public class LoadRuleTest .withSegmentReplicantLookup(SegmentReplicantLookup.make(new DruidCluster())) .withReplicationManager(throttler) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Arrays.asList(segment)).build(), segment ); @@ -424,9 +428,10 @@ public class LoadRuleTest @Test public void testDropWithNonExistentTier() throws Exception { - mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); + mockPeon.dropSegment(EasyMock.anyObject(), EasyMock.anyObject()); EasyMock.expectLastCall().atLeastOnce(); - EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).atLeastOnce(); + EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).atLeastOnce(); + EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(Sets.newHashSet()).anyTimes(); EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(0L).anyTimes(); EasyMock.expect(mockPeon.getNumberOfSegmentsInQueue()).andReturn(0).anyTimes(); EasyMock.replay(mockPeon); @@ -521,7 +526,7 @@ public class LoadRuleTest .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) .withReplicationManager(throttler) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Arrays.asList(segment)).build(), segment ); @@ -612,7 +617,7 @@ public class LoadRuleTest .withSegmentReplicantLookup(SegmentReplicantLookup.make(druidCluster)) .withReplicationManager(throttler) .withBalancerStrategy(balancerStrategy) - .withBalancerReferenceTimestamp(new DateTime("2013-01-01")) + .withBalancerReferenceTimestamp(DateTimes.of("2013-01-01")) .withAvailableSegments(Arrays.asList(dataSegment1, dataSegment2, dataSegment3)) .withDynamicConfigs(new 
CoordinatorDynamicConfig.Builder().withMaxSegmentsInNodeLoadingQueue(2).build()) .build(); @@ -631,8 +636,8 @@ public class LoadRuleTest { return new DataSegment( dataSource, - new Interval("0/3000"), - new DateTime().toString(), + Intervals.of("0/3000"), + DateTimes.nowUtc().toString(), Maps.newHashMap(), Lists.newArrayList(), Lists.newArrayList(), diff --git a/server/src/test/java/io/druid/server/coordinator/rules/PeriodDropRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/PeriodDropRuleTest.java index 5d34502ed78..699136fc7ac 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/PeriodDropRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/PeriodDropRuleTest.java @@ -19,6 +19,7 @@ package io.druid.server.coordinator.rules; +import io.druid.java.util.common.DateTimes; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.DateTime; @@ -33,13 +34,13 @@ public class PeriodDropRuleTest { private final static DataSegment.Builder builder = DataSegment.builder() .dataSource("test") - .version(new DateTime("2012-12-31T01:00:00").toString()) + .version(DateTimes.of("2012-12-31T01:00:00").toString()) .shardSpec(NoneShardSpec.instance()); @Test public void testAppliesToAll() { - DateTime now = new DateTime("2012-12-31T01:00:00"); + DateTime now = DateTimes.of("2012-12-31T01:00:00"); PeriodDropRule rule = new PeriodDropRule( new Period("P5000Y") ); @@ -67,7 +68,7 @@ public class PeriodDropRuleTest @Test public void testAppliesToPeriod() { - DateTime now = new DateTime("2012-12-31T01:00:00"); + DateTime now = DateTimes.of("2012-12-31T01:00:00"); PeriodDropRule rule = new PeriodDropRule( new Period("P1M") ); diff --git a/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java index 688a1711dd8..c3c17615755 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java @@ -23,6 +23,8 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.DateTime; @@ -37,27 +39,27 @@ public class PeriodLoadRuleTest { private final static DataSegment.Builder builder = DataSegment.builder() .dataSource("test") - .version(new DateTime().toString()) + .version(DateTimes.nowUtc().toString()) .shardSpec(NoneShardSpec.instance()); @Test public void testAppliesToAll() { - DateTime now = new DateTime("2013-01-01"); + DateTime now = DateTimes.of("2013-01-01"); PeriodLoadRule rule = new PeriodLoadRule( new Period("P5000Y"), ImmutableMap.of("", 0) ); - Assert.assertTrue(rule.appliesTo(builder.interval(new Interval("2012-01-01/2012-12-31")).build(), now)); - Assert.assertTrue(rule.appliesTo(builder.interval(new Interval("1000-01-01/2012-12-31")).build(), now)); - Assert.assertTrue(rule.appliesTo(builder.interval(new Interval("0500-01-01/2100-12-31")).build(), now)); + Assert.assertTrue(rule.appliesTo(builder.interval(Intervals.of("2012-01-01/2012-12-31")).build(), now)); + Assert.assertTrue(rule.appliesTo(builder.interval(Intervals.of("1000-01-01/2012-12-31")).build(), now)); + 
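// even an interval stretching from 0500 to 2100 is expected to fall within the P5000Y load rule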
Assert.assertTrue(rule.appliesTo(builder.interval(Intervals.of("0500-01-01/2100-12-31")).build(), now)); } @Test public void testAppliesToPeriod() { - DateTime now = new DateTime("2012-12-31T01:00:00"); + DateTime now = DateTimes.of("2012-12-31T01:00:00"); PeriodLoadRule rule = new PeriodLoadRule( new Period("P1M"), ImmutableMap.of("", 0) diff --git a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java index b9e31b48603..a254ff9d47e 100644 --- a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java +++ b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java @@ -25,6 +25,7 @@ import io.druid.client.CoordinatorServerView; import io.druid.client.DruidDataSource; import io.druid.client.DruidServer; import io.druid.client.indexing.IndexingServiceClient; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.server.security.Access; import io.druid.server.security.Action; @@ -66,7 +67,7 @@ public class DatasourcesResourceTest dataSegmentList.add( new DataSegment( "datasource1", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), null, null, null, @@ -79,7 +80,7 @@ public class DatasourcesResourceTest dataSegmentList.add( new DataSegment( "datasource1", - new Interval("2010-01-22/P1D"), + Intervals.of("2010-01-22/P1D"), null, null, null, @@ -92,7 +93,7 @@ public class DatasourcesResourceTest dataSegmentList.add( new DataSegment( "datasource2", - new Interval("2010-01-01/P1D"), + Intervals.of("2010-01-01/P1D"), null, null, null, @@ -260,7 +261,7 @@ public class DatasourcesResourceTest DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap()); dataSource1.addSegment( "partition", - new DataSegment("datasegment1", new Interval("2010-01-01/P1D"), null, null, null, null, null, 0x9, 10) + new DataSegment("datasegment1", Intervals.of("2010-01-01/P1D"), null, null, null, null, null, 0x9, 10) ); EasyMock.expect(server.getDataSource("datasource1")).andReturn( dataSource1 @@ -339,8 +340,8 @@ public class DatasourcesResourceTest EasyMock.replay(inventoryView); List expectedIntervals = new ArrayList<>(); - expectedIntervals.add(new Interval("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); - expectedIntervals.add(new Interval("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null, new AuthConfig()); Response response = datasourcesResource.getSegmentDataSourceIntervals("invalidDataSource", null, null); @@ -448,7 +449,7 @@ public class DatasourcesResourceTest public void testDeleteDataSourceSpecificInterval() throws Exception { String interval = "2010-01-01_P1D"; - Interval theInterval = new Interval(interval.replace("_", "/")); + Interval theInterval = Intervals.of(interval.replace("_", "/")); IndexingServiceClient indexingServiceClient = EasyMock.createStrictMock(IndexingServiceClient.class); indexingServiceClient.killSegments("datasource1", theInterval); diff --git a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java index 3c437cc8f2a..5ec8f6857d9 100644 --- a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java +++ 
b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java @@ -22,6 +22,7 @@ package io.druid.server.http; import com.google.common.collect.ImmutableList; import io.druid.client.DruidServer; import io.druid.client.InventoryView; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.ServerType; import io.druid.server.security.AuthConfig; import io.druid.timeline.DataSegment; @@ -57,7 +58,7 @@ public class IntervalsResourceTest dataSegmentList.add( new DataSegment( "datasource1", - new Interval("2010-01-01T00:00:00.000Z/P1D"), + Intervals.of("2010-01-01T00:00:00.000Z/P1D"), null, null, null, @@ -70,7 +71,7 @@ public class IntervalsResourceTest dataSegmentList.add( new DataSegment( "datasource1", - new Interval("2010-01-22T00:00:00.000Z/P1D"), + Intervals.of("2010-01-22T00:00:00.000Z/P1D"), null, null, null, @@ -83,7 +84,7 @@ public class IntervalsResourceTest dataSegmentList.add( new DataSegment( "datasource2", - new Interval("2010-01-01T00:00:00.000Z/P1D"), + Intervals.of("2010-01-01T00:00:00.000Z/P1D"), null, null, null, @@ -108,8 +109,8 @@ public class IntervalsResourceTest EasyMock.replay(inventoryView); List expectedIntervals = new ArrayList<>(); - expectedIntervals.add(new Interval("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); - expectedIntervals.add(new Interval("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-22T00:00:00.000Z/2010-01-23T00:00:00.000Z")); IntervalsResource intervalsResource = new IntervalsResource(inventoryView, new AuthConfig()); Response response = intervalsResource.getIntervals(request); @@ -135,7 +136,7 @@ public class IntervalsResourceTest EasyMock.replay(inventoryView); List expectedIntervals = new ArrayList<>(); - expectedIntervals.add(new Interval("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); IntervalsResource intervalsResource = new IntervalsResource(inventoryView, new AuthConfig()); Response response = intervalsResource.getSpecificIntervals("2010-01-01T00:00:00.000Z/P1D", "simple", null, request); @@ -156,7 +157,7 @@ public class IntervalsResourceTest EasyMock.replay(inventoryView); List expectedIntervals = new ArrayList<>(); - expectedIntervals.add(new Interval("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); + expectedIntervals.add(Intervals.of("2010-01-01T00:00:00.000Z/2010-01-02T00:00:00.000Z")); IntervalsResource intervalsResource = new IntervalsResource(inventoryView, new AuthConfig()); Response response = intervalsResource.getSpecificIntervals("2010-01-01T00:00:00.000Z/P1D", null, "full", request); diff --git a/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java b/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java index 23e0b2ce91f..499c236303b 100644 --- a/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java +++ b/server/src/test/java/io/druid/server/http/LookupCoordinatorResourceTest.java @@ -22,6 +22,7 @@ package io.druid.server.http; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.io.ByteSource; import com.google.common.net.HostAndPort; import io.druid.audit.AuditInfo; @@ -42,8 +43,8 @@ import 
java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; -import java.util.List; import java.util.Map; +import java.util.Set; public class LookupCoordinatorResourceTest { @@ -147,7 +148,7 @@ public class LookupCoordinatorResourceTest @Test public void testDiscoveryGet() { - final List tiers = ImmutableList.of(); + final Set tiers = ImmutableSet.of(); final LookupCoordinatorManager lookupCoordinatorManager = EasyMock.createStrictMock( LookupCoordinatorManager.class); EasyMock.expect(lookupCoordinatorManager.discoverTiers()).andReturn(tiers).once(); diff --git a/server/src/test/java/io/druid/server/http/RulesResourceTest.java b/server/src/test/java/io/druid/server/http/RulesResourceTest.java index d153397cee9..b322c3bf580 100644 --- a/server/src/test/java/io/druid/server/http/RulesResourceTest.java +++ b/server/src/test/java/io/druid/server/http/RulesResourceTest.java @@ -23,9 +23,10 @@ import com.google.common.collect.ImmutableList; import io.druid.audit.AuditEntry; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.metadata.MetadataRuleManager; import org.easymock.EasyMock; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; @@ -59,7 +60,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey", @@ -70,7 +71,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("datasource1"), EasyMock.eq("rules"), EasyMock.eq(2))) .andReturn(ImmutableList.of(entry1, entry2)) @@ -92,7 +93,7 @@ public class RulesResourceTest public void testGetDatasourceRuleHistoryWithInterval() { String interval = "P2D/2013-01-02T00:00:00Z"; - Interval theInterval = new Interval(interval); + Interval theInterval = Intervals.of(interval); AuditEntry entry1 = new AuditEntry( "testKey", "testType", @@ -102,7 +103,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey", @@ -113,7 +114,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("datasource1"), EasyMock.eq("rules"), EasyMock.eq(theInterval))) .andReturn(ImmutableList.of(entry1, entry2)) @@ -162,7 +163,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey", @@ -173,7 +174,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("rules"), EasyMock.eq(2))) .andReturn(ImmutableList.of(entry1, entry2)) @@ -195,7 +196,7 @@ public class RulesResourceTest public void testGetAllDatasourcesRuleHistoryWithInterval() { String interval = "P2D/2013-01-02T00:00:00Z"; - Interval theInterval = new Interval(interval); + Interval theInterval = Intervals.of(interval); AuditEntry entry1 = new AuditEntry( 
"testKey", "testType", @@ -205,7 +206,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-02T00:00:00Z") + DateTimes.of("2013-01-02T00:00:00Z") ); AuditEntry entry2 = new AuditEntry( "testKey", @@ -216,7 +217,7 @@ public class RulesResourceTest "127.0.0.1" ), "testPayload", - new DateTime("2013-01-01T00:00:00Z") + DateTimes.of("2013-01-01T00:00:00Z") ); EasyMock.expect(auditManager.fetchAuditHistory(EasyMock.eq("rules"), EasyMock.eq(theInterval))) .andReturn(ImmutableList.of(entry1, entry2)) diff --git a/server/src/test/java/io/druid/server/http/ServersResourceTest.java b/server/src/test/java/io/druid/server/http/ServersResourceTest.java index c9842cb195c..3d3431e39f7 100644 --- a/server/src/test/java/io/druid/server/http/ServersResourceTest.java +++ b/server/src/test/java/io/druid/server/http/ServersResourceTest.java @@ -24,11 +24,11 @@ import com.google.common.collect.ImmutableList; import io.druid.client.CoordinatorServerView; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; import io.druid.server.coordination.DruidServerMetadata; import io.druid.server.coordination.ServerType; import io.druid.timeline.DataSegment; import org.easymock.EasyMock; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -47,7 +47,7 @@ public class ServersResourceTest DruidServer dummyServer = new DruidServer("dummy", "host", null, 1234L, ServerType.HISTORICAL, "tier", 0); DataSegment segment = DataSegment.builder() .dataSource("dataSource") - .interval(new Interval("2016-03-22T14Z/2016-03-22T15Z")) + .interval(Intervals.of("2016-03-22T14Z/2016-03-22T15Z")) .version("v0") .size(1L) .build(); diff --git a/server/src/test/java/io/druid/server/listener/announcer/ListenerDiscovererTest.java b/server/src/test/java/io/druid/server/listener/announcer/ListenerDiscovererTest.java deleted file mode 100644 index 5bee4731fa0..00000000000 --- a/server/src/test/java/io/druid/server/listener/announcer/ListenerDiscovererTest.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package io.druid.server.listener.announcer; - -import com.google.common.collect.ImmutableSet; -import io.druid.concurrent.Execs; -import io.druid.curator.CuratorTestBase; -import io.druid.curator.announcement.Announcer; -import io.druid.segment.CloserRule; -import io.druid.server.http.HostAndPortWithScheme; -import io.druid.server.initialization.ZkPathsConfig; -import org.apache.curator.utils.ZKPaths; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; - -import java.io.Closeable; -import java.io.IOException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.TimeUnit; - -public class ListenerDiscovererTest extends CuratorTestBase -{ - @Rule - public CloserRule closerRule = new CloserRule(true); - - @Test(timeout = 60_000L) - public void testFullService() throws Exception - { - final String listenerKey = "listenerKey"; - final String listenerTier = "listenerTier"; - final String listenerTierChild = "tierChild"; - final String tierZkPath = ZKPaths.makePath(listenerTier, listenerTierChild); - - setupServerAndCurator(); - final ExecutorService executorService = Execs.singleThreaded("listenerDiscovererTest--%s"); - closerRule.closeLater(new Closeable() - { - @Override - public void close() throws IOException - { - executorService.shutdownNow(); - } - }); - closerRule.closeLater(server); - closerRule.closeLater(curator); - curator.start(); - curator.blockUntilConnected(10, TimeUnit.SECONDS); - Assert.assertEquals("/druid", curator.create().forPath("/druid")); - final Announcer announcer = new Announcer(curator, executorService); - closerRule.closeLater(new Closeable() - { - @Override - public void close() throws IOException - { - announcer.stop(); - } - }); - final ListeningAnnouncerConfig config = new ListeningAnnouncerConfig(new ZkPathsConfig()); - final ListenerDiscoverer listenerDiscoverer = new ListenerDiscoverer(curator, config); - listenerDiscoverer.start(); - closerRule.closeLater(new Closeable() - { - @Override - public void close() throws IOException - { - listenerDiscoverer.stop(); - } - }); - Assert.assertTrue(listenerDiscoverer.getNodes(listenerKey).isEmpty()); - - final HostAndPortWithScheme node = HostAndPortWithScheme.fromParts("http", "someHost", 8888); - final ListenerResourceAnnouncer listenerResourceAnnouncer = new ListenerResourceAnnouncer( - announcer, - config, - listenerKey, - node - ) - { - }; - listenerResourceAnnouncer.start(); - closerRule.closeLater(new Closeable() - { - @Override - public void close() throws IOException - { - listenerResourceAnnouncer.stop(); - } - }); - - final ListenerResourceAnnouncer tieredListenerResourceAnnouncer = new ListenerResourceAnnouncer( - announcer, - config, - tierZkPath, - node - ) - { - }; - tieredListenerResourceAnnouncer.start(); - closerRule.closeLater(new Closeable() - { - @Override - public void close() throws IOException - { - tieredListenerResourceAnnouncer.stop(); - } - }); - - announcer.start(); - - Assert.assertNotNull(curator.checkExists().forPath(config.getAnnouncementPath(listenerKey))); - // Have to wait for background syncing - while (listenerDiscoverer.getNodes(listenerKey).isEmpty()) { - // Will timeout at test's timeout setting - Thread.sleep(1); - } - Assert.assertEquals( - ImmutableSet.of(HostAndPortWithScheme.fromString(node.toString())), - listenerDiscoverer.getNodes(listenerKey) - ); - // 2nd call of two concurrent getNewNodes should return no entry collection - listenerDiscoverer.getNewNodes(listenerKey); - Assert.assertEquals( - 0, - 
listenerDiscoverer.getNewNodes(listenerKey).size() - ); - Assert.assertEquals( - ImmutableSet.of(listenerKey, listenerTier), - ImmutableSet.copyOf(listenerDiscoverer.discoverChildren(null)) - ); - Assert.assertEquals( - ImmutableSet.of(listenerTierChild), - ImmutableSet.copyOf(listenerDiscoverer.discoverChildren(listenerTier)) - ); - } -} diff --git a/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java b/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java index bb843506cc7..cc2e46c05b6 100644 --- a/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java +++ b/server/src/test/java/io/druid/server/listener/announcer/ListenerResourceAnnouncerTest.java @@ -23,6 +23,7 @@ import com.google.common.primitives.Longs; import io.druid.concurrent.Execs; import io.druid.curator.CuratorTestBase; import io.druid.curator.announcement.Announcer; +import io.druid.java.util.common.StringUtils; import io.druid.segment.CloserRule; import io.druid.server.http.HostAndPortWithScheme; import io.druid.server.initialization.ZkPathsConfig; @@ -90,7 +91,7 @@ public class ListenerResourceAnnouncerTest extends CuratorTestBase } }); Assert.assertNotNull(curator.checkExists().forPath(announcePath)); - final String nodePath = ZKPaths.makePath(announcePath, String.format("%s:%s", node.getScheme(), node.getHostText())); + final String nodePath = ZKPaths.makePath(announcePath, StringUtils.format("%s:%s", node.getScheme(), node.getHostText())); Assert.assertNotNull(curator.checkExists().forPath(nodePath)); Assert.assertEquals(Longs.BYTES, curator.getData().decompressed().forPath(nodePath).length); Assert.assertNull(curator.checkExists() @@ -124,7 +125,7 @@ public class ListenerResourceAnnouncerTest extends CuratorTestBase announcer.announce( - EasyMock.eq(ZKPaths.makePath(announcePath, String.format("%s:%s", node.getScheme(), node.getHostText()))), + EasyMock.eq(ZKPaths.makePath(announcePath, StringUtils.format("%s:%s", node.getScheme(), node.getHostText()))), EasyMock.aryEq(resourceAnnouncer.getAnnounceBytes()) ); EasyMock.expectLastCall().once(); diff --git a/server/src/test/java/io/druid/server/log/FileRequestLoggerTest.java b/server/src/test/java/io/druid/server/log/FileRequestLoggerTest.java index cfb1d319138..2b1a4ac76af 100644 --- a/server/src/test/java/io/druid/server/log/FileRequestLoggerTest.java +++ b/server/src/test/java/io/druid/server/log/FileRequestLoggerTest.java @@ -21,6 +21,7 @@ package io.druid.server.log; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.io.CharStreams; +import io.druid.java.util.common.DateTimes; import io.druid.server.RequestLogLine; import org.easymock.EasyMock; import org.joda.time.DateTime; @@ -47,7 +48,7 @@ public class FileRequestLoggerTest @Test public void testLog() throws IOException { ObjectMapper objectMapper = new ObjectMapper(); - DateTime dateTime = new DateTime(); + DateTime dateTime = DateTimes.nowUtc(); File logDir = temporaryFolder.newFolder(); String actualLogString = dateTime.toString() + "\t" + HOST; diff --git a/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java b/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java index aa8f8a51cac..83e1d38e78a 100644 --- a/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java +++ b/server/src/test/java/io/druid/server/log/LoggingRequestLoggerTest.java @@ -20,11 +20,12 @@ package io.druid.server.log; import 
com.fasterxml.jackson.annotation.JsonTypeName; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.jackson.JacksonUtils; import io.druid.query.BaseQuery; import io.druid.query.DataSource; import io.druid.query.LegacyDataSource; @@ -72,7 +73,7 @@ public class LoggingRequestLoggerTest @Override public List getIntervals() { - return Collections.singletonList(Interval.parse("2016-01-01T00Z/2016-01-02T00Z")); + return Collections.singletonList(Intervals.of("2016-01-01T00Z/2016-01-02T00Z")); } @Override @@ -159,9 +160,7 @@ public class LoggingRequestLoggerTest private static Map readContextMap(byte[] bytes) throws Exception { - final Map rawMap = mapper.readValue(bytes, new TypeReference>() - { - }); + final Map rawMap = mapper.readValue(bytes, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT); final Object contextMap = rawMap.get("contextMap"); if (contextMap == null) { return null; diff --git a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java index 958fea55d39..b128b0bc9b5 100644 --- a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java +++ b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java @@ -36,17 +36,14 @@ import com.metamx.http.client.response.HttpResponseHandler; import com.metamx.http.client.response.SequenceInputStreamResponseHandler; import io.druid.audit.AuditInfo; import io.druid.common.config.JacksonConfigManager; -import io.druid.java.util.common.StringUtils; +import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; -import io.druid.query.lookup.LookupModule; +import io.druid.java.util.common.StringUtils; import io.druid.query.lookup.LookupsState; import io.druid.server.http.HostAndPortWithScheme; -import io.druid.server.listener.announcer.ListenerDiscoverer; import org.easymock.EasyMock; -import org.hamcrest.BaseMatcher; -import org.hamcrest.Description; import org.joda.time.Duration; import org.junit.After; import org.junit.Assert; @@ -63,6 +60,7 @@ import java.io.InputStream; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; @@ -73,7 +71,9 @@ public class LookupCoordinatorManagerTest @Rule public ExpectedException expectedException = ExpectedException.none(); private final ObjectMapper mapper = new DefaultObjectMapper(); - private final ListenerDiscoverer discoverer = EasyMock.createStrictMock(ListenerDiscoverer.class); + private final DruidNodeDiscoveryProvider druidNodeDiscoveryProvider = EasyMock.createStrictMock(DruidNodeDiscoveryProvider.class); + private final LookupNodeDiscovery lookupNodeDiscovery = EasyMock.createStrictMock( + LookupNodeDiscovery.class); private final HttpClient client = EasyMock.createStrictMock(HttpClient.class); private final JacksonConfigManager configManager = EasyMock.createStrictMock(JacksonConfigManager.class); private final LookupCoordinatorManagerConfig lookupCoordinatorManagerConfig = new 
LookupCoordinatorManagerConfig(); @@ -139,6 +139,8 @@ public class LookupCoordinatorManagerTest SERVICE_EMITTER.flush(); EVENT_EMITS.set(0L); + EasyMock.reset(lookupNodeDiscovery); + EasyMock.reset(configManager); EasyMock.expect( configManager.watch( @@ -532,7 +534,7 @@ public class LookupCoordinatorManagerTest { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -555,7 +557,7 @@ public class LookupCoordinatorManagerTest { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -587,7 +589,7 @@ public class LookupCoordinatorManagerTest { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -624,7 +626,7 @@ public class LookupCoordinatorManagerTest final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -686,7 +688,7 @@ public class LookupCoordinatorManagerTest final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -741,7 +743,7 @@ public class LookupCoordinatorManagerTest { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -771,7 +773,7 @@ public class LookupCoordinatorManagerTest { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -805,7 +807,7 @@ public class LookupCoordinatorManagerTest final AuditInfo auditInfo = new AuditInfo("author", "comment", "localhost"); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -860,7 +862,7 @@ public class LookupCoordinatorManagerTest ); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -905,7 +907,7 @@ public class LookupCoordinatorManagerTest ); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -930,7 +932,7 @@ public class LookupCoordinatorManagerTest { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -956,7 +958,7 @@ public class LookupCoordinatorManagerTest ); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -985,7 +987,7 @@ public class LookupCoordinatorManagerTest ); final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + 
druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -1010,7 +1012,7 @@ public class LookupCoordinatorManagerTest { final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -1052,11 +1054,11 @@ public class LookupCoordinatorManagerTest HostAndPortWithScheme host1 = HostAndPortWithScheme.fromParts("http", "host1", 1234); HostAndPortWithScheme host2 = HostAndPortWithScheme.fromParts("http", "host2", 3456); - EasyMock.reset(discoverer); + EasyMock.reset(lookupNodeDiscovery); EasyMock.expect( - discoverer.getNodes(LookupModule.getTierListenerPath("tier1")) + lookupNodeDiscovery.getNodesInTier("tier1") ).andReturn(ImmutableList.of(host1, host2)).anyTimes(); - EasyMock.replay(discoverer); + EasyMock.replay(lookupNodeDiscovery); LookupCoordinatorManager.LookupsCommunicator lookupsCommunicator = EasyMock.createMock(LookupCoordinatorManager.LookupsCommunicator.class); EasyMock.expect( @@ -1134,10 +1136,11 @@ public class LookupCoordinatorManagerTest }; final LookupCoordinatorManager manager = new LookupCoordinatorManager( - discoverer, + druidNodeDiscoveryProvider, configManager, lookupCoordinatorManagerConfig, - lookupsCommunicator + lookupsCommunicator, + lookupNodeDiscovery ); Assert.assertTrue(manager.knownOldState.get().isEmpty()); @@ -1155,7 +1158,7 @@ public class LookupCoordinatorManagerTest Thread.sleep(100); } - EasyMock.verify(discoverer, configManager, lookupsCommunicator); + EasyMock.verify(lookupNodeDiscovery, configManager, lookupsCommunicator); } @Test @@ -1163,7 +1166,7 @@ public class LookupCoordinatorManagerTest { LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -1199,7 +1202,7 @@ public class LookupCoordinatorManagerTest { LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -1246,7 +1249,7 @@ public class LookupCoordinatorManagerTest final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -1283,7 +1286,7 @@ public class LookupCoordinatorManagerTest final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, lookupCoordinatorManagerConfig @@ -1327,60 +1330,56 @@ public class LookupCoordinatorManagerTest @Test public void testLookupDiscoverAll() throws Exception { - final List fakeChildren = ImmutableList.of("tier1", "tier2"); - EasyMock.reset(discoverer); - EasyMock.expect(discoverer.discoverChildren(LookupCoordinatorManager.LOOKUP_LISTEN_ANNOUNCE_KEY)) + final Set fakeChildren = ImmutableSet.of("tier1", "tier2"); + EasyMock.reset(lookupNodeDiscovery); + EasyMock.expect(lookupNodeDiscovery.getAllTiers()) .andReturn(fakeChildren) .once(); - EasyMock.replay(discoverer); + EasyMock.replay(lookupNodeDiscovery); + final LookupCoordinatorManager manager = new LookupCoordinatorManager( - client, - discoverer, - mapper, + druidNodeDiscoveryProvider, configManager, - lookupCoordinatorManagerConfig + lookupCoordinatorManagerConfig, + EasyMock.createMock(LookupCoordinatorManager.LookupsCommunicator.class), + lookupNodeDiscovery ); + manager.start(); Assert.assertEquals(fakeChildren, 
manager.discoverTiers()); - EasyMock.verify(discoverer); + EasyMock.verify(lookupNodeDiscovery); } @Test - public void testLookupDiscoverAllExceptional() throws Exception + public void testDiscoverNodesInTier() throws Exception { - final IOException ex = new IOException("some exception"); - EasyMock.reset(discoverer); - EasyMock.expect(discoverer.discoverChildren(LookupCoordinatorManager.LOOKUP_LISTEN_ANNOUNCE_KEY)) - .andThrow(ex) + EasyMock.reset(lookupNodeDiscovery); + EasyMock.expect(lookupNodeDiscovery.getNodesInTier("tier")) + .andReturn( + ImmutableSet.of( + HostAndPortWithScheme.fromParts("http", "h1", 8080), + HostAndPortWithScheme.fromParts("http", "h2", 8080) + ) + ) .once(); - expectedException.expectCause( - new BaseMatcher() - { - @Override - public boolean matches(Object o) - { - return o == ex; - } + EasyMock.replay(lookupNodeDiscovery); - @Override - public void describeTo(Description description) - { - - } - } - ); - EasyMock.replay(discoverer); final LookupCoordinatorManager manager = new LookupCoordinatorManager( - client, - discoverer, - mapper, + druidNodeDiscoveryProvider, configManager, - lookupCoordinatorManagerConfig + lookupCoordinatorManagerConfig, + EasyMock.createMock(LookupCoordinatorManager.LookupsCommunicator.class), + lookupNodeDiscovery ); manager.start(); - manager.discoverTiers(); - EasyMock.verify(discoverer); + Assert.assertEquals( + ImmutableSet.of( + HostAndPort.fromParts("h1", 8080), + HostAndPort.fromParts("h2", 8080) + ), + ImmutableSet.copyOf(manager.discoverNodesInTier("tier"))); + EasyMock.verify(lookupNodeDiscovery); } //tests that lookups stored in db from 0.10.0 are converted and restored. @@ -1434,7 +1433,7 @@ public class LookupCoordinatorManagerTest final LookupCoordinatorManager manager = new LookupCoordinatorManager( client, - discoverer, + druidNodeDiscoveryProvider, mapper, configManager, new LookupCoordinatorManagerConfig() diff --git a/server/src/test/java/io/druid/server/lookup/cache/LookupNodeDiscoveryTest.java b/server/src/test/java/io/druid/server/lookup/cache/LookupNodeDiscoveryTest.java new file mode 100644 index 00000000000..3884471fda3 --- /dev/null +++ b/server/src/test/java/io/druid/server/lookup/cache/LookupNodeDiscoveryTest.java @@ -0,0 +1,121 @@ +/* + * Licensed to Metamarkets Group Inc. (Metamarkets) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. Metamarkets licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
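The LookupCoordinatorManagerTest hunks above drop the ZooKeeper-path-based ListenerDiscoverer stub in favour of mocks of DruidNodeDiscoveryProvider and LookupNodeDiscovery. A small sketch of how a test can stub the new discovery surface, assuming only the methods these tests exercise (getAllTiers(), getNodesInTier()) and the EasyMock/Guava helpers they already import; the wrapping class and values are illustrative:

    import com.google.common.collect.ImmutableSet;
    import io.druid.server.http.HostAndPortWithScheme;
    import io.druid.server.lookup.cache.LookupNodeDiscovery;
    import org.easymock.EasyMock;

    class LookupDiscoveryStubSketch
    {
      void stubDiscovery()
      {
        final LookupNodeDiscovery discovery = EasyMock.createStrictMock(LookupNodeDiscovery.class);

        // Tiers now come from node discovery rather than ZK listener child paths.
        EasyMock.expect(discovery.getAllTiers())
                .andReturn(ImmutableSet.of("tier1", "tier2"))
                .once();

        // Hosts in a tier are exposed as HostAndPortWithScheme, as asserted in the tests above.
        EasyMock.expect(discovery.getNodesInTier("tier1"))
                .andReturn(ImmutableSet.of(HostAndPortWithScheme.fromParts("http", "h1", 8080)))
                .once();

        EasyMock.replay(discovery);
        // The mock is then handed to the LookupCoordinatorManager constructor overload
        // shown in the hunks above, alongside the DruidNodeDiscoveryProvider mock.
      }
    }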
+ */ + +package io.druid.server.lookup.cache; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import io.druid.discovery.DiscoveryDruidNode; +import io.druid.discovery.DruidNodeDiscovery; +import io.druid.discovery.DruidNodeDiscoveryProvider; +import io.druid.discovery.LookupNodeService; +import io.druid.server.DruidNode; +import io.druid.server.http.HostAndPortWithScheme; +import io.druid.server.initialization.ServerConfig; +import org.easymock.EasyMock; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +/** + */ +public class LookupNodeDiscoveryTest +{ + private DruidNodeDiscoveryProvider druidNodeDiscoveryProvider; + private DruidNodeDiscovery druidNodeDiscovery; + private LookupNodeDiscovery lookupNodeDiscovery; + + @Before + public void setup() + { + druidNodeDiscoveryProvider = EasyMock.createStrictMock(DruidNodeDiscoveryProvider.class); + + druidNodeDiscovery = EasyMock.createStrictMock(DruidNodeDiscovery.class); + + EasyMock.expect(druidNodeDiscoveryProvider.getForService(LookupNodeService.DISCOVERY_SERVICE_KEY)) + .andReturn(druidNodeDiscovery); + + DiscoveryDruidNode node1 = new DiscoveryDruidNode( + new DruidNode("s1", "h1", 8080, null, new ServerConfig()), + DruidNodeDiscoveryProvider.NODE_TYPE_HISTORICAL, + ImmutableMap.of( + LookupNodeService.DISCOVERY_SERVICE_KEY, new LookupNodeService("tier1")) + ); + + DiscoveryDruidNode node2 = new DiscoveryDruidNode( + new DruidNode("s2", "h2", 8080, null, new ServerConfig()), + DruidNodeDiscoveryProvider.NODE_TYPE_PEON, + ImmutableMap.of( + LookupNodeService.DISCOVERY_SERVICE_KEY, new LookupNodeService("tier1")) + ); + + DiscoveryDruidNode node3 = new DiscoveryDruidNode( + new DruidNode("s3", "h3", 8080, null, new ServerConfig()), + DruidNodeDiscoveryProvider.NODE_TYPE_PEON, + ImmutableMap.of( + LookupNodeService.DISCOVERY_SERVICE_KEY, new LookupNodeService("tier2")) + ); + + EasyMock.expect(druidNodeDiscovery.getAllNodes()) + .andReturn(ImmutableSet.of(node1, node2, node3)) + .anyTimes();; + + EasyMock.replay(druidNodeDiscoveryProvider, druidNodeDiscovery); + + lookupNodeDiscovery = new LookupNodeDiscovery(druidNodeDiscoveryProvider); + } + + @Test + public void testGetNodesInTier() throws Exception + { + Assert.assertEquals( + ImmutableList.of( + HostAndPortWithScheme.fromParts("http", "h1", 8080), + HostAndPortWithScheme.fromParts("http", "h2", 8080) + ), + ImmutableList.copyOf(lookupNodeDiscovery.getNodesInTier("tier1")) + ); + + Assert.assertEquals( + ImmutableList.of( + HostAndPortWithScheme.fromParts("http", "h3", 8080) + ), + ImmutableList.copyOf(lookupNodeDiscovery.getNodesInTier("tier2")) + ); + + Assert.assertEquals( + ImmutableList.of(), + ImmutableList.copyOf(lookupNodeDiscovery.getNodesInTier("tier3")) + ); + + EasyMock.verify(druidNodeDiscoveryProvider, druidNodeDiscovery); + } + + @Test + public void testGetAllTiers() throws Exception + { + Assert.assertEquals( + ImmutableSet.of("tier1", "tier2"), + lookupNodeDiscovery.getAllTiers() + ); + + EasyMock.verify(druidNodeDiscoveryProvider, druidNodeDiscovery); + } +} diff --git a/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java b/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java index 0acc6bd6f08..f97440bdad5 100644 --- a/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java +++ b/server/src/test/java/io/druid/server/metrics/HistoricalMetricsMonitorTest.java @@ -27,6 
+27,7 @@ import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceEventBuilder; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.client.DruidServerConfig; +import io.druid.java.util.common.Intervals; import io.druid.server.SegmentManager; import io.druid.server.coordination.ZkCoordinator; import io.druid.timeline.DataSegment; @@ -34,7 +35,6 @@ import org.easymock.Capture; import org.easymock.CaptureType; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -67,7 +67,7 @@ public class HistoricalMetricsMonitorTest extends EasyMockSupport final String dataSource = "dataSource"; final DataSegment dataSegment = new DataSegment( dataSource, - Interval.parse("2014/2015"), + Intervals.of("2014/2015"), "version", ImmutableMap.of(), ImmutableList.of(), diff --git a/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java b/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java index 2e07c418058..78e22f159f8 100644 --- a/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java +++ b/server/src/test/java/io/druid/server/router/QueryHostFinderTest.java @@ -19,11 +19,8 @@ package io.druid.server.router; -import com.google.common.collect.ImmutableMap; - -import io.druid.client.DruidServer; import io.druid.client.selector.Server; -import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.Pair; import io.druid.query.Query; import io.druid.query.TableDataSource; @@ -37,44 +34,19 @@ import org.junit.Before; import org.junit.Test; import java.util.Arrays; -import java.util.LinkedHashMap; /** */ public class QueryHostFinderTest { - private ServerDiscoverySelector selector; private TieredBrokerHostSelector brokerSelector; - private TieredBrokerConfig config; private Server server; @Before public void setUp() throws Exception { - selector = EasyMock.createMock(ServerDiscoverySelector.class); brokerSelector = EasyMock.createMock(TieredBrokerHostSelector.class); - config = new TieredBrokerConfig() - { - @Override - public LinkedHashMap getTierToBrokerMap() - { - return new LinkedHashMap<>( - ImmutableMap.of( - "hot", "hotBroker", - "medium", "mediumBroker", - DruidServer.DEFAULT_TIER, "coldBroker" - ) - ); - } - - @Override - public String getDefaultBrokerServiceName() - { - return "hotBroker"; - } - }; - server = new Server() { @Override @@ -101,24 +73,22 @@ public class QueryHostFinderTest return 0; } }; + + EasyMock.expect(brokerSelector.select(EasyMock.anyObject(Query.class))).andReturn( + Pair.of("service", server) + ); + EasyMock.replay(brokerSelector); } @After public void tearDown() throws Exception { EasyMock.verify(brokerSelector); - EasyMock.verify(selector); } @Test public void testFindServer() throws Exception { - EasyMock.expect(brokerSelector.select(EasyMock.anyObject())).andReturn(new Pair("hotBroker", selector)); - EasyMock.replay(brokerSelector); - - EasyMock.expect(selector.pick()).andReturn(server).once(); - EasyMock.replay(selector); - QueryHostFinder queryRunner = new QueryHostFinder( brokerSelector ); @@ -126,7 +96,7 @@ public class QueryHostFinderTest Server server = queryRunner.findServer( new TimeBoundaryQuery( new TableDataSource("test"), - new MultipleIntervalSegmentSpec(Arrays.asList(new Interval("2011-08-31/2011-09-01"))), + new 
MultipleIntervalSegmentSpec(Arrays.asList(Intervals.of("2011-08-31/2011-09-01"))), null, null, null diff --git a/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java b/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java index 52d3808e9b1..a2c253dd405 100644 --- a/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java +++ b/server/src/test/java/io/druid/server/router/TieredBrokerHostSelectorTest.java @@ -20,20 +20,32 @@ package io.druid.server.router; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Function; import com.google.common.base.Supplier; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import com.metamx.http.client.HttpClient; import io.druid.client.DruidServer; -import io.druid.curator.discovery.ServerDiscoveryFactory; -import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.client.selector.Server; +import io.druid.discovery.DiscoveryDruidNode; +import io.druid.discovery.DruidNodeDiscovery; +import io.druid.discovery.DruidNodeDiscoveryProvider; import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.Intervals; +import io.druid.java.util.common.Pair; import io.druid.query.Druids; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.spec.MultipleIntervalSegmentSpec; +import io.druid.query.timeseries.TimeseriesQuery; +import io.druid.server.DruidNode; import io.druid.server.coordinator.rules.IntervalLoadRule; import io.druid.server.coordinator.rules.Rule; +import io.druid.server.initialization.ServerConfig; import org.easymock.EasyMock; import org.joda.time.Interval; import org.junit.After; @@ -41,7 +53,9 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import javax.annotation.Nullable; import java.util.Arrays; +import java.util.Collection; import java.util.LinkedHashMap; import java.util.List; @@ -49,18 +63,59 @@ import java.util.List; */ public class TieredBrokerHostSelectorTest { - private ServerDiscoveryFactory factory; - private ServerDiscoverySelector selector; + private DruidNodeDiscoveryProvider druidNodeDiscoveryProvider; + private DruidNodeDiscovery druidNodeDiscovery; private TieredBrokerHostSelector brokerSelector; + private DiscoveryDruidNode node1; + private DiscoveryDruidNode node2; + private DiscoveryDruidNode node3; + @Before public void setUp() throws Exception { - factory = EasyMock.createMock(ServerDiscoveryFactory.class); - selector = EasyMock.createMock(ServerDiscoverySelector.class); + druidNodeDiscoveryProvider = EasyMock.createStrictMock(DruidNodeDiscoveryProvider.class); + + node1 = new DiscoveryDruidNode( + new DruidNode("hotBroker", "hotHost", 8080, null, new ServerConfig()), + DruidNodeDiscoveryProvider.NODE_TYPE_BROKER, + ImmutableMap.of() + ); + + node2 = new DiscoveryDruidNode( + new DruidNode("coldBroker", "coldHost1", 8080, null, new ServerConfig()), + DruidNodeDiscoveryProvider.NODE_TYPE_BROKER, + ImmutableMap.of() + ); + + node3 = new DiscoveryDruidNode( + new DruidNode("coldBroker", "coldHost2", 8080, null, new ServerConfig()), + DruidNodeDiscoveryProvider.NODE_TYPE_BROKER, + ImmutableMap.of() + ); + + druidNodeDiscovery = new DruidNodeDiscovery() + { + @Override + public Collection 
getAllNodes() + { + return ImmutableSet.of(node1, node2, node3); + } + + @Override + public void registerListener(Listener listener) + { + listener.nodesAdded(ImmutableList.of(node1, node2, node3)); + } + }; + + EasyMock.expect(druidNodeDiscoveryProvider.getForNodeType(DruidNodeDiscoveryProvider.NODE_TYPE_BROKER)) + .andReturn(druidNodeDiscovery);; + + EasyMock.replay(druidNodeDiscoveryProvider); brokerSelector = new TieredBrokerHostSelector( - new TestRuleManager(null, null, null, null), + new TestRuleManager(null, null, null), new TieredBrokerConfig() { @Override @@ -81,20 +136,11 @@ public class TieredBrokerHostSelectorTest return "hotBroker"; } }, - factory, + druidNodeDiscoveryProvider, Arrays.asList(new TimeBoundaryTieredBrokerSelectorStrategy(), new PriorityTieredBrokerSelectorStrategy(0, 1)) ); - EasyMock.expect(factory.createSelector(EasyMock.anyObject())).andReturn(selector).atLeastOnce(); - EasyMock.replay(factory); - - selector.start(); - EasyMock.expectLastCall().atLeastOnce(); - selector.stop(); - EasyMock.expectLastCall().atLeastOnce(); - EasyMock.replay(selector); brokerSelector.start(); - } @After @@ -102,39 +148,47 @@ public class TieredBrokerHostSelectorTest { brokerSelector.stop(); - EasyMock.verify(selector); - EasyMock.verify(factory); + EasyMock.verify(druidNodeDiscoveryProvider); } @Test public void testBasicSelect() throws Exception { - String brokerName = (String) brokerSelector.select( - Druids.newTimeseriesQueryBuilder() - .dataSource("test") - .granularity("all") - .aggregators(Arrays.asList(new CountAggregatorFactory("rows"))) - .intervals(Arrays.asList(new Interval("2011-08-31/2011-09-01"))) - .build() - ).lhs; + TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() + .dataSource("test") + .granularity("all") + .aggregators(Arrays.asList(new CountAggregatorFactory("rows"))) + .intervals(Arrays.asList(Intervals.of("2011-08-31/2011-09-01"))) + .build(); - Assert.assertEquals("coldBroker", brokerName); + Pair p = brokerSelector.select(query); + Assert.assertEquals("coldBroker", p.lhs); + Assert.assertEquals("coldHost1:8080", p.rhs.getHost()); + + p = brokerSelector.select(query); + Assert.assertEquals("coldBroker", p.lhs); + Assert.assertEquals("coldHost2:8080", p.rhs.getHost()); + + p = brokerSelector.select(query); + Assert.assertEquals("coldBroker", p.lhs); + Assert.assertEquals("coldHost1:8080", p.rhs.getHost()); } @Test public void testBasicSelect2() throws Exception { - String brokerName = (String) brokerSelector.select( + Pair p = brokerSelector.select( Druids.newTimeseriesQueryBuilder() .dataSource("test") .granularity("all") .aggregators(Arrays.asList(new CountAggregatorFactory("rows"))) - .intervals(Arrays.asList(new Interval("2013-08-31/2013-09-01"))) + .intervals(Arrays.asList(Intervals.of("2013-08-31/2013-09-01"))) .build() - ).lhs; + ); - Assert.assertEquals("hotBroker", brokerName); + Assert.assertEquals("hotBroker", p.lhs); + Assert.assertEquals("hotHost:8080", p.rhs.getHost()); } @Test @@ -145,7 +199,7 @@ public class TieredBrokerHostSelectorTest .dataSource("test") .granularity("all") .aggregators(Arrays.asList(new CountAggregatorFactory("rows"))) - .intervals(Arrays.asList(new Interval("2010-08-31/2010-09-01"))) + .intervals(Arrays.asList(Intervals.of("2010-08-31/2010-09-01"))) .build() ).lhs; @@ -162,9 +216,9 @@ public class TieredBrokerHostSelectorTest .intervals( new MultipleIntervalSegmentSpec( Arrays.asList( - new Interval("2013-08-31/2013-09-01"), - new Interval("2012-08-31/2012-09-01"), - new Interval("2011-08-31/2011-09-01") + 
Intervals.of("2013-08-31/2013-09-01"), + Intervals.of("2012-08-31/2012-09-01"), + Intervals.of("2011-08-31/2011-09-01") ) ) ).build() @@ -183,9 +237,9 @@ public class TieredBrokerHostSelectorTest .intervals( new MultipleIntervalSegmentSpec( Arrays.asList( - new Interval("2011-08-31/2011-09-01"), - new Interval("2012-08-31/2012-09-01"), - new Interval("2013-08-31/2013-09-01") + Intervals.of("2011-08-31/2011-09-01"), + Intervals.of("2012-08-31/2012-09-01"), + Intervals.of("2013-08-31/2013-09-01") ) ) ).build() @@ -204,9 +258,9 @@ public class TieredBrokerHostSelectorTest .intervals( new MultipleIntervalSegmentSpec( Arrays.asList( - new Interval("2011-08-31/2011-09-01"), - new Interval("2012-08-31/2012-09-01"), - new Interval("2013-08-31/2013-09-01") + Intervals.of("2011-08-31/2011-09-01"), + Intervals.of("2012-08-31/2012-09-01"), + Intervals.of("2013-08-31/2013-09-01") ) ) ) @@ -227,9 +281,9 @@ public class TieredBrokerHostSelectorTest .intervals( new MultipleIntervalSegmentSpec( Arrays.asList( - new Interval("2011-08-31/2011-09-01"), - new Interval("2012-08-31/2012-09-01"), - new Interval("2013-08-31/2013-09-01") + Intervals.of("2011-08-31/2011-09-01"), + Intervals.of("2012-08-31/2012-09-01"), + Intervals.of("2013-08-31/2013-09-01") ) ) ) @@ -240,16 +294,38 @@ public class TieredBrokerHostSelectorTest Assert.assertEquals("hotBroker", brokerName); } + @Test + public void testGetAllBrokers() + { + Assert.assertEquals( + ImmutableMap.of( + "mediumBroker", ImmutableList.of(), + "coldBroker", ImmutableList.of("coldHost1:8080", "coldHost2:8080"), + "hotBroker", ImmutableList.of("hotHost:8080") + ), + Maps.transformValues( + brokerSelector.getAllBrokers(), + new Function, List>() + { + @Override + public List apply(@Nullable List servers) + { + return Lists.transform(servers, server -> server.getHost()); + } + } + ) + ); + } + private static class TestRuleManager extends CoordinatorRuleManager { public TestRuleManager( @Global HttpClient httpClient, @Json ObjectMapper jsonMapper, - Supplier config, - ServerDiscoverySelector selector + Supplier config ) { - super(httpClient, jsonMapper, config, selector); + super(httpClient, jsonMapper, config, null); } @Override @@ -262,10 +338,10 @@ public class TieredBrokerHostSelectorTest public List getRulesWithDefault(String dataSource) { return Arrays.asList( - new IntervalLoadRule(new Interval("2013/2014"), ImmutableMap.of("hot", 1)), - new IntervalLoadRule(new Interval("2012/2013"), ImmutableMap.of("medium", 1)), + new IntervalLoadRule(Intervals.of("2013/2014"), ImmutableMap.of("hot", 1)), + new IntervalLoadRule(Intervals.of("2012/2013"), ImmutableMap.of("medium", 1)), new IntervalLoadRule( - new Interval("2011/2012"), + Intervals.of("2011/2012"), ImmutableMap.of(DruidServer.DEFAULT_TIER, 1) ) ); diff --git a/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java b/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java index 33187c17a98..d17d0e36e20 100644 --- a/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java +++ b/server/src/test/java/io/druid/server/shard/NumberedShardSpecTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import io.druid.TestUtil; +import io.druid.java.util.common.Intervals; import io.druid.timeline.TimelineObjectHolder; import io.druid.timeline.VersionedIntervalTimeline; import io.druid.timeline.partition.NumberedShardSpec; @@ -183,7 +184,7 @@ public class NumberedShardSpecTest ) 
{ VersionedIntervalTimeline timeline = new VersionedIntervalTimeline<>(Ordering.natural()); - Interval interval = new Interval("2000/3000"); + Interval interval = Intervals.of("2000/3000"); String version = "v1"; for (PartitionChunk chunk : chunks) { timeline.add(interval, version, chunk); diff --git a/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java b/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java index 23b837edb34..e7688a52de9 100644 --- a/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java +++ b/server/src/test/java/io/druid/timeline/partition/HashBasedNumberedShardSpecTest.java @@ -28,6 +28,7 @@ import io.druid.TestUtil; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import org.joda.time.DateTime; @@ -143,7 +144,7 @@ public class HashBasedNumberedShardSpecTest ImmutableList.of("visitor_id"), TestUtil.MAPPER ); - final DateTime time = new DateTime(); + final DateTime time = DateTimes.nowUtc(); final InputRow inputRow = new MapBasedInputRow( time, ImmutableList.of("visitor_id", "cnt"), @@ -220,7 +221,7 @@ public class HashBasedNumberedShardSpecTest @Override public DateTime getTimestamp() { - return new DateTime(0); + return DateTimes.EPOCH; } @Override diff --git a/services/pom.xml b/services/pom.xml index 7752423c7e2..506b4a29c27 100644 --- a/services/pom.xml +++ b/services/pom.xml @@ -71,6 +71,27 @@ + + de.thetaphi + forbiddenapis + + + validate + validate + + check + testCheck + + + + + jdk-unsafe + + + + + org.apache.maven.plugins maven-shade-plugin diff --git a/services/src/main/java/io/druid/cli/CliBroker.java b/services/src/main/java/io/druid/cli/CliBroker.java index 3bade6247bf..95586e7af29 100644 --- a/services/src/main/java/io/druid/cli/CliBroker.java +++ b/services/src/main/java/io/druid/cli/CliBroker.java @@ -21,7 +21,6 @@ package io.druid.cli; import com.google.common.collect.ImmutableList; import com.google.inject.Binder; -import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.name.Names; import io.airlift.airline.Command; @@ -34,8 +33,6 @@ import io.druid.client.cache.CacheMonitor; import io.druid.client.selector.CustomTierSelectorStrategyConfig; import io.druid.client.selector.ServerSelectorStrategy; import io.druid.client.selector.TierSelectorStrategy; -import io.druid.discovery.DruidNodeDiscoveryProvider; -import io.druid.discovery.LookupNodeService; import io.druid.guice.CacheModule; import io.druid.guice.DruidProcessingModule; import io.druid.guice.Jerseys; @@ -123,14 +120,6 @@ public class CliBroker extends ServerRunnable MetricsModule.register(binder, CacheMonitor.class); LifecycleModule.register(binder, Server.class); - - binder.bind(DiscoverySideEffectsProvider.Child.class).toProvider( - new DiscoverySideEffectsProvider( - DruidNodeDiscoveryProvider.NODE_TYPE_BROKER, - ImmutableList.of(LookupNodeService.class) - ) - ).in(LazySingleton.class); - LifecycleModule.registerKey(binder, Key.get(DiscoverySideEffectsProvider.Child.class)); } }, new LookupModule(), diff --git a/services/src/main/java/io/druid/cli/CliCoordinator.java b/services/src/main/java/io/druid/cli/CliCoordinator.java index 939ddeaceb5..3aff7ddf15e 100644 --- a/services/src/main/java/io/druid/cli/CliCoordinator.java +++ b/services/src/main/java/io/druid/cli/CliCoordinator.java @@ -75,7 +75,6 @@ import 
io.druid.server.http.RulesResource; import io.druid.server.http.ServersResource; import io.druid.server.http.TiersResource; import io.druid.server.initialization.jetty.JettyServerInitializer; -import io.druid.server.listener.announcer.ListenerDiscoverer; import io.druid.server.lookup.cache.LookupCoordinatorManager; import io.druid.server.lookup.cache.LookupCoordinatorManagerConfig; import io.druid.server.router.TieredBrokerConfig; @@ -168,9 +167,6 @@ public class CliCoordinator extends ServerRunnable binder.bind(LookupCoordinatorManager.class).in(LazySingleton.class); binder.bind(DruidCoordinator.class); - binder.bind(ListenerDiscoverer.class).in(ManageLifecycle.class); - - LifecycleModule.register(binder, ListenerDiscoverer.class); LifecycleModule.register(binder, MetadataStorage.class); LifecycleModule.register(binder, DruidCoordinator.class); diff --git a/services/src/main/java/io/druid/cli/DumpSegment.java b/services/src/main/java/io/druid/cli/DumpSegment.java index f2d6935e2c8..ce23c12e1e2 100644 --- a/services/src/main/java/io/druid/cli/DumpSegment.java +++ b/services/src/main/java/io/druid/cli/DumpSegment.java @@ -278,7 +278,9 @@ public class DumpSegment extends GuiceRunnable final List selectors = Lists.newArrayList(); for (String columnName : columnNames) { - selectors.add(makeSelector(columnName, index.getColumn(columnName), cursor)); + selectors.add( + makeSelector(columnName, index.getColumn(columnName), cursor.getColumnSelectorFactory()) + ); } while (!cursor.isDone()) { diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java index 222c063f654..f3c819ca05b 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java @@ -29,6 +29,7 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.inject.Inject; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; @@ -40,7 +41,6 @@ import org.apache.calcite.avatica.NoSuchConnectionException; import org.apache.calcite.avatica.NoSuchStatementException; import org.apache.calcite.avatica.QueryState; import org.apache.calcite.avatica.remote.TypedValue; -import org.joda.time.DateTime; import org.joda.time.Interval; import javax.annotation.Nonnull; @@ -552,7 +552,7 @@ public class DruidMeta extends MetaImpl log.debug("Connection[%s] timed out.", connectionId); closeConnection(new ConnectionHandle(connectionId)); }, - new Interval(new DateTime(), config.getConnectionIdleTimeout()).toDurationMillis(), + new Interval(DateTimes.nowUtc(), config.getConnectionIdleTimeout()).toDurationMillis(), TimeUnit.MILLISECONDS ) ); diff --git a/sql/src/main/java/io/druid/sql/calcite/aggregation/ApproxCountDistinctSqlAggregator.java b/sql/src/main/java/io/druid/sql/calcite/aggregation/ApproxCountDistinctSqlAggregator.java index fff3aa13277..bc0d8a7cefd 100644 --- a/sql/src/main/java/io/druid/sql/calcite/aggregation/ApproxCountDistinctSqlAggregator.java +++ b/sql/src/main/java/io/druid/sql/calcite/aggregation/ApproxCountDistinctSqlAggregator.java @@ -22,6 +22,7 @@ package io.druid.sql.calcite.aggregation; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import 
io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; @@ -90,7 +91,7 @@ public class ApproxCountDistinctSqlAggregator implements SqlAggregator final AggregatorFactory aggregatorFactory; if (input.isDirectColumnAccess() && rowSignature.getColumnType(input.getDirectColumn()) == ValueType.COMPLEX) { - aggregatorFactory = new HyperUniquesAggregatorFactory(name, input.getDirectColumn()); + aggregatorFactory = new HyperUniquesAggregatorFactory(name, input.getDirectColumn(), false, true); } else { final SqlTypeName sqlTypeName = rexNode.getType().getSqlTypeName(); final ValueType inputType = Calcites.getValueTypeForSqlTypeName(sqlTypeName); @@ -104,7 +105,7 @@ public class ApproxCountDistinctSqlAggregator implements SqlAggregator dimensionSpec = input.getSimpleExtraction().toDimensionSpec(null, ValueType.STRING); } else { final ExpressionVirtualColumn virtualColumn = input.toVirtualColumn( - String.format("%s:v", name), + StringUtils.format("%s:v", name), inputType, plannerContext.getExprMacroTable() ); @@ -112,7 +113,7 @@ public class ApproxCountDistinctSqlAggregator implements SqlAggregator virtualColumns.add(virtualColumn); } - aggregatorFactory = new CardinalityAggregatorFactory(name, ImmutableList.of(dimensionSpec), false); + aggregatorFactory = new CardinalityAggregatorFactory(name, null, ImmutableList.of(dimensionSpec), false, true); } return Aggregation.create(virtualColumns, aggregatorFactory).filter(filter); diff --git a/sql/src/main/java/io/druid/sql/calcite/aggregation/DimensionExpression.java b/sql/src/main/java/io/druid/sql/calcite/aggregation/DimensionExpression.java index 2a7ee4c49ad..d5da02d37b7 100644 --- a/sql/src/main/java/io/druid/sql/calcite/aggregation/DimensionExpression.java +++ b/sql/src/main/java/io/druid/sql/calcite/aggregation/DimensionExpression.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.aggregation; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.ExprMacroTable; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; @@ -84,7 +85,7 @@ public class DimensionExpression @Nullable public String getVirtualColumnName() { - return expression.isSimpleExtraction() ? null : String.format("%s:v", outputName); + return expression.isSimpleExtraction() ? 
null : StringUtils.format("%s:v", outputName); } @Override diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/CeilOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/CeilOperatorConversion.java index 10bca1f43b3..582d7397c52 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/CeilOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/CeilOperatorConversion.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.expression; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.RowSignature; @@ -60,7 +61,7 @@ public class CeilOperatorConversion implements SqlOperatorConversion // CEIL(expr) return druidExpression.map( simpleExtraction -> null, - expression -> String.format("ceil(%s)", expression) + expression -> StringUtils.format("ceil(%s)", expression) ); } else if (call.getOperands().size() == 2) { // CEIL(expr TO timeUnit) diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java b/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java index e3cac19bafd..86475a39049 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/DruidExpression.java @@ -22,6 +22,7 @@ package io.druid.sql.calcite.expression; import com.google.common.base.Preconditions; import com.google.common.io.BaseEncoding; import com.google.common.primitives.Chars; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.Expr; import io.druid.math.expr.ExprMacroTable; import io.druid.math.expr.Parser; @@ -64,7 +65,7 @@ public class DruidExpression public static DruidExpression fromColumn(final String column) { - return new DruidExpression(SimpleExtraction.of(column, null), String.format("\"%s\"", escape(column))); + return new DruidExpression(SimpleExtraction.of(column, null), StringUtils.format("\"%s\"", escape(column))); } public static DruidExpression fromExpression(final String expression) diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java b/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java index 770844184c2..6b8f9ed7561 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java @@ -24,7 +24,9 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.math.expr.ExprType; @@ -58,7 +60,6 @@ import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.type.SqlTypeFamily; import org.apache.calcite.sql.type.SqlTypeName; -import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.Period; @@ -265,7 +266,7 @@ public class Expressions // Ignore casts for simple extractions (use Function.identity) since it is ok in many cases. 
typeCastExpression = operandExpression.map( Function.identity(), - expression -> String.format("CAST(%s, '%s')", expression, toExprType.toString()) + expression -> StringUtils.format("CAST(%s, '%s')", expression, toExprType.toString()) ); } else { typeCastExpression = operandExpression; @@ -301,7 +302,7 @@ public class Expressions return null; } else if (UNARY_PREFIX_OPERATOR_MAP.containsKey(operator)) { return DruidExpression.fromExpression( - String.format( + StringUtils.format( "(%s %s)", UNARY_PREFIX_OPERATOR_MAP.get(operator), Iterables.getOnlyElement(operands).getExpression() @@ -309,7 +310,7 @@ public class Expressions ); } else if (UNARY_SUFFIX_OPERATOR_MAP.containsKey(operator)) { return DruidExpression.fromExpression( - String.format( + StringUtils.format( "(%s %s)", Iterables.getOnlyElement(operands).getExpression(), UNARY_SUFFIX_OPERATOR_MAP.get(operator) @@ -320,7 +321,7 @@ public class Expressions throw new ISE("WTF?! Got binary operator[%s] with %s args?", kind, operands.size()); } return DruidExpression.fromExpression( - String.format( + StringUtils.format( "(%s %s %s)", operands.get(0).getExpression(), BINARY_OPERATOR_MAP.get(operator), @@ -522,7 +523,7 @@ public class Expressions if (granularity != null) { // lhs is FLOOR(__time TO granularity); rhs must be a timestamp final long rhsMillis = Calcites.calciteDateTimeLiteralToJoda(rhs, plannerContext.getTimeZone()).getMillis(); - final Interval rhsInterval = granularity.bucket(new DateTime(rhsMillis)); + final Interval rhsInterval = granularity.bucket(DateTimes.utc(rhsMillis)); // Is rhs aligned on granularity boundaries? final boolean rhsAligned = rhsInterval.getStartMillis() == rhsMillis; diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/FloorOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/FloorOperatorConversion.java index 822d6ad358e..eac1b857754 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/FloorOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/FloorOperatorConversion.java @@ -19,6 +19,7 @@ package io.druid.sql.calcite.expression; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.RowSignature; @@ -57,7 +58,7 @@ public class FloorOperatorConversion implements SqlOperatorConversion // FLOOR(expr) return druidExpression.map( simpleExtraction -> null, // BucketExtractionFn could do this, but it's lame since it returns strings. 
- expression -> String.format("floor(%s)", expression) + expression -> StringUtils.format("floor(%s)", expression) ); } else if (call.getOperands().size() == 2) { // FLOOR(expr TO timeUnit) diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/LookupOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/LookupOperatorConversion.java index a6118b21e1b..677c83f9273 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/LookupOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/LookupOperatorConversion.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.expression; import com.google.inject.Inject; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.Expr; import io.druid.query.lookup.LookupReferencesManager; import io.druid.query.lookup.RegisteredLookupExtractionFn; @@ -65,7 +66,7 @@ public class LookupOperatorConversion implements SqlOperatorConversion plannerContext, rowSignature, rexNode, - calciteOperator().getName().toLowerCase(), + StringUtils.toLowerCase(calciteOperator().getName()), inputExpressions -> { final DruidExpression arg = inputExpressions.get(0); final Expr lookupNameExpr = inputExpressions.get(1).parse(plannerContext.getExprMacroTable()); diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/RegexpExtractOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/RegexpExtractOperatorConversion.java index 8879e8313a2..731c4413ed2 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/RegexpExtractOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/RegexpExtractOperatorConversion.java @@ -19,6 +19,7 @@ package io.druid.sql.calcite.expression; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.Expr; import io.druid.query.extraction.RegexDimExtractionFn; import io.druid.sql.calcite.planner.PlannerContext; @@ -58,7 +59,7 @@ public class RegexpExtractOperatorConversion implements SqlOperatorConversion plannerContext, rowSignature, rexNode, - calciteOperator().getName().toLowerCase(), + StringUtils.toLowerCase(calciteOperator().getName()), inputExpressions -> { final DruidExpression arg = inputExpressions.get(0); final Expr patternExpr = inputExpressions.get(1).parse(plannerContext.getExprMacroTable()); diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/SimpleExtraction.java b/sql/src/main/java/io/druid/sql/calcite/expression/SimpleExtraction.java index b51841b8f1d..53476dfd2a7 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/SimpleExtraction.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/SimpleExtraction.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.expression; import com.google.common.base.Preconditions; +import io.druid.java.util.common.StringUtils; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ExtractionDimensionSpec; @@ -107,7 +108,7 @@ public class SimpleExtraction public String toString() { if (extractionFn != null) { - return String.format("%s(%s)", extractionFn, column); + return StringUtils.format("%s(%s)", extractionFn, column); } else { return column; } diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/SubstringOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/SubstringOperatorConversion.java index 6da872b8447..d1c169d6024 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/SubstringOperatorConversion.java +++ 
b/sql/src/main/java/io/druid/sql/calcite/expression/SubstringOperatorConversion.java @@ -19,6 +19,7 @@ package io.druid.sql.calcite.expression; +import io.druid.java.util.common.StringUtils; import io.druid.query.extraction.SubstringDimExtractionFn; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.RowSignature; @@ -65,7 +66,7 @@ public class SubstringOperatorConversion implements SqlOperatorConversion return input.map( simpleExtraction -> simpleExtraction.cascade(new SubstringDimExtractionFn(index, length < 0 ? null : length)), - expression -> String.format( + expression -> StringUtils.format( "substring(%s, %s, %s)", expression, DruidExpression.numberLiteral(index), diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/TimeArithmeticOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/TimeArithmeticOperatorConversion.java index aaa119b2e1a..8a0feef119b 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/TimeArithmeticOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/TimeArithmeticOperatorConversion.java @@ -22,6 +22,7 @@ package io.druid.sql.calcite.expression; import com.google.common.base.Preconditions; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.RowSignature; import org.apache.calcite.rex.RexCall; @@ -85,7 +86,7 @@ public abstract class TimeArithmeticOperatorConversion implements SqlOperatorCon timeExpr, shiftExpr.map( simpleExtraction -> null, - expression -> String.format("concat('P', %s, 'M')", expression) + expression -> StringUtils.format("concat('P', %s, 'M')", expression) ), DruidExpression.fromExpression(DruidExpression.numberLiteral(direction > 0 ? 1 : -1)) ) @@ -94,7 +95,7 @@ public abstract class TimeArithmeticOperatorConversion implements SqlOperatorCon // timestamp_expr { + | - } (day-time interval) // Period is a value in milliseconds. Ignore time zone. return DruidExpression.fromExpression( - String.format( + StringUtils.format( "(%s %s %s)", timeExpr.getExpression(), direction > 0 ? 
"+" : "-", diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/TimeExtractOperatorConversion.java b/sql/src/main/java/io/druid/sql/calcite/expression/TimeExtractOperatorConversion.java index c1d7537b9ed..e6fbd3bcf46 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/TimeExtractOperatorConversion.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/TimeExtractOperatorConversion.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.expression; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.StringUtils; import io.druid.query.expression.TimestampExtractExprMacro; import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.RowSignature; @@ -77,7 +78,7 @@ public class TimeExtractOperatorConversion implements SqlOperatorConversion ); } }, - expression -> String.format( + expression -> StringUtils.format( "timestamp_extract(%s,%s,%s)", expression, DruidExpression.stringLiteral(unit.name()), @@ -107,7 +108,7 @@ public class TimeExtractOperatorConversion implements SqlOperatorConversion } final TimestampExtractExprMacro.Unit unit = TimestampExtractExprMacro.Unit.valueOf( - RexLiteral.stringValue(call.getOperands().get(1)).toUpperCase() + StringUtils.toUpperCase(RexLiteral.stringValue(call.getOperands().get(1))) ); final DateTimeZone timeZone = call.getOperands().size() > 2 && !RexLiteral.isNullLiteral(call.getOperands().get(2)) diff --git a/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java b/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java index 42dbaef0ab1..c241fab86ae 100644 --- a/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java +++ b/sql/src/main/java/io/druid/sql/calcite/filtration/Filtration.java @@ -21,7 +21,7 @@ package io.druid.sql.calcite.filtration; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; -import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.ISE; import io.druid.math.expr.ExprMacroTable; import io.druid.query.filter.DimFilter; @@ -35,7 +35,6 @@ import java.util.List; public class Filtration { - private static final Interval ETERNITY = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT); private static final DimFilter MATCH_NOTHING = new ExpressionDimFilter( "1 == 2", ExprMacroTable.nil() ); @@ -51,13 +50,13 @@ public class Filtration private Filtration(final DimFilter dimFilter, final List intervals) { - this.intervals = intervals != null ? intervals : ImmutableList.of(ETERNITY); + this.intervals = intervals != null ? 
intervals : Intervals.ONLY_ETERNITY; this.dimFilter = dimFilter; } public static Interval eternity() { - return ETERNITY; + return Intervals.ETERNITY; } public static DimFilter matchNothing() diff --git a/sql/src/main/java/io/druid/sql/calcite/filtration/RangeSets.java b/sql/src/main/java/io/druid/sql/calcite/filtration/RangeSets.java index 345a7e748d2..076d8d2ea17 100644 --- a/sql/src/main/java/io/druid/sql/calcite/filtration/RangeSets.java +++ b/sql/src/main/java/io/druid/sql/calcite/filtration/RangeSets.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Range; import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; +import io.druid.java.util.common.Intervals; import org.joda.time.Interval; import java.util.List; @@ -128,7 +129,7 @@ public class RangeSets end = Filtration.eternity().getEndMillis(); } - retVal.add(new Interval(start, end)); + retVal.add(Intervals.utc(start, end)); } return retVal; diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java b/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java index afbfac840fc..779b7928176 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java @@ -21,6 +21,7 @@ package io.druid.sql.calcite.planner; import com.google.common.io.BaseEncoding; import com.google.common.primitives.Chars; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.StringUtils; @@ -165,7 +166,7 @@ public class Calcites public static int jodaToCalciteDate(final DateTime dateTime, final DateTimeZone timeZone) { final DateTime date = dateTime.withZone(timeZone).dayOfMonth().roundFloorCopy(); - return Days.daysBetween(new DateTime(0L, DateTimeZone.UTC), date.withZoneRetainFields(DateTimeZone.UTC)).getDays(); + return Days.daysBetween(DateTimes.EPOCH, date.withZoneRetainFields(DateTimeZone.UTC)).getDays(); } /** @@ -179,8 +180,7 @@ public class Calcites */ public static Calendar jodaToCalciteCalendarLiteral(final DateTime dateTime, final DateTimeZone timeZone) { - final Calendar calendar = Calendar.getInstance(Locale.ENGLISH); - calendar.setTimeZone(GMT_TIME_ZONE); + final Calendar calendar = Calendar.getInstance(GMT_TIME_ZONE, Locale.ENGLISH); calendar.setTimeInMillis(Calcites.jodaToCalciteTimestamp(dateTime, timeZone)); return calendar; } @@ -228,7 +228,7 @@ public class Calcites */ public static DateTime calciteDateToJoda(final int date, final DateTimeZone timeZone) { - return new DateTime(0L, DateTimeZone.UTC).plusDays(date).withZoneRetainFields(timeZone); + return DateTimes.EPOCH.plusDays(date).withZoneRetainFields(timeZone); } /** diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java b/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java index 33a056dc75d..563c0ab11e2 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/DruidRexExecutor.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.planner; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; import io.druid.math.expr.Expr; import io.druid.math.expr.ExprEval; import io.druid.math.expr.ExprType; @@ -31,7 +32,6 @@ import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexExecutor; import org.apache.calcite.rex.RexNode; import 
org.apache.calcite.sql.type.SqlTypeName; -import org.joda.time.DateTime; import java.math.BigDecimal; import java.util.List; @@ -77,7 +77,7 @@ public class DruidRexExecutor implements RexExecutor literalValue = exprResult.asBoolean(); } else if (sqlTypeName == SqlTypeName.DATE || sqlTypeName == SqlTypeName.TIMESTAMP) { literalValue = Calcites.jodaToCalciteCalendarLiteral( - new DateTime(exprResult.asLong()), + DateTimes.utc(exprResult.asLong()), plannerContext.getTimeZone() ); } else if (SqlTypeName.NUMERIC_TYPES.contains(sqlTypeName)) { diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/PlannerFactory.java b/sql/src/main/java/io/druid/sql/calcite/planner/PlannerFactory.java index d75adc5bfb6..087047be9e4 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/PlannerFactory.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/PlannerFactory.java @@ -19,7 +19,9 @@ package io.druid.sql.calcite.planner; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.Inject; +import io.druid.guice.annotations.Json; import io.druid.math.expr.ExprMacroTable; import io.druid.server.QueryLifecycleFactory; import io.druid.sql.calcite.rel.QueryMaker; @@ -53,6 +55,7 @@ public class PlannerFactory private final DruidOperatorTable operatorTable; private final ExprMacroTable macroTable; private final PlannerConfig plannerConfig; + private final ObjectMapper jsonMapper; @Inject public PlannerFactory( @@ -60,7 +63,8 @@ public class PlannerFactory final QueryLifecycleFactory queryLifecycleFactory, final DruidOperatorTable operatorTable, final ExprMacroTable macroTable, - final PlannerConfig plannerConfig + final PlannerConfig plannerConfig, + final @Json ObjectMapper jsonMapper ) { this.druidSchema = druidSchema; @@ -68,13 +72,14 @@ public class PlannerFactory this.operatorTable = operatorTable; this.macroTable = macroTable; this.plannerConfig = plannerConfig; + this.jsonMapper = jsonMapper; } public DruidPlanner createPlanner(final Map queryContext) { final SchemaPlus rootSchema = Calcites.createRootSchema(druidSchema); final PlannerContext plannerContext = PlannerContext.create(operatorTable, macroTable, plannerConfig, queryContext); - final QueryMaker queryMaker = new QueryMaker(queryLifecycleFactory, plannerContext); + final QueryMaker queryMaker = new QueryMaker(queryLifecycleFactory, plannerContext, jsonMapper); final FrameworkConfig frameworkConfig = Frameworks .newConfigBuilder() .parserConfig(PARSER_CONFIG) diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/Rules.java b/sql/src/main/java/io/druid/sql/calcite/planner/Rules.java index 71788457e11..8ad6956e4e9 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/Rules.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/Rules.java @@ -219,7 +219,7 @@ public class Rules rules.add(CaseFilteredAggregatorRule.instance()); // Druid-specific rules. 
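Aside on the Calcites, DruidRexExecutor, and expression-conversion hunks above: the recurring swaps of String.format/toUpperCase for StringUtils.format/StringUtils.toUpperCase, and of new DateTime(...)/new Interval(...) for DateTimes.utc(...)/Intervals.utc(...), replace locale- and time-zone-sensitive calls with Druid utilities that pin an explicit English locale and UTC, so generated expressions and timestamps do not vary with the server's JVM defaults. The small program below is an illustrative sketch of the two pitfalls; it is not part of the patch.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.util.Locale;
import java.util.TimeZone;

public class JvmDefaultsDemo
{
  public static void main(String[] args)
  {
    // Locale pitfall: under a Turkish default locale, "i" upper-cases to dotted capital İ (U+0130),
    // so locale-sensitive calls can silently change generated identifiers.
    Locale.setDefault(new Locale("tr", "TR"));
    System.out.println("timestamp_extract".toUpperCase());               // TİMESTAMP_EXTRACT (locale-dependent)
    System.out.println("timestamp_extract".toUpperCase(Locale.ENGLISH)); // TIMESTAMP_EXTRACT (stable)

    // Time-zone pitfall: new DateTime(millis) interprets the instant in the JVM's default zone,
    // while an explicit UTC zone is stable regardless of where the server runs.
    DateTimeZone.setDefault(DateTimeZone.forID("America/Los_Angeles"));
    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"));
    System.out.println(new DateTime(0L));                   // 1969-12-31T16:00:00.000-08:00
    System.out.println(new DateTime(0L, DateTimeZone.UTC)); // 1970-01-01T00:00:00.000Z
  }
}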
- rules.add(new DruidTableScanRule(plannerContext, queryMaker)); + rules.add(new DruidTableScanRule(queryMaker)); rules.add(new DruidFilterRule()); if (plannerConfig.getMaxSemiJoinRowsInMemory() > 0) { diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidNestedGroupBy.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidOuterQueryRel.java similarity index 58% rename from sql/src/main/java/io/druid/sql/calcite/rel/DruidNestedGroupBy.java rename to sql/src/main/java/io/druid/sql/calcite/rel/DruidOuterQueryRel.java index 3ada2785f16..df80ee0ed8e 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidNestedGroupBy.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidOuterQueryRel.java @@ -19,48 +19,51 @@ package io.druid.sql.calcite.rel; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.QueryDataSource; +import io.druid.query.TableDataSource; import io.druid.query.filter.DimFilter; import io.druid.sql.calcite.table.RowSignature; import org.apache.calcite.interpreter.BindableConvention; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptCost; import org.apache.calcite.plan.RelOptPlanner; +import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelWriter; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; +import javax.annotation.Nullable; +import java.io.IOException; import java.util.List; -public class DruidNestedGroupBy extends DruidRel +/** + * DruidRel that uses a "query" dataSource. 
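+ * It wraps another DruidRel as its input; the input is compiled to a query dataSource, and the outer layer itself always becomes a groupBy.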
+ */ +public class DruidOuterQueryRel extends DruidRel { - private final DruidRel sourceRel; + private final RelNode sourceRel; private final DruidQueryBuilder queryBuilder; - private DruidNestedGroupBy( + private DruidOuterQueryRel( RelOptCluster cluster, RelTraitSet traitSet, - DruidRel sourceRel, - DruidQueryBuilder queryBuilder + RelNode sourceRel, + DruidQueryBuilder queryBuilder, + QueryMaker queryMaker ) { - super(cluster, traitSet, sourceRel.getQueryMaker()); + super(cluster, traitSet, queryMaker); this.sourceRel = sourceRel; this.queryBuilder = queryBuilder; - - if (sourceRel.getQueryBuilder().getGrouping() == null) { - throw new IllegalArgumentException("inner query must be groupBy"); - } - - if (queryBuilder.getGrouping() == null) { - throw new IllegalArgumentException("outer query must be groupBy"); - } } - public static DruidNestedGroupBy from( + public static DruidOuterQueryRel from( final DruidRel sourceRel, final DimFilter filter, final Grouping grouping, @@ -68,21 +71,22 @@ public class DruidNestedGroupBy extends DruidRel final List rowOrder ) { - return new DruidNestedGroupBy( + return new DruidOuterQueryRel( sourceRel.getCluster(), sourceRel.getTraitSet(), sourceRel, DruidQueryBuilder.fullScan( sourceRel.getOutputRowSignature(), sourceRel.getCluster().getTypeFactory() - ).withFilter(filter).withGrouping(grouping, rowType, rowOrder) + ).withFilter(filter).withGrouping(grouping, rowType, rowOrder), + sourceRel.getQueryMaker() ); } @Override public RowSignature getSourceRowSignature() { - return sourceRel.getOutputRowSignature(); + return ((DruidRel) sourceRel).getOutputRowSignature(); } @Override @@ -94,7 +98,7 @@ public class DruidNestedGroupBy extends DruidRel @Override public Sequence runQuery() { - final QueryDataSource queryDataSource = sourceRel.asDataSource(); + final QueryDataSource queryDataSource = ((DruidRel) sourceRel).asDataSource(); if (queryDataSource != null) { return getQueryMaker().runQuery( queryDataSource, @@ -106,26 +110,28 @@ public class DruidNestedGroupBy extends DruidRel } @Override - public DruidNestedGroupBy withQueryBuilder(DruidQueryBuilder newQueryBuilder) + public DruidOuterQueryRel withQueryBuilder(final DruidQueryBuilder newQueryBuilder) { - return new DruidNestedGroupBy( + return new DruidOuterQueryRel( getCluster(), getTraitSet().plusAll(newQueryBuilder.getRelTraits()), sourceRel, - newQueryBuilder + newQueryBuilder, + getQueryMaker() ); } @Override public int getQueryCount() { - return 1 + sourceRel.getQueryCount(); + return 1 + ((DruidRel) sourceRel).getQueryCount(); } + @Nullable @Override public QueryDataSource asDataSource() { - final QueryDataSource queryDataSource = sourceRel.asDataSource(); + final QueryDataSource queryDataSource = ((DruidRel) sourceRel).asDataSource(); if (queryDataSource == null) { return null; } else { @@ -134,33 +140,65 @@ public class DruidNestedGroupBy extends DruidRel } @Override - public DruidNestedGroupBy asBindable() + public DruidOuterQueryRel asBindable() { - return new DruidNestedGroupBy( + return new DruidOuterQueryRel( getCluster(), getTraitSet().plus(BindableConvention.INSTANCE), sourceRel, - queryBuilder + queryBuilder, + getQueryMaker() ); } @Override - public DruidNestedGroupBy asDruidConvention() + public DruidOuterQueryRel asDruidConvention() { - return new DruidNestedGroupBy( + return new DruidOuterQueryRel( getCluster(), getTraitSet().plus(DruidConvention.instance()), - sourceRel, - queryBuilder + RelOptRule.convert(sourceRel, DruidConvention.instance()), + queryBuilder, + getQueryMaker() + 
); + } + + @Override + public List getInputs() + { + return ImmutableList.of(sourceRel); + } + + @Override + public RelNode copy(final RelTraitSet traitSet, final List inputs) + { + return new DruidOuterQueryRel( + getCluster(), + traitSet, + Iterables.getOnlyElement(inputs), + getQueryBuilder(), + getQueryMaker() ); } @Override public RelWriter explainTerms(RelWriter pw) { - return pw - .item("sourceRel", sourceRel) - .item("queryBuilder", queryBuilder); + final TableDataSource dummyDataSource = new TableDataSource("__subquery__"); + final String queryString; + + try { + queryString = getQueryMaker() + .getJsonMapper() + .writeValueAsString(queryBuilder.toGroupByQuery(dummyDataSource, getPlannerContext())); + } + catch (IOException e) { + throw new RuntimeException(e); + } + + return super.explainTerms(pw) + .input("innerQuery", sourceRel) + .item("query", queryString); } @Override @@ -172,6 +210,6 @@ public class DruidNestedGroupBy extends DruidRel @Override public RelOptCost computeSelfCost(final RelOptPlanner planner, final RelMetadataQuery mq) { - return sourceRel.computeSelfCost(planner, mq).multiplyBy(2.0); + return planner.getCostFactory().makeCost(mq.getRowCount(sourceRel), 0, 0); } } diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java index 3bae621ceb9..212d0270ecc 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryBuilder.java @@ -29,6 +29,8 @@ import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.math.expr.ExprMacroTable; import io.druid.query.DataSource; +import io.druid.query.Query; +import io.druid.query.QueryDataSource; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; import io.druid.query.filter.DimFilter; @@ -64,6 +66,7 @@ import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.sql.type.SqlTypeName; +import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -337,6 +340,55 @@ public class DruidQueryBuilder } } + /** + * Return this query as some kind of Druid query. The returned query will either be {@link TopNQuery}, + * {@link TimeseriesQuery}, {@link GroupByQuery}, or {@link SelectQuery}. + * + * @param dataSource data source to query + * @param plannerContext planner context + * + * @return Druid query + */ + public Query toQuery( + final DataSource dataSource, + final PlannerContext plannerContext + ) + { + if (dataSource instanceof QueryDataSource) { + // If there is a subquery then the outer query must be a groupBy. + final GroupByQuery outerQuery = toGroupByQuery(dataSource, plannerContext); + + if (outerQuery == null) { + // Bug in the planner rules. They shouldn't allow this to happen. 
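+ // (A query dataSource can only be consumed by an outer groupBy; the timeseries, topN, and select branches below are reachable only for table dataSources.)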
+ throw new IllegalStateException("Can't use QueryDataSource without an outer groupBy query!"); + } + + return outerQuery; + } + + final TimeseriesQuery tsQuery = toTimeseriesQuery(dataSource, plannerContext); + if (tsQuery != null) { + return tsQuery; + } + + final TopNQuery topNQuery = toTopNQuery(dataSource, plannerContext); + if (topNQuery != null) { + return topNQuery; + } + + final GroupByQuery groupByQuery = toGroupByQuery(dataSource, plannerContext); + if (groupByQuery != null) { + return groupByQuery; + } + + final SelectQuery selectQuery = toSelectQuery(dataSource, plannerContext); + if (selectQuery != null) { + return selectQuery; + } + + throw new IllegalStateException("WTF?! Cannot build a query even though we planned it?"); + } + /** * Return this query as a Timeseries query, or null if this query is not compatible with Timeseries. * @@ -345,6 +397,7 @@ public class DruidQueryBuilder * * @return query or null */ + @Nullable public TimeseriesQuery toTimeseriesQuery( final DataSource dataSource, final PlannerContext plannerContext @@ -417,6 +470,7 @@ public class DruidQueryBuilder * * @return query or null */ + @Nullable public TopNQuery toTopNQuery( final DataSource dataSource, final PlannerContext plannerContext @@ -491,6 +545,7 @@ public class DruidQueryBuilder * * @return query or null */ + @Nullable public GroupByQuery toGroupByQuery( final DataSource dataSource, final PlannerContext plannerContext @@ -525,6 +580,7 @@ public class DruidQueryBuilder * * @return query or null */ + @Nullable public SelectQuery toSelectQuery( final DataSource dataSource, final PlannerContext plannerContext @@ -536,6 +592,7 @@ public class DruidQueryBuilder final Filtration filtration = Filtration.create(filter).optimize(sourceRowSignature); final boolean descending; + final int threshold; if (limitSpec != null) { // Safe to assume limitSpec has zero or one entry; DruidSelectSortRule wouldn't push in anything else. @@ -548,8 +605,11 @@ public class DruidQueryBuilder } else { descending = false; } + + threshold = limitSpec.getLimit(); } else { descending = false; + threshold = 0; } // We need to ask for dummy columns to prevent Select from returning all of them. @@ -576,6 +636,9 @@ public class DruidQueryBuilder metrics.add(dummyColumn); } + // Not used for actual queries (will be replaced by QueryMaker) but the threshold is important for the planner. 
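+ // A threshold of 0 means no limit was pushed into the select; QueryMaker swaps in real paging identifiers when the query is actually executed.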
+ final PagingSpec pagingSpec = new PagingSpec(null, threshold); + return new SelectQuery( dataSource, filtration.getQuerySegmentSpec(), @@ -585,7 +648,7 @@ public class DruidQueryBuilder ImmutableList.of(new DefaultDimensionSpec(dummyColumn, dummyColumn)), metrics.stream().sorted().distinct().collect(Collectors.toList()), getVirtualColumns(plannerContext.getExprMacroTable()), - new PagingSpec(null, 0) /* dummy -- will be replaced */, + pagingSpec, plannerContext.getQueryContext() ); } diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryRel.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryRel.java index 64d91a1e549..875ef579a60 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryRel.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidQueryRel.java @@ -20,13 +20,9 @@ package io.druid.sql.calcite.rel; import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableList; import io.druid.java.util.common.guava.Sequence; import io.druid.query.QueryDataSource; import io.druid.query.groupby.GroupByQuery; -import io.druid.segment.VirtualColumns; -import io.druid.sql.calcite.filtration.Filtration; -import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.table.DruidTable; import io.druid.sql.calcite.table.RowSignature; import org.apache.calcite.interpreter.BindableConvention; @@ -40,14 +36,22 @@ import org.apache.calcite.rel.RelWriter; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; +import javax.annotation.Nullable; +import java.io.IOException; + +/** + * DruidRel that uses a "table" dataSource. + */ public class DruidQueryRel extends DruidRel { // Factors used for computing cost (see computeSelfCost). These are intended to encourage pushing down filters // and limits through stacks of nested queries when possible. 
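// For example, with computeSelfCost below, a filtered groupBy with three aggregators and a finite limit costs 1.0 * 0.1 (filter) * 0.5 (grouping) + 3 * 0.001 (columns) = 0.053, then * 0.5 (limit) = 0.0265.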
private static final double COST_BASE = 1.0; + private static final double COST_PER_COLUMN = 0.001; private static final double COST_FILTER_MULTIPLIER = 0.1; private static final double COST_GROUPING_MULTIPLIER = 0.5; private static final double COST_LIMIT_MULTIPLIER = 0.5; + private static final double COST_HAVING_MULTIPLIER = 5.0; private final RelOptTable table; private final DruidTable druidTable; @@ -75,7 +79,6 @@ public class DruidQueryRel extends DruidRel final RelOptCluster cluster, final RelOptTable table, final DruidTable druidTable, - final PlannerContext plannerContext, final QueryMaker queryMaker ) { @@ -89,6 +92,7 @@ public class DruidQueryRel extends DruidRel ); } + @Nullable @Override public QueryDataSource asDataSource() { @@ -182,34 +186,18 @@ public class DruidQueryRel extends DruidRel @Override public RelWriter explainTerms(final RelWriter pw) { - pw.item("dataSource", druidTable.getDataSource()); - if (queryBuilder != null) { - final Filtration filtration = Filtration.create(queryBuilder.getFilter()).optimize(getSourceRowSignature()); - final VirtualColumns virtualColumns = queryBuilder.getVirtualColumns(getPlannerContext().getExprMacroTable()); - if (!virtualColumns.isEmpty()) { - pw.item("virtualColumns", virtualColumns); - } - if (!filtration.getIntervals().equals(ImmutableList.of(Filtration.eternity()))) { - pw.item("intervals", filtration.getIntervals()); - } - if (filtration.getDimFilter() != null) { - pw.item("filter", filtration.getDimFilter()); - } - if (queryBuilder.getSelectProjection() != null) { - pw.item("selectProjection", queryBuilder.getSelectProjection()); - } - if (queryBuilder.getGrouping() != null) { - pw.item("dimensions", queryBuilder.getGrouping().getDimensions()); - pw.item("aggregations", queryBuilder.getGrouping().getAggregations()); - } - if (queryBuilder.getHaving() != null) { - pw.item("having", queryBuilder.getHaving()); - } - if (queryBuilder.getLimitSpec() != null) { - pw.item("limitSpec", queryBuilder.getLimitSpec()); - } + final String queryString; + + try { + queryString = getQueryMaker() + .getJsonMapper() + .writeValueAsString(queryBuilder.toQuery(druidTable.getDataSource(), getPlannerContext())); } - return pw; + catch (IOException e) { + throw new RuntimeException(e); + } + + return pw.item("query", queryString); } @Override @@ -217,18 +205,29 @@ public class DruidQueryRel extends DruidRel { double cost = COST_BASE; + if (queryBuilder.getSelectProjection() != null) { + cost += COST_PER_COLUMN * queryBuilder.getSelectProjection().getVirtualColumns().size(); + cost += COST_PER_COLUMN * queryBuilder.getSelectProjection().getDirectColumns().size(); + } + if (queryBuilder.getFilter() != null) { cost *= COST_FILTER_MULTIPLIER; } if (queryBuilder.getGrouping() != null) { cost *= COST_GROUPING_MULTIPLIER; + cost += COST_PER_COLUMN * queryBuilder.getGrouping().getAggregatorFactories().size(); + cost += COST_PER_COLUMN * queryBuilder.getGrouping().getPostAggregators().size(); } - if (queryBuilder.getLimitSpec() != null) { + if (queryBuilder.getLimitSpec() != null && queryBuilder.getLimitSpec().getLimit() < Integer.MAX_VALUE) { cost *= COST_LIMIT_MULTIPLIER; } + if (queryBuilder.getHaving() != null) { + cost *= COST_HAVING_MULTIPLIER; + } + return planner.getCostFactory().makeCost(cost, 0, 0); } } diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java index d1071bc57f2..5b4a22ffc88 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java +++ 
b/sql/src/main/java/io/druid/sql/calcite/rel/DruidRel.java @@ -35,11 +35,13 @@ import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.rel.AbstractRelNode; +import javax.annotation.Nullable; + public abstract class DruidRel extends AbstractRelNode implements BindableRel { private final QueryMaker queryMaker; - public DruidRel(RelOptCluster cluster, RelTraitSet traitSet, QueryMaker queryMaker) + protected DruidRel(RelOptCluster cluster, RelTraitSet traitSet, QueryMaker queryMaker) { super(cluster, traitSet); this.queryMaker = queryMaker; @@ -74,6 +76,7 @@ public abstract class DruidRel extends AbstractRelNode imple * * @return query dataSource, or null if it is known in advance that this rel will yield an empty result set. */ + @Nullable public abstract QueryDataSource asDataSource(); public abstract T asBindable(); diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java index 65fafeeee9a..bd011a4590e 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.rel; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import io.druid.java.util.common.StringUtils; @@ -28,6 +29,7 @@ import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.query.QueryDataSource; import io.druid.query.ResourceLimitExceededException; +import io.druid.query.TableDataSource; import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.BoundDimFilter; import io.druid.query.filter.DimFilter; @@ -42,19 +44,26 @@ import org.apache.calcite.interpreter.BindableConvention; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptCost; import org.apache.calcite.plan.RelOptPlanner; +import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelTraitSet; +import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelWriter; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; +import javax.annotation.Nullable; +import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Set; +/** + * DruidRel that has a main query, and also a subquery "right" that is used to filter the main query. 
+ */ public class DruidSemiJoin extends DruidRel { private final DruidRel left; - private final DruidRel right; + private final RelNode right; private final List leftExpressions; private final List rightKeys; private final int maxSemiJoinRowsInMemory; @@ -62,14 +71,15 @@ public class DruidSemiJoin extends DruidRel private DruidSemiJoin( final RelOptCluster cluster, final RelTraitSet traitSet, - final DruidRel left, - final DruidRel right, + final DruidRel left, + final RelNode right, final List leftExpressions, final List rightKeys, - final int maxSemiJoinRowsInMemory + final int maxSemiJoinRowsInMemory, + final QueryMaker queryMaker ) { - super(cluster, traitSet, left.getQueryMaker()); + super(cluster, traitSet, queryMaker); this.left = left; this.right = right; this.leftExpressions = ImmutableList.copyOf(leftExpressions); @@ -114,7 +124,8 @@ public class DruidSemiJoin extends DruidRel right, listBuilder.build(), rightKeys, - plannerContext.getPlannerConfig().getMaxSemiJoinRowsInMemory() + plannerContext.getPlannerConfig().getMaxSemiJoinRowsInMemory(), + left.getQueryMaker() ); } @@ -140,10 +151,12 @@ public class DruidSemiJoin extends DruidRel right, leftExpressions, rightKeys, - maxSemiJoinRowsInMemory + maxSemiJoinRowsInMemory, + getQueryMaker() ); } + @Nullable @Override public QueryDataSource asDataSource() { @@ -158,10 +171,11 @@ public class DruidSemiJoin extends DruidRel getCluster(), getTraitSet().replace(BindableConvention.INSTANCE), left, - right, + RelOptRule.convert(right, BindableConvention.INSTANCE), leftExpressions, rightKeys, - maxSemiJoinRowsInMemory + maxSemiJoinRowsInMemory, + getQueryMaker() ); } @@ -172,17 +186,18 @@ public class DruidSemiJoin extends DruidRel getCluster(), getTraitSet().replace(DruidConvention.instance()), left, - right, + RelOptRule.convert(right, DruidConvention.instance()), leftExpressions, rightKeys, - maxSemiJoinRowsInMemory + maxSemiJoinRowsInMemory, + getQueryMaker() ); } @Override public int getQueryCount() { - return left.getQueryCount() + right.getQueryCount(); + return ((DruidRel) left).getQueryCount() + ((DruidRel) right).getQueryCount(); } @Override @@ -202,14 +217,47 @@ public class DruidSemiJoin extends DruidRel return left.getRowType(); } + @Override + public List getInputs() + { + return ImmutableList.of(right); + } + + @Override + public RelNode copy(final RelTraitSet traitSet, final List inputs) + { + return new DruidSemiJoin( + getCluster(), + getTraitSet(), + left, + Iterables.getOnlyElement(inputs), + leftExpressions, + rightKeys, + maxSemiJoinRowsInMemory, + getQueryMaker() + ); + } + @Override public RelWriter explainTerms(RelWriter pw) { - return pw - .item("leftExpressions", leftExpressions) - .item("leftQuery", left.getQueryBuilder()) - .item("rightKeys", rightKeys) - .item("rightQuery", right.getQueryBuilder()); + final TableDataSource dummyDataSource = new TableDataSource("__subquery__"); + final String queryString; + + try { + queryString = getQueryMaker() + .getJsonMapper() + .writeValueAsString(left.getQueryBuilder().toQuery(dummyDataSource, getPlannerContext())); + } + catch (IOException e) { + throw new RuntimeException(e); + } + + return super.explainTerms(pw) + .input("right", right) + .item("query", queryString) + .item("leftExpressions", leftExpressions) + .item("rightKeys", rightKeys); } @Override @@ -224,9 +272,11 @@ public class DruidSemiJoin extends DruidRel */ private DruidRel getLeftRelWithFilter() { + final DruidRel druidRight = (DruidRel) this.right; + // Build list of acceptable values from right side. 
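// The right-hand query is materialized eagerly here; the number of buffered value lists is bounded by maxSemiJoinRowsInMemory (exceeding it raises ResourceLimitExceededException).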
final Set> valuess = Sets.newHashSet(); - final List filters = right.runQuery().accumulate( + final List filters = druidRight.runQuery().accumulate( new ArrayList<>(), new Accumulator, Object[]>() { @@ -266,7 +316,7 @@ public class DruidSemiJoin extends DruidRel } else { bounds.add( new ExpressionDimFilter( - String.format( + StringUtils.format( "(%s == %s)", leftExpression.getExpression(), DruidExpression.stringLiteral(values.get(i)) diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java b/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java index 2faf3db2bd8..57ce93a3310 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/QueryMaker.java @@ -19,19 +19,20 @@ package io.druid.sql.calcite.rel; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.Iterables; import com.google.common.primitives.Ints; import io.druid.data.input.Row; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.ISE; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.math.expr.Evals; import io.druid.query.DataSource; import io.druid.query.Query; -import io.druid.query.QueryDataSource; import io.druid.query.Result; import io.druid.query.groupby.GroupByQuery; import io.druid.query.select.EventHolder; @@ -67,14 +68,17 @@ public class QueryMaker { private final QueryLifecycleFactory queryLifecycleFactory; private final PlannerContext plannerContext; + private final ObjectMapper jsonMapper; public QueryMaker( final QueryLifecycleFactory queryLifecycleFactory, - final PlannerContext plannerContext + final PlannerContext plannerContext, + final ObjectMapper jsonMapper ) { this.queryLifecycleFactory = queryLifecycleFactory; this.plannerContext = plannerContext; + this.jsonMapper = jsonMapper; } public PlannerContext getPlannerContext() @@ -82,42 +86,29 @@ public class QueryMaker return plannerContext; } + public ObjectMapper getJsonMapper() + { + return jsonMapper; + } + public Sequence runQuery( final DataSource dataSource, final DruidQueryBuilder queryBuilder ) { - if (dataSource instanceof QueryDataSource) { - final GroupByQuery outerQuery = queryBuilder.toGroupByQuery(dataSource, plannerContext); - if (outerQuery == null) { - // Bug in the planner rules. They shouldn't allow this to happen. 
- throw new IllegalStateException("Can't use QueryDataSource without an outer groupBy query!"); - } + final Query query = queryBuilder.toQuery(dataSource, plannerContext); - return executeGroupBy(queryBuilder, outerQuery); + if (query instanceof TimeseriesQuery) { + return executeTimeseries(queryBuilder, (TimeseriesQuery) query); + } else if (query instanceof TopNQuery) { + return executeTopN(queryBuilder, (TopNQuery) query); + } else if (query instanceof GroupByQuery) { + return executeGroupBy(queryBuilder, (GroupByQuery) query); + } else if (query instanceof SelectQuery) { + return executeSelect(queryBuilder, (SelectQuery) query); + } else { + throw new ISE("Cannot run query of class[%s]", query.getClass().getName()); } - - final TimeseriesQuery tsQuery = queryBuilder.toTimeseriesQuery(dataSource, plannerContext); - if (tsQuery != null) { - return executeTimeseries(queryBuilder, tsQuery); - } - - final TopNQuery topNQuery = queryBuilder.toTopNQuery(dataSource, plannerContext); - if (topNQuery != null) { - return executeTopN(queryBuilder, topNQuery); - } - - final GroupByQuery groupByQuery = queryBuilder.toGroupByQuery(dataSource, plannerContext); - if (groupByQuery != null) { - return executeGroupBy(queryBuilder, groupByQuery); - } - - final SelectQuery selectQuery = queryBuilder.toSelectQuery(dataSource, plannerContext); - if (selectQuery != null) { - return executeSelect(queryBuilder, selectQuery); - } - - throw new IllegalStateException("WTF?! Cannot execute query even though we planned it?"); } private Sequence executeSelect( @@ -373,33 +364,9 @@ public class QueryMaker } else if (value == null) { coercedValue = null; } else if (sqlType == SqlTypeName.DATE) { - final DateTime dateTime; - - if (value instanceof Number) { - dateTime = new DateTime(((Number) value).longValue()); - } else if (value instanceof String) { - dateTime = new DateTime(Long.parseLong((String) value)); - } else if (value instanceof DateTime) { - dateTime = (DateTime) value; - } else { - throw new ISE("Cannot coerce[%s] to %s", value.getClass().getName(), sqlType); - } - - return Calcites.jodaToCalciteDate(dateTime, plannerContext.getTimeZone()); + return Calcites.jodaToCalciteDate(coerceDateTime(value, sqlType), plannerContext.getTimeZone()); } else if (sqlType == SqlTypeName.TIMESTAMP) { - final DateTime dateTime; - - if (value instanceof Number) { - dateTime = new DateTime(((Number) value).longValue()); - } else if (value instanceof String) { - dateTime = new DateTime(Long.parseLong((String) value)); - } else if (value instanceof DateTime) { - dateTime = (DateTime) value; - } else { - throw new ISE("Cannot coerce[%s] to %s", value.getClass().getName(), sqlType); - } - - return Calcites.jodaToCalciteTimestamp(dateTime, plannerContext.getTimeZone()); + return Calcites.jodaToCalciteTimestamp(coerceDateTime(value, sqlType), plannerContext.getTimeZone()); } else if (sqlType == SqlTypeName.BOOLEAN) { if (value instanceof String) { coercedValue = Evals.asBoolean(((String) value)); @@ -446,4 +413,20 @@ public class QueryMaker return coercedValue; } + + private static DateTime coerceDateTime(Object value, SqlTypeName sqlType) + { + final DateTime dateTime; + + if (value instanceof Number) { + dateTime = DateTimes.utc(((Number) value).longValue()); + } else if (value instanceof String) { + dateTime = DateTimes.utc(Long.parseLong((String) value)); + } else if (value instanceof DateTime) { + dateTime = (DateTime) value; + } else { + throw new ISE("Cannot coerce[%s] to %s", value.getClass().getName(), sqlType); + } + return 
dateTime; + } } diff --git a/sql/src/main/java/io/druid/sql/calcite/rule/DruidFilterRule.java b/sql/src/main/java/io/druid/sql/calcite/rule/DruidFilterRule.java index f1cbf181406..7181c705817 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rule/DruidFilterRule.java +++ b/sql/src/main/java/io/druid/sql/calcite/rule/DruidFilterRule.java @@ -33,18 +33,22 @@ public class DruidFilterRule extends RelOptRule super(operand(Filter.class, operand(DruidRel.class, none()))); } + @Override + public boolean matches(final RelOptRuleCall call) + { + final DruidRel druidRel = call.rel(1); + + return druidRel.getQueryBuilder().getFilter() == null + && druidRel.getQueryBuilder().getSelectProjection() == null + && druidRel.getQueryBuilder().getGrouping() == null; + } + @Override public void onMatch(RelOptRuleCall call) { final Filter filter = call.rel(0); final DruidRel druidRel = call.rel(1); - if (druidRel.getQueryBuilder().getFilter() != null - || druidRel.getQueryBuilder().getSelectProjection() != null - || druidRel.getQueryBuilder().getGrouping() != null) { - return; - } - final DimFilter dimFilter = Expressions.toFilter( druidRel.getPlannerContext(), druidRel.getSourceRowSignature(), @@ -53,8 +57,7 @@ public class DruidFilterRule extends RelOptRule if (dimFilter != null) { call.transformTo( druidRel.withQueryBuilder( - druidRel.getQueryBuilder() - .withFilter(dimFilter) + druidRel.getQueryBuilder().withFilter(dimFilter) ) ); } diff --git a/sql/src/main/java/io/druid/sql/calcite/rule/DruidTableScanRule.java b/sql/src/main/java/io/druid/sql/calcite/rule/DruidTableScanRule.java index 7a8c947235a..f01b361d52f 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rule/DruidTableScanRule.java +++ b/sql/src/main/java/io/druid/sql/calcite/rule/DruidTableScanRule.java @@ -19,7 +19,6 @@ package io.druid.sql.calcite.rule; -import io.druid.sql.calcite.planner.PlannerContext; import io.druid.sql.calcite.rel.DruidQueryRel; import io.druid.sql.calcite.rel.QueryMaker; import io.druid.sql.calcite.table.DruidTable; @@ -30,16 +29,11 @@ import org.apache.calcite.rel.logical.LogicalTableScan; public class DruidTableScanRule extends RelOptRule { - private final PlannerContext plannerContext; private final QueryMaker queryMaker; - public DruidTableScanRule( - final PlannerContext plannerContext, - final QueryMaker queryMaker - ) + public DruidTableScanRule(final QueryMaker queryMaker) { super(operand(LogicalTableScan.class, any())); - this.plannerContext = plannerContext; this.queryMaker = queryMaker; } @@ -51,7 +45,7 @@ public class DruidTableScanRule extends RelOptRule final DruidTable druidTable = table.unwrap(DruidTable.class); if (druidTable != null) { call.transformTo( - DruidQueryRel.fullScan(scan.getCluster(), table, druidTable, plannerContext, queryMaker) + DruidQueryRel.fullScan(scan.getCluster(), table, druidTable, queryMaker) ); } } diff --git a/sql/src/main/java/io/druid/sql/calcite/rule/GroupByRules.java b/sql/src/main/java/io/druid/sql/calcite/rule/GroupByRules.java index 4e19c617cbf..5d5780a3a71 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rule/GroupByRules.java +++ b/sql/src/main/java/io/druid/sql/calcite/rule/GroupByRules.java @@ -25,6 +25,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.ExprMacroTable; import io.druid.math.expr.ExprType; import io.druid.query.aggregation.AggregatorFactory; @@ -60,7 +61,7 @@ 
import io.druid.sql.calcite.expression.SimpleExtraction; import io.druid.sql.calcite.filtration.Filtration; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.PlannerContext; -import io.druid.sql.calcite.rel.DruidNestedGroupBy; +import io.druid.sql.calcite.rel.DruidOuterQueryRel; import io.druid.sql.calcite.rel.DruidRel; import io.druid.sql.calcite.rel.Grouping; import io.druid.sql.calcite.table.RowSignature; @@ -114,7 +115,7 @@ public class GroupByRules { private DruidAggregateRule() { - super(operand(Aggregate.class, operand(DruidRel.class, none()))); + super(operand(Aggregate.class, operand(DruidRel.class, any()))); } @Override @@ -141,7 +142,7 @@ public class GroupByRules { private DruidAggregateProjectRule() { - super(operand(Aggregate.class, operand(Project.class, operand(DruidRel.class, none())))); + super(operand(Aggregate.class, operand(Project.class, operand(DruidRel.class, any())))); } @Override @@ -170,7 +171,7 @@ public class GroupByRules { private DruidAggregateProjectFilterRule() { - super(operand(Aggregate.class, operand(Project.class, operand(Filter.class, operand(DruidRel.class, none()))))); + super(operand(Aggregate.class, operand(Project.class, operand(Filter.class, operand(DruidRel.class, any()))))); } @Override @@ -206,7 +207,7 @@ public class GroupByRules { private DruidGroupByPostAggregationRule() { - super(operand(Project.class, operand(DruidRel.class, none()))); + super(operand(Project.class, operand(DruidRel.class, any()))); } @Override @@ -232,7 +233,7 @@ public class GroupByRules { private DruidGroupByHavingRule() { - super(operand(Filter.class, operand(DruidRel.class, none()))); + super(operand(Filter.class, operand(DruidRel.class, any()))); } @Override @@ -258,7 +259,7 @@ public class GroupByRules { private DruidGroupByLimitRule() { - super(operand(Sort.class, operand(DruidRel.class, none()))); + super(operand(Sort.class, operand(DruidRel.class, any()))); } @Override @@ -442,7 +443,7 @@ public class GroupByRules if (isNestedQuery) { // Nested groupBy. - return DruidNestedGroupBy.from(druidRel, filter, grouping, aggregate.getRowType(), rowOrder); + return DruidOuterQueryRel.from(druidRel, filter, grouping, aggregate.getRowType(), rowOrder); } else { // groupBy on a base dataSource or semiJoin. 
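// (The grouping is folded into the rel's existing DruidQueryBuilder rather than wrapping it in a DruidOuterQueryRel as in the nested case above.)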
return druidRel.withQueryBuilder( @@ -585,8 +586,7 @@ public class GroupByRules if (dimFilter != null) { return druidRel.withQueryBuilder( - druidRel.getQueryBuilder() - .withHaving(dimFilter) + druidRel.getQueryBuilder().withHaving(dimFilter) ); } else { return null; @@ -856,8 +856,8 @@ public class GroupByRules createMaxAggregatorFactory(aggregationType, name, fieldName, expression, macroTable) ); } else if (kind == SqlKind.AVG) { - final String sumName = String.format("%s:sum", name); - final String countName = String.format("%s:count", name); + final String sumName = StringUtils.format("%s:sum", name); + final String countName = StringUtils.format("%s:count", name); final AggregatorFactory sum = createSumAggregatorFactory( aggregationType, sumName, diff --git a/sql/src/main/java/io/druid/sql/calcite/rule/SelectRules.java b/sql/src/main/java/io/druid/sql/calcite/rule/SelectRules.java index 83a880a9ef0..22f6bdb33a9 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rule/SelectRules.java +++ b/sql/src/main/java/io/druid/sql/calcite/rule/SelectRules.java @@ -158,8 +158,7 @@ public class SelectRules (orderBys.size() == 1 && orderBys.get(0).getDimension().equals(Column.TIME_COLUMN_NAME))) { call.transformTo( druidRel.withQueryBuilder( - druidRel.getQueryBuilder() - .withLimitSpec(limitSpec) + druidRel.getQueryBuilder().withLimitSpec(limitSpec) ) ); } diff --git a/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java b/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java index 9915d84fd57..a662fc04f0a 100644 --- a/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java +++ b/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java @@ -33,6 +33,7 @@ import com.metamx.emitter.EmittingLogger; import io.druid.client.ServerView; import io.druid.client.TimelineServerView; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.guava.Sequence; @@ -58,7 +59,6 @@ import io.druid.sql.calcite.view.ViewManager; import io.druid.timeline.DataSegment; import org.apache.calcite.schema.Table; import org.apache.calcite.schema.impl.AbstractSchema; -import org.joda.time.DateTime; import java.io.IOException; import java.util.Comparator; @@ -182,7 +182,8 @@ public class DruidSchema extends AbstractSchema try { synchronized (lock) { - final long nextRefreshNoFuzz = new DateTime(lastRefresh) + final long nextRefreshNoFuzz = DateTimes + .utc(lastRefresh) .plus(config.getMetadataRefreshPeriod()) .getMillis(); diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java index bd96ce11057..1e6a5bb9a0b 100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -30,6 +30,7 @@ import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.Pair; import io.druid.java.util.common.StringUtils; import io.druid.math.expr.ExprMacroTable; @@ -125,7 +126,8 @@ public class DruidAvaticaHandlerTest CalciteTests.createMockQueryLifecycleFactory(walker), operatorTable, macroTable, - plannerConfig + 
plannerConfig, + CalciteTests.getJsonMapper() ), AVATICA_CONFIG ); @@ -199,8 +201,8 @@ public class DruidAvaticaHandlerTest Assert.assertEquals( ImmutableList.of( ImmutableMap.of( - "__time", new Timestamp(new DateTime("2000-01-01T00:00:00.000Z").getMillis()), - "t2", new Date(new DateTime("2000-01-01").getMillis()) + "__time", new Timestamp(DateTimes.of("2000-01-01T00:00:00.000Z").getMillis()), + "t2", new Date(DateTimes.of("2000-01-01").getMillis()) ) ), getRows(resultSet) @@ -256,7 +258,7 @@ public class DruidAvaticaHandlerTest ImmutableList.of( ImmutableMap.of( "PLAN", - "DruidQueryRel(dataSource=[foo], dimensions=[[]], aggregations=[[Aggregation{virtualColumns=[], aggregatorFactories=[CountAggregatorFactory{name='a0'}], postAggregator=null}]])\n" + "DruidQueryRel(query=[{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"context\":{\"skipEmptyBuckets\":true}}])\n" ) ), getRows(resultSet) @@ -564,7 +566,8 @@ public class DruidAvaticaHandlerTest CalciteTests.createMockQueryLifecycleFactory(walker), operatorTable, macroTable, - plannerConfig + plannerConfig, + CalciteTests.getJsonMapper() ), smallFrameConfig ) diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java index af10663a8a3..88e9fbd2415 100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidStatementTest.java @@ -21,6 +21,7 @@ package io.druid.sql.avatica; import com.google.common.base.Function; import com.google.common.collect.Lists; +import io.druid.java.util.common.DateTimes; import io.druid.math.expr.ExprMacroTable; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.DruidOperatorTable; @@ -32,7 +33,6 @@ import io.druid.sql.calcite.util.QueryLogHook; import io.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; import org.apache.calcite.avatica.ColumnMetaData; import org.apache.calcite.avatica.Meta; -import org.joda.time.DateTime; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -70,7 +70,8 @@ public class DruidStatementTest CalciteTests.createMockQueryLifecycleFactory(walker), operatorTable, macroTable, - plannerConfig + plannerConfig, + CalciteTests.getJsonMapper() ); } @@ -133,12 +134,12 @@ public class DruidStatementTest 0, true, Lists.newArrayList( - new Object[]{new DateTime("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, - new Object[]{new DateTime("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f}, - new Object[]{new DateTime("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, - new Object[]{new DateTime("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, - new Object[]{new DateTime("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, - new Object[]{new DateTime("2001-01-03").getMillis(), 1L, "abc", "", 6.0f} + new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, + new Object[]{DateTimes.of("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f}, + new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, + new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, + new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, + new 
Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", "", 6.0f} ) ), frame @@ -159,8 +160,8 @@ public class DruidStatementTest 0, false, Lists.newArrayList( - new Object[]{new DateTime("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, - new Object[]{new DateTime("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f} + new Object[]{DateTimes.of("2000-01-01").getMillis(), 1L, "", "a", 1.0f}, + new Object[]{DateTimes.of("2000-01-02").getMillis(), 1L, "10.1", "", 2.0f} ) ), frame @@ -174,10 +175,10 @@ public class DruidStatementTest 2, true, Lists.newArrayList( - new Object[]{new DateTime("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, - new Object[]{new DateTime("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, - new Object[]{new DateTime("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, - new Object[]{new DateTime("2001-01-03").getMillis(), 1L, "abc", "", 6.0f} + new Object[]{DateTimes.of("2000-01-03").getMillis(), 1L, "2", "", 3.0f}, + new Object[]{DateTimes.of("2001-01-01").getMillis(), 1L, "1", "a", 4.0f}, + new Object[]{DateTimes.of("2001-01-02").getMillis(), 1L, "def", "abc", 5.0f}, + new Object[]{DateTimes.of("2001-01-03").getMillis(), 1L, "abc", "", 6.0f} ) ), frame diff --git a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java index 156ed170bb3..5f13c8a2b89 100644 --- a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java @@ -23,6 +23,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.hll.HLLCV1; +import io.druid.java.util.common.DateTimes; +import io.druid.java.util.common.Intervals; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; @@ -100,6 +102,7 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Interval; import org.joda.time.Period; +import org.joda.time.chrono.ISOChronology; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -415,7 +418,7 @@ public class CalciteQueryTest ImmutableList.of(), ImmutableList.of( new Object[]{ - "DruidQueryRel(dataSource=[foo])\n" + "DruidQueryRel(query=[{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dummy\",\"outputName\":\"dummy\",\"outputType\":\"STRING\"}],\"metrics\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"m1\",\"m2\",\"unique_dim1\"],\"virtualColumns\":[],\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":0,\"fromNext\":true},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807}}])\n" } ) ); @@ -639,8 +642,8 @@ public class CalciteQueryTest new Object[]{ "BindableProject(dim1=[$9], dim10=[$2], dim2=[$3])\n" + " BindableJoin(condition=[=($9, $3)], joinType=[inner])\n" - + " DruidQueryRel(dataSource=[foo])\n" - + " DruidQueryRel(dataSource=[foo], filter=[!dim1 = ])\n" + + " 
DruidQueryRel(query=[{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dummy\",\"outputName\":\"dummy\",\"outputType\":\"STRING\"}],\"metrics\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"m1\",\"m2\",\"unique_dim1\"],\"virtualColumns\":[],\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":0,\"fromNext\":true},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807}}])\n" + + " DruidQueryRel(query=[{\"queryType\":\"select\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"\",\"extractionFn\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dummy\",\"outputName\":\"dummy\",\"outputType\":\"STRING\"}],\"metrics\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"m1\",\"m2\",\"unique_dim1\"],\"virtualColumns\":[],\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":0,\"fromNext\":true},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807}}])\n" } ) ); @@ -1599,11 +1602,17 @@ public class CalciteQueryTest ImmutableList.of(), ImmutableList.of( new Object[]{ - "DruidQueryRel(dataSource=[foo], " - + "filter=[(dim2 = a && !substring(0, 1)(dim1) = z)], " - + "dimensions=[[]], " - + "aggregations=[[Aggregation{virtualColumns=[], " - + "aggregatorFactories=[CountAggregatorFactory{name='a0'}], postAggregator=null}]])\n" + "DruidQueryRel(query=[{" + + "\"queryType\":\"timeseries\"," + + "\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"}," + + "\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]}," + + "\"descending\":false," + + "\"virtualColumns\":[]," + + "\"filter\":{\"type\":\"and\",\"fields\":[{\"type\":\"selector\",\"dimension\":\"dim2\",\"value\":\"a\",\"extractionFn\":null},{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"z\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}}]}," + + "\"granularity\":{\"type\":\"all\"}," + + "\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}]," + + "\"postAggregations\":[]," + + "\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"skipEmptyBuckets\":true,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"}}])\n" } ) ); @@ -2363,7 +2372,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000-01-01/2001-01-01"))) + .intervals(QSS(Intervals.of("2000-01-01/2001-01-01"))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -2386,7 +2395,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000-01-01/2001-01-01"))) + .intervals(QSS(Intervals.of("2000-01-01/2001-01-01"))) .granularity(Granularities.ALL) .aggregators(AGGS(new 
CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -2406,7 +2415,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000-01-01/2000-01-01T00:00:00.001"))) + .intervals(QSS(Intervals.of("2000-01-01/2000-01-01T00:00:00.001"))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -2429,8 +2438,8 @@ public class CalciteQueryTest .dataSource(CalciteTests.DATASOURCE1) .intervals( QSS( - new Interval("2000-01-01/2000-01-01T00:00:00.001"), - new Interval("2000-01-02/2000-01-02T00:00:00.001") + Intervals.of("2000-01-01/2000-01-01T00:00:00.001"), + Intervals.of("2000-01-02/2000-01-02T00:00:00.001") ) ) .granularity(Granularities.ALL) @@ -2460,7 +2469,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000/2001"), new Interval("2002-05-01/2003-05-01"))) + .intervals(QSS(Intervals.of("2000/2001"), Intervals.of("2002-05-01/2003-05-01"))) .granularity(Granularities.ALL) .filters( AND( @@ -2538,9 +2547,9 @@ public class CalciteQueryTest .dataSource(CalciteTests.DATASOURCE1) .intervals( QSS( - new Interval(Filtration.eternity().getStart(), new DateTime("2000")), - new Interval("2001/2003"), - new Interval(new DateTime("2004"), Filtration.eternity().getEnd()) + new Interval(DateTimes.MIN, DateTimes.of("2000")), + Intervals.of("2001/2003"), + new Interval(DateTimes.of("2004"), DateTimes.MAX) ) ) .filters(NOT(SELECTOR("dim1", "xxx", null))) @@ -2565,7 +2574,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000-01-01/2001-01-01"))) + .intervals(QSS(Intervals.of("2000-01-01/2001-01-01"))) .filters(NOT(SELECTOR("dim2", "a", null))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) @@ -2629,8 +2638,8 @@ public class CalciteQueryTest .filters( BOUND( "cnt", - String.valueOf(new DateTime("1970-01-01").getMillis()), - String.valueOf(new DateTime("1970-01-02").getMillis()), + String.valueOf(DateTimes.of("1970-01-01").getMillis()), + String.valueOf(DateTimes.of("1970-01-02").getMillis()), false, true, null, @@ -2662,8 +2671,8 @@ public class CalciteQueryTest .filters( BOUND( "cnt", - String.valueOf(new DateTime("1970-01-01").getMillis()), - String.valueOf(new DateTime("1970-01-02").getMillis()), + String.valueOf(DateTimes.of("1970-01-01").getMillis()), + String.valueOf(DateTimes.of("1970-01-02").getMillis()), false, true, null, @@ -2760,8 +2769,8 @@ public class CalciteQueryTest .setDimFilter( BOUND( "cnt", - String.valueOf(new DateTime("1970-01-01").getMillis()), - String.valueOf(new DateTime("1970-01-02").getMillis()), + String.valueOf(DateTimes.of("1970-01-01").getMillis()), + String.valueOf(DateTimes.of("1970-01-02").getMillis()), false, true, null, @@ -2976,9 +2985,10 @@ public class CalciteQueryTest "a1", null, DIMS(new DefaultDimensionSpec("dim2", null)), - false + false, + true ), - new HyperUniquesAggregatorFactory("a2", "unique_dim1") + new HyperUniquesAggregatorFactory("a2", "unique_dim1", false, true) ) ) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -3047,7 +3057,8 @@ public class CalciteQueryTest "a0", null, DIMS(new DefaultDimensionSpec("dim2", null)), - false + false, + true ) ) ) @@ -3077,8 +3088,8 @@ public class CalciteQueryTest 
.setInterval(QSS(Filtration.eternity())) .setGranularity(Granularities.ALL) .setDimensions(DIMS( - new DefaultDimensionSpec("dim2", "d0"), - new DefaultDimensionSpec("dim1", "d1") + new DefaultDimensionSpec("dim1", "d0"), + new DefaultDimensionSpec("dim2", "d1") )) .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) .setContext(QUERY_CONTEXT_DEFAULT) @@ -3087,12 +3098,12 @@ public class CalciteQueryTest ) .setInterval(QSS(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("d0", "d0"))) + .setDimensions(DIMS(new DefaultDimensionSpec("d1", "d0"))) .setAggregatorSpecs(AGGS( new LongSumAggregatorFactory("a0", "a0"), new FilteredAggregatorFactory( new CountAggregatorFactory("a1"), - NOT(SELECTOR("d1", "", null)) + NOT(SELECTOR("d0", "", null)) ) )) .setContext(QUERY_CONTEXT_DEFAULT) @@ -3133,19 +3144,22 @@ public class CalciteQueryTest "a1", null, DIMS(new DefaultDimensionSpec("dim2", "dim2")), - false + false, + true ), new FilteredAggregatorFactory( new CardinalityAggregatorFactory( "a2", null, DIMS(new DefaultDimensionSpec("dim2", "dim2")), - false + false, + true ), NOT(SELECTOR("dim2", "", null)) ), new CardinalityAggregatorFactory( "a3", + null, DIMS( new ExtractionDimensionSpec( "dim2", @@ -3154,14 +3168,17 @@ public class CalciteQueryTest new SubstringDimExtractionFn(0, 1) ) ), - false + false, + true ), new CardinalityAggregatorFactory( "a4", + null, DIMS(new DefaultDimensionSpec("a4:v", "a4:v", ValueType.STRING)), - false + false, + true ), - new HyperUniquesAggregatorFactory("a5", "unique_dim1") + new HyperUniquesAggregatorFactory("a5", "unique_dim1", false, true) ) ) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -3227,6 +3244,32 @@ public class CalciteQueryTest ); } + @Test + public void testExplainDoubleNestedGroupBy() throws Exception + { + testQuery( + "EXPLAIN PLAN FOR SELECT SUM(cnt), COUNT(*) FROM (\n" + + " SELECT dim2, SUM(t1.cnt) cnt FROM (\n" + + " SELECT\n" + + " dim1,\n" + + " dim2,\n" + + " COUNT(*) cnt\n" + + " FROM druid.foo\n" + + " GROUP BY dim1, dim2\n" + + " ) t1\n" + + " GROUP BY dim2\n" + + ") t2", + ImmutableList.of(), + ImmutableList.of( + new Object[]{ + "DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"longSum\",\"name\":\"a0\",\"fieldName\":\"a0\",\"expression\":null},{\"type\":\"count\",\"name\":\"a1\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" + + " 
DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"d1\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"aggregations\":[{\"type\":\"longSum\",\"name\":\"a0\",\"fieldName\":\"a0\",\"expression\":null}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" + + " DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\"},{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d1\",\"outputType\":\"STRING\"}],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" + } + ) + ); + } + @Test public void testExactCountDistinctUsingSubquery() throws Exception { @@ -3381,9 +3424,6 @@ public class CalciteQueryTest @Test public void testExactCountDistinctOfSemiJoinResult() throws Exception { - // TODO(gianm): Would be good to have existing filters like IN able to work on expressions? - // TODO(gianm): Would be good to have expression filters optimize for the case where they hit just one column. 
- testQuery( "SELECT COUNT(*)\n" + "FROM (\n" @@ -3438,6 +3478,29 @@ public class CalciteQueryTest ); } + @Test + public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception + { + testQuery( + "EXPLAIN PLAN FOR SELECT COUNT(*)\n" + + "FROM (\n" + + " SELECT DISTINCT dim2\n" + + " FROM druid.foo\n" + + " WHERE SUBSTRING(dim2, 1, 1) IN (\n" + + " SELECT SUBSTRING(dim1, 1, 1) FROM druid.foo WHERE dim1 <> ''\n" + + " )\n" + + ")", + ImmutableList.of(), + ImmutableList.of( + new Object[]{ + "DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" + + " DruidSemiJoin(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}], leftExpressions=[[DruidExpression{simpleExtraction=null, expression='substring(\"dim2\", 0, 1)'}]], rightKeys=[[0]])\n" + + " DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":\"\",\"extractionFn\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"aggregations\":[],\"postAggregations\":[],\"having\":null,\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807},\"descending\":false}])\n" + } + ) + ); + } + @Test public void testExactCountDistinctUsingSubqueryWithWherePushDown() throws Exception { @@ -3546,8 +3609,10 @@ public class CalciteQueryTest new CountAggregatorFactory("a0"), new CardinalityAggregatorFactory( "a1", + null, DIMS(new DefaultDimensionSpec("d0", null)), - false + false, + true ) )) .setPostAggregatorSpecs( @@ -3559,7 +3624,7 @@ public class CalciteQueryTest .build() ), ImmutableList.of( - new Object[]{5L, 5L, -0.12226936f} + new Object[]{5L, 5L, 0.0f} ) ); } @@ -3583,16 +3648,13 @@ public class CalciteQueryTest .setGranularity(Granularities.ALL) .setDimensions(DIMS(new DefaultDimensionSpec("dim2", "d0"))) .setAggregatorSpecs(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) - .setPostAggregatorSpecs(ImmutableList.of( 
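In the hunk above, `CardinalityAggregatorFactory` now takes an inserted `null` plus a trailing `true`, `HyperUniquesAggregatorFactory` gains two trailing booleans, and the expected approximate-distinct-count values change from raw HLL estimates (such as -0.12226936f) to whole numbers (0.0f). Reading the call sites, the new trailing flag rounds the estimate; the sketch below restates the constructor usage under that assumption (the argument meanings in the comments are an interpretation of these call sites, not taken from this diff):

    import com.google.common.collect.ImmutableList;
    import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory;
    import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
    import io.druid.query.dimension.DefaultDimensionSpec;
    import io.druid.query.dimension.DimensionSpec;

    public class RoundedDistinctCountSketch
    {
      public static void main(String[] args)
      {
        // As used in the tests: name, a second argument that is always null at these
        // call sites, the dimension specs to hash, byRow = false, and the new trailing
        // flag = true (which lines up with the expected values becoming whole numbers).
        CardinalityAggregatorFactory cardinality = new CardinalityAggregatorFactory(
            "a1",
            null,
            ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("dim2", null)),
            false,
            true
        );

        // Same pattern for hyperUnique columns: name, fieldName, then the two new booleans.
        HyperUniquesAggregatorFactory hyperUnique =
            new HyperUniquesAggregatorFactory("a2", "unique_dim1", false, true);

        System.out.println(cardinality.getName() + " / " + hyperUnique.getName());
      }
    }
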
- EXPRESSION_POST_AGG("p0", "CAST(\"a0\", 'STRING')") - )) .setContext(QUERY_CONTEXT_DEFAULT) .build() ) ) .setInterval(QSS(Filtration.eternity())) .setGranularity(Granularities.ALL) - .setDimensions(DIMS(new DefaultDimensionSpec("p0", "d0"))) + .setDimensions(DIMS(new DefaultDimensionSpec("a0", "d0"))) .setAggregatorSpecs(AGGS( new CountAggregatorFactory("a0") )) @@ -3680,7 +3742,8 @@ public class CalciteQueryTest "a1", null, DIMS(new DefaultDimensionSpec("dim2", null)), - false + false, + true ) ) ) @@ -3694,7 +3757,7 @@ public class CalciteQueryTest .build() ), ImmutableList.of( - new Object[]{6L, 3L, 3.0021994f, 1L, 4L, 4.9985347f} + new Object[]{6L, 3L, 3.0f, 2L, 5L, 5.0f} ) ); } @@ -3714,6 +3777,7 @@ public class CalciteQueryTest AGGS( new CardinalityAggregatorFactory( "a0", + null, DIMS( new ExtractionDimensionSpec( "dim1", @@ -3721,7 +3785,8 @@ public class CalciteQueryTest new SubstringDimExtractionFn(0, 1) ) ), - false + false, + true ) ) ) @@ -3911,7 +3976,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000/P2M"))) + .intervals(QSS(Intervals.of("2000/P2M"))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -3934,7 +3999,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000-01-01T01:02/2002"))) + .intervals(QSS(Intervals.of("2000-01-01T01:02/2002"))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -3971,7 +4036,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000-01-02T00Z/2002-01-01T08Z"))) + .intervals(QSS(Intervals.of("2000-01-02T00Z/2002-01-01T08Z"))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_LOS_ANGELES) @@ -3991,7 +4056,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000-01-02/2002"))) + .intervals(QSS(Intervals.of("2000-01-02/2002"))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -4016,7 +4081,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000-01-02T00Z/2002-01-01T08Z"))) + .intervals(QSS(Intervals.of("2000-01-02T00Z/2002-01-01T08Z"))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_LOS_ANGELES) @@ -4039,8 +4104,8 @@ public class CalciteQueryTest Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) .intervals(QSS( - new Interval(Filtration.eternity().getStart(), new DateTime("2001-01-01")), - new Interval(new DateTime("2001-02-01"), Filtration.eternity().getEnd()) + new Interval(DateTimes.MIN, DateTimes.of("2001-01-01")), + new Interval(DateTimes.of("2001-02-01"), DateTimes.MAX) )) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) @@ -4063,7 +4128,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new 
Interval(Filtration.eternity().getStart(), new DateTime("2000-02-01")))) + .intervals(QSS(new Interval(DateTimes.MIN, DateTimes.of("2000-02-01")))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -4085,7 +4150,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval(Filtration.eternity().getStart(), new DateTime("2000-03-01")))) + .intervals(QSS(new Interval(DateTimes.MIN, DateTimes.of("2000-03-01")))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -4108,7 +4173,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000/P1M"))) + .intervals(QSS(Intervals.of("2000/P1M"))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -4131,7 +4196,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000-02-01/P2M"), new Interval("2000-05-01/P1M"))) + .intervals(QSS(Intervals.of("2000-02-01/P2M"), Intervals.of("2000-05-01/P1M"))) .granularity(Granularities.ALL) .aggregators(AGGS(new CountAggregatorFactory("a0"))) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -4405,8 +4470,10 @@ public class CalciteQueryTest .aggregators(AGGS( new CardinalityAggregatorFactory( "a0", + null, ImmutableList.of(new ExtractionDimensionSpec("dim1", null, extractionFn)), - false + false, + true ) )) .context(TIMESERIES_CONTEXT_DEFAULT) @@ -4460,7 +4527,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000-01-01/2001-02-01"))) + .intervals(QSS(Intervals.of("2000-01-01/2001-02-01"))) .granularity(Granularities.ALL) .aggregators(AGGS( new FilteredAggregatorFactory( @@ -4618,7 +4685,7 @@ public class CalciteQueryTest .granularity( new PeriodGranularity( Period.months(1), - new DateTime("1970-01-01T01:02:03"), + DateTimes.of("1970-01-01T01:02:03"), DateTimeZone.UTC ) ) @@ -4710,7 +4777,7 @@ public class CalciteQueryTest ImmutableList.of( Druids.newTimeseriesQueryBuilder() .dataSource(CalciteTests.DATASOURCE1) - .intervals(QSS(new Interval("2000/2000-01-02"))) + .intervals(QSS(Intervals.of("2000/2000-01-02"))) .granularity(new PeriodGranularity(Period.hours(1), null, DateTimeZone.UTC)) .aggregators(AGGS(new LongSumAggregatorFactory("a0", "cnt"))) .context(QUERY_CONTEXT_DONT_SKIP_EMPTY_BUCKETS) @@ -5444,7 +5511,8 @@ public class CalciteQueryTest CalciteTests.createMockQueryLifecycleFactory(walker), operatorTable, macroTable, - plannerConfig + plannerConfig, + CalciteTests.getJsonMapper() ); viewManager.createView( @@ -5507,7 +5575,7 @@ public class CalciteQueryTest // Generate timestamps for expected results private static long T(final String timeString) { - return Calcites.jodaToCalciteTimestamp(new DateTime(timeString), DateTimeZone.UTC); + return Calcites.jodaToCalciteTimestamp(DateTimes.of(timeString), DateTimeZone.UTC); } // Generate timestamps for expected results @@ -5520,7 +5588,7 @@ public class CalciteQueryTest // Generate day numbers for expected results private static int D(final String dayString) { - return (int) (new Interval(T("1970"), T(dayString)).toDurationMillis() / (86400L * 1000L)); + 
return (int) (Intervals.utc(T("1970"), T(dayString)).toDurationMillis() / (86400L * 1000L)); } private static QuerySegmentSpec QSS(final Interval... intervals) @@ -5583,7 +5651,7 @@ public class CalciteQueryTest private static BoundDimFilter TIME_BOUND(final Object intervalObj) { - final Interval interval = new Interval(intervalObj); + final Interval interval = new Interval(intervalObj, ISOChronology.getInstanceUTC()); return new BoundDimFilter( Column.TIME_COLUMN_NAME, String.valueOf(interval.getStartMillis()), diff --git a/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java b/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java index b3498bd5862..a0ec577305a 100644 --- a/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/expression/ExpressionsTest.java @@ -21,6 +21,7 @@ package io.druid.sql.calcite.expression; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.DateTimes; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.math.expr.ExprEval; @@ -75,7 +76,7 @@ public class ExpressionsTest .add("dstr", ValueType.STRING) .build(); private final Map bindings = ImmutableMap.builder() - .put("t", new DateTime("2000-02-03T04:05:06").getMillis()) + .put("t", DateTimes.of("2000-02-03T04:05:06").getMillis()) .put("a", 10) .put("b", 20) .put("x", 2.5) @@ -155,11 +156,11 @@ public class ExpressionsTest testExpression( rexBuilder.makeCall( new TimeFloorOperatorConversion().calciteOperator(), - timestampLiteral(new DateTime("2000-02-03T04:05:06Z")), + timestampLiteral(DateTimes.of("2000-02-03T04:05:06Z")), rexBuilder.makeLiteral("PT1H") ), DruidExpression.fromExpression("timestamp_floor(949550706000,'PT1H','','UTC')"), - new DateTime("2000-02-03T04:00:00").getMillis() + DateTimes.of("2000-02-03T04:00:00").getMillis() ); testExpression( @@ -183,7 +184,7 @@ public class ExpressionsTest ), "timestamp_floor(\"t\",'P1D','','America/Los_Angeles')" ), - new DateTime("2000-02-02T08:00:00").getMillis() + DateTimes.of("2000-02-02T08:00:00").getMillis() ); } @@ -205,7 +206,7 @@ public class ExpressionsTest ), "timestamp_floor(\"t\",'P1Y','','UTC')" ), - new DateTime("2000").getMillis() + DateTimes.of("2000").getMillis() ); } @@ -221,7 +222,7 @@ public class ExpressionsTest rexBuilder.makeFlag(TimeUnitRange.YEAR) ), DruidExpression.fromExpression("timestamp_ceil(\"t\",'P1Y','','UTC')"), - new DateTime("2001").getMillis() + DateTimes.of("2001").getMillis() ); } @@ -236,7 +237,7 @@ public class ExpressionsTest rexBuilder.makeLiteral(-3, typeFactory.createSqlType(SqlTypeName.INTEGER), true) ), DruidExpression.fromExpression("timestamp_shift(\"t\",'PT2H',-3)"), - new DateTime("2000-02-02T22:05:06").getMillis() + DateTimes.of("2000-02-02T22:05:06").getMillis() ); } @@ -292,7 +293,7 @@ public class ExpressionsTest null, "(\"t\" + 90060000)" ), - new DateTime("2000-02-03T04:05:06").plus(period).getMillis() + DateTimes.of("2000-02-03T04:05:06").plus(period).getMillis() ); } @@ -314,7 +315,7 @@ public class ExpressionsTest null, "timestamp_shift(\"t\",concat('P', 13, 'M'),1)" ), - new DateTime("2000-02-03T04:05:06").plus(period).getMillis() + DateTimes.of("2000-02-03T04:05:06").plus(period).getMillis() ); } @@ -339,7 +340,7 @@ public class ExpressionsTest null, "(\"t\" - 90060000)" ), - new DateTime("2000-02-03T04:05:06").minus(period).getMillis() + 
DateTimes.of("2000-02-03T04:05:06").minus(period).getMillis() ); } @@ -364,7 +365,7 @@ public class ExpressionsTest null, "timestamp_shift(\"t\",concat('P', 13, 'M'),-1)" ), - new DateTime("2000-02-03T04:05:06").minus(period).getMillis() + DateTimes.of("2000-02-03T04:05:06").minus(period).getMillis() ); } @@ -378,7 +379,7 @@ public class ExpressionsTest rexBuilder.makeLiteral("yyyy-MM-dd HH:mm:ss") ), DruidExpression.fromExpression("timestamp_parse(\"tstr\",'yyyy-MM-dd HH:mm:ss')"), - new DateTime("2000-02-03T04:05:06").getMillis() + DateTimes.of("2000-02-03T04:05:06").getMillis() ); testExpression( @@ -389,7 +390,7 @@ public class ExpressionsTest rexBuilder.makeLiteral("America/Los_Angeles") ), DruidExpression.fromExpression("timestamp_parse(\"tstr\",'yyyy-MM-dd HH:mm:ss','America/Los_Angeles')"), - new DateTime("2000-02-03T04:05:06-08:00").getMillis() + DateTimes.of("2000-02-03T04:05:06-08:00").getMillis() ); } @@ -481,7 +482,7 @@ public class ExpressionsTest SimpleExtraction.of("t", null), "\"t\"" ), - new DateTime("2000-02-03T04:05:06Z").getMillis() + DateTimes.of("2000-02-03T04:05:06Z").getMillis() ); testExpression( @@ -493,7 +494,7 @@ public class ExpressionsTest null, "timestamp_parse(\"tstr\",'yyyy-MM-dd HH:mm:ss')" ), - new DateTime("2000-02-03T04:05:06Z").getMillis() + DateTimes.of("2000-02-03T04:05:06Z").getMillis() ); } @@ -526,7 +527,7 @@ public class ExpressionsTest SimpleExtraction.of("t", null), "\"t\"" ), - new DateTime("2000-02-03T04:05:06").getMillis() + DateTimes.of("2000-02-03T04:05:06").getMillis() ); } @@ -542,7 +543,7 @@ public class ExpressionsTest SimpleExtraction.of("t", new TimeFormatExtractionFn(null, null, null, Granularities.DAY, true)), "timestamp_floor(\"t\",'P1D','','UTC')" ), - new DateTime("2000-02-03").getMillis() + DateTimes.of("2000-02-03").getMillis() ); testExpression( @@ -553,7 +554,7 @@ public class ExpressionsTest DruidExpression.fromExpression( "timestamp_floor(timestamp_parse(\"dstr\",'yyyy-MM-dd'),'P1D','','UTC')" ), - new DateTime("2000-02-03").getMillis() + DateTimes.of("2000-02-03").getMillis() ); } @@ -586,7 +587,7 @@ public class ExpressionsTest SimpleExtraction.of("t", new TimeFormatExtractionFn(null, null, null, Granularities.DAY, true)), "timestamp_floor(\"t\",'P1D','','UTC')" ), - new DateTime("2000-02-03").getMillis() + DateTimes.of("2000-02-03").getMillis() ); } diff --git a/sql/src/test/java/io/druid/sql/calcite/filtration/FiltrationTest.java b/sql/src/test/java/io/druid/sql/calcite/filtration/FiltrationTest.java index e38d12c399c..059d62fb259 100644 --- a/sql/src/test/java/io/druid/sql/calcite/filtration/FiltrationTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/filtration/FiltrationTest.java @@ -20,10 +20,10 @@ package io.druid.sql.calcite.filtration; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.Intervals; import io.druid.query.filter.IntervalDimFilter; import io.druid.query.filter.NotDimFilter; import io.druid.segment.column.Column; -import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; @@ -36,7 +36,7 @@ public class FiltrationTest new NotDimFilter( new IntervalDimFilter( Column.TIME_COLUMN_NAME, - ImmutableList.of(new Interval("2000/2001"), new Interval("2002/2003")), + ImmutableList.of(Intervals.of("2000/2001"), Intervals.of("2002/2003")), null ) ), @@ -52,7 +52,7 @@ public class FiltrationTest new NotDimFilter( new IntervalDimFilter( Column.TIME_COLUMN_NAME, - ImmutableList.of(new Interval("2000/2001"), new Interval("2002/2003")), + 
ImmutableList.of(Intervals.of("2000/2001"), Intervals.of("2002/2003")), null ) ), diff --git a/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java b/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java index ab75a343d84..9113868f861 100644 --- a/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/http/SqlResourceTest.java @@ -84,7 +84,8 @@ public class SqlResourceTest CalciteTests.createMockQueryLifecycleFactory(walker), operatorTable, macroTable, - plannerConfig + plannerConfig, + CalciteTests.getJsonMapper() ) ); } @@ -187,7 +188,7 @@ public class SqlResourceTest ImmutableList.of( ImmutableMap.of( "PLAN", - "DruidQueryRel(dataSource=[foo], dimensions=[[]], aggregations=[[Aggregation{virtualColumns=[], aggregatorFactories=[CountAggregatorFactory{name='a0'}], postAggregator=null}]])\n" + "DruidQueryRel(query=[{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"context\":{\"skipEmptyBuckets\":true}}])\n" ) ), rows diff --git a/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java b/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java index 49497ac5fa4..119342966bd 100644 --- a/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/schema/DruidSchemaTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import io.druid.data.input.InputRow; +import io.druid.java.util.common.Intervals; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -45,7 +46,6 @@ import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.schema.Table; import org.apache.calcite.sql.type.SqlTypeName; -import org.joda.time.Interval; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -116,7 +116,7 @@ public class DruidSchemaTest walker = new SpecificSegmentsQuerySegmentWalker(CalciteTests.queryRunnerFactoryConglomerate()).add( DataSegment.builder() .dataSource(CalciteTests.DATASOURCE1) - .interval(new Interval("2000/P1Y")) + .interval(Intervals.of("2000/P1Y")) .version("1") .shardSpec(new LinearShardSpec(0)) .build(), @@ -124,7 +124,7 @@ public class DruidSchemaTest ).add( DataSegment.builder() .dataSource(CalciteTests.DATASOURCE1) - .interval(new Interval("2001/P1Y")) + .interval(Intervals.of("2001/P1Y")) .version("1") .shardSpec(new LinearShardSpec(0)) .build(), diff --git a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java index fc3f30e0c0a..27908c311e0 100644 --- a/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/io/druid/sql/calcite/util/CalciteTests.java @@ -99,6 +99,7 @@ import io.druid.sql.guice.SqlModule; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.joda.time.DateTime; +import org.joda.time.chrono.ISOChronology; import java.io.File; 
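Across these test hunks the pattern is consistent: string-parsed times and intervals move from the raw Joda constructors to the `DateTimes.of`/`Intervals.of` helpers, millis-based intervals use `Intervals.utc`, and the remaining direct Joda constructors pass `ISOChronology.getInstanceUTC()` explicitly, so expected values no longer depend on the JVM default time zone. A small before/after sketch, assuming the helpers parse ISO-8601 input in UTC as the replacements imply:

    import io.druid.java.util.common.DateTimes;
    import io.druid.java.util.common.Intervals;
    import org.joda.time.DateTime;
    import org.joda.time.Interval;
    import org.joda.time.chrono.ISOChronology;

    public class UtcTimeSketch
    {
      public static void main(String[] args)
      {
        // Old style: the chronology (and therefore the millis) follows the JVM default zone.
        DateTime zoned = new DateTime("2000-01-01");
        Interval zonedInterval = new Interval("2000/2001");

        // New style: always the ISO chronology in UTC.
        DateTime utc = DateTimes.of("2000-01-01");
        Interval utcInterval = Intervals.of("2000/2001");
        Interval utcFromMillis = Intervals.utc(utc.getMillis(), DateTimes.of("2001-01-01").getMillis());

        // Where a raw Joda constructor is still needed, the chronology is passed explicitly.
        Interval explicit = new Interval("2000/2001", ISOChronology.getInstanceUTC());

        System.out.println(zoned + " " + zonedInterval);
        System.out.println(utc + " " + utcInterval + " " + utcFromMillis + " " + explicit);
      }
    }
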
import java.nio.ByteBuffer; @@ -315,6 +316,11 @@ public class CalciteTests ); } + public static ObjectMapper getJsonMapper() + { + return INJECTOR.getInstance(Key.get(ObjectMapper.class, Json.class)); + } + public static SpecificSegmentsQuerySegmentWalker createMockWalker(final File tmpDir) { final QueryableIndex index1 = IndexBuilder.create() @@ -423,7 +429,7 @@ public class CalciteTests { return PARSER.parse( ImmutableMap.of( - "t", new DateTime(t).getMillis(), + "t", new DateTime(t, ISOChronology.getInstanceUTC()).getMillis(), "dim1", dim1, "dim2", dim2, "m1", m1
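
The new `CalciteTests.getJsonMapper()` above pulls the Jackson ObjectMapper that Druid binds under the `@Json` qualifier out of the test injector, which is what the planner factory call sites in the earlier hunks now receive as their extra constructor argument. A minimal sketch of the same Guice lookup pattern:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.google.inject.Injector;
    import com.google.inject.Key;
    import io.druid.guice.annotations.Json;

    public class AnnotatedBindingSketch
    {
      // Mirrors getJsonMapper(): ask the injector for the ObjectMapper bound with the
      // @Json qualifier rather than the unannotated binding.
      static ObjectMapper jsonMapperFrom(Injector injector)
      {
        return injector.getInstance(Key.get(ObjectMapper.class, Json.class));
      }
    }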