Mirror of https://github.com/apache/druid.git

Commit c0eecc0faa: Merge branch 'master' of https://github.com/druid-io/druid into streaming-grouper
.gitignore
@@ -13,3 +13,4 @@ target
 *.log
 *.DS_Store
 _site
+dependency-reduced-pom.xml

.travis.yml (11 changed lines)
@@ -1,6 +1,6 @@
 language: java

-dist: precise
+dist: trusty

 jdk:
   - oraclejdk8

@@ -19,17 +19,16 @@ matrix:

     # processing module test
     - sudo: false
-      install: mvn install -q -ff -DskipTests -B
-      script: mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl processing
+      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
+      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl processing

     # non-processing modules test
     - sudo: false
-      install: mvn install -q -ff -DskipTests -B
-      script: mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl '!processing'
+      install: echo "MAVEN_OPTS='-Xmx3000m'" > ~/.mavenrc && mvn install -q -ff -DskipTests -B
+      script: echo "MAVEN_OPTS='-Xmx512m'" > ~/.mavenrc && mvn test -B -Pparallel-test -Dmaven.fork.count=2 -pl '!processing'

     # run integration tests
     - sudo: required
       dist: trusty
       services:
         - docker
       env:
@@ -18,15 +18,19 @@
  */

 package io.druid.data.input;

+import io.druid.guice.annotations.ExtensionPoint;
+
 /**
  * Committer includes a Runnable and a Jackson-serialized metadata object containing the offset
  */
+@ExtensionPoint
 public interface Committer extends Runnable
 {
-  /**
-   * @return A json serialized representation of commit metadata,
-   * which needs to be serialized and deserialized by Jackson.
-   * Commit metadata can be a complex type, but we recommend keeping it to List/Map/"Primitive JSON" types
-   * */
-  public Object getMetadata();
+  /**
+   * @return A json serialized representation of commit metadata,
+   * which needs to be serialized and deserialized by Jackson.
+   * Commit metadata can be a complex type, but we recommend keeping it to List/Map/"Primitive JSON" types
+   */
+  public Object getMetadata();
 }
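To make the contract above concrete, here is a minimal hypothetical sketch of a Committer implementation (not part of this commit); the class and members below are invented and rely only on the interface shown above.

// Hypothetical example for illustration; not part of the commit above.
package io.druid.example;

import com.google.common.collect.ImmutableMap;
import io.druid.data.input.Committer;

import java.util.concurrent.atomic.AtomicBoolean;

public class OffsetCommitter implements Committer
{
  private final long offset;
  private final AtomicBoolean persisted = new AtomicBoolean(false);

  public OffsetCommitter(long offset)
  {
    this.offset = offset;
  }

  @Override
  public Object getMetadata()
  {
    // Keep metadata to Map/List/primitive JSON types, as the javadoc recommends,
    // so Jackson can round-trip it without custom serializers.
    return ImmutableMap.of("offset", offset);
  }

  @Override
  public void run()
  {
    // Invoked once the data covered by this commit has been persisted. This may
    // happen on another thread, so the work done here must be thread-safe.
    persisted.set(true);
  }
}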
@ -19,6 +19,8 @@
|
|||
|
||||
package io.druid.data.input;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.Closeable;
|
||||
|
||||
|
@ -36,6 +38,7 @@ import java.io.Closeable;
|
|||
* which will be called on another thread, so the operations inside of that callback must be thread-safe.
|
||||
* </p>
|
||||
*/
|
||||
@ExtensionPoint
|
||||
public interface Firehose extends Closeable
|
||||
{
|
||||
/**
|
||||
|
|
|
@ -22,6 +22,7 @@ package io.druid.data.input;
|
|||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import io.druid.data.input.impl.InputRowParser;
|
||||
import io.druid.data.input.impl.PrefetchableTextFilesFirehoseFactory;
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
import io.druid.java.util.common.parsers.ParseException;
|
||||
|
||||
import java.io.File;
|
||||
|
@ -32,6 +33,7 @@ import java.io.IOException;
|
|||
* It currently provides two methods for creating a {@link Firehose} and their default implementations call each other
|
||||
* for the backward compatibility. Implementations of this interface must implement one of these methods.
|
||||
*/
|
||||
@ExtensionPoint
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
|
||||
public interface FirehoseFactory<T extends InputRowParser>
|
||||
{
|
||||
|
|
|
@ -20,8 +20,8 @@
|
|||
package io.druid.data.input;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
|
||||
import io.druid.data.input.impl.InputRowParser;
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
import io.druid.java.util.common.parsers.ParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -37,6 +37,7 @@ import java.io.IOException;
|
|||
* value will throw a surprising NPE. Throwing IOException on connection failure or runtime exception on
|
||||
* invalid configuration is preferred over returning null.
|
||||
*/
|
||||
@ExtensionPoint
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
|
||||
public interface FirehoseFactoryV2<T extends InputRowParser>
|
||||
{
|
||||
|
|
|
@ -19,6 +19,8 @@
|
|||
|
||||
package io.druid.data.input;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
import java.io.Closeable;
|
||||
/**
|
||||
* This is an interface that holds onto the stream of incoming data. Realtime data ingestion is built around this
|
||||
|
@ -44,6 +46,7 @@ import java.io.Closeable;
|
|||
* which will be called on another thread, so the operations inside of that callback must be thread-safe.
|
||||
* </p>
|
||||
*/
|
||||
@ExtensionPoint
|
||||
public interface FirehoseV2 extends Closeable
|
||||
{
|
||||
/**
|
||||
|
|
|
@ -19,6 +19,8 @@
|
|||
|
||||
package io.druid.data.input;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
|
@ -28,8 +30,8 @@ import java.util.List;
|
|||
* implement "schema-less" data ingestion that allows the system to add new dimensions as they appear.
|
||||
*
|
||||
*/
|
||||
public interface
|
||||
InputRow extends Row
|
||||
@ExtensionPoint
|
||||
public interface InputRow extends Row
|
||||
{
|
||||
/**
|
||||
* Returns the dimensions that exist in this row.
|
||||
|
|
|
@ -19,6 +19,8 @@
|
|||
|
||||
package io.druid.data.input;
|
||||
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.java.util.common.DateTimes;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.util.List;
|
||||
|
@ -26,6 +28,7 @@ import java.util.Map;
|
|||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class MapBasedInputRow extends MapBasedRow implements InputRow
|
||||
{
|
||||
private final List<String> dimensions;
|
||||
|
@ -60,7 +63,7 @@ public class MapBasedInputRow extends MapBasedRow implements InputRow
|
|||
public String toString()
|
||||
{
|
||||
return "MapBasedInputRow{" +
|
||||
"timestamp=" + new DateTime(getTimestampFromEpoch()) +
|
||||
"timestamp=" + DateTimes.utc(getTimestampFromEpoch()) +
|
||||
", event=" + getEvent() +
|
||||
", dimensions=" + dimensions +
|
||||
'}';
|
||||
|
|
|
@ -22,6 +22,8 @@ package io.druid.data.input;
|
|||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.google.common.collect.Lists;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.java.util.common.DateTimes;
|
||||
import io.druid.java.util.common.parsers.ParseException;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
|
@ -32,6 +34,7 @@ import java.util.regex.Pattern;
|
|||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class MapBasedRow implements Row
|
||||
{
|
||||
private static final Pattern LONG_PAT = Pattern.compile("[-|+]?\\d+");
|
||||
|
@ -54,7 +57,7 @@ public class MapBasedRow implements Row
|
|||
Map<String, Object> event
|
||||
)
|
||||
{
|
||||
this(new DateTime(timestamp), event);
|
||||
this(DateTimes.utc(timestamp), event);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -21,6 +21,7 @@ package io.druid.data.input;
|
|||
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import java.util.List;
|
||||
|
@ -29,6 +30,7 @@ import java.util.List;
|
|||
* A Row of data. This can be used for both input and output into various parts of the system. It assumes
|
||||
* that the user already knows the schema of the row and can query for the parts that they care about.
|
||||
*/
|
||||
@PublicApi
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "version", defaultImpl = MapBasedRow.class)
|
||||
@JsonSubTypes(value = {
|
||||
@JsonSubTypes.Type(name = "v1", value = MapBasedRow.class)
|
||||
|
|
|
@ -23,33 +23,14 @@ import com.google.common.collect.ImmutableList;
|
|||
import com.google.common.collect.ImmutableSortedSet;
|
||||
import com.google.common.collect.Maps;
|
||||
|
||||
import io.druid.java.util.common.ISE;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class Rows
|
||||
{
|
||||
public static InputRow toCaseInsensitiveInputRow(final Row row, final List<String> dimensions)
|
||||
{
|
||||
if (row instanceof MapBasedRow) {
|
||||
MapBasedRow mapBasedRow = (MapBasedRow) row;
|
||||
|
||||
TreeMap<String, Object> caseInsensitiveMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
|
||||
caseInsensitiveMap.putAll(mapBasedRow.getEvent());
|
||||
return new MapBasedInputRow(
|
||||
mapBasedRow.getTimestamp(),
|
||||
dimensions,
|
||||
caseInsensitiveMap
|
||||
);
|
||||
}
|
||||
throw new ISE("Can only convert MapBasedRow objects because we are ghetto like that.");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param timeStamp rollup up timestamp to be used to create group key
|
||||
* @param inputRow input row
|
||||
|
|
|
@ -26,10 +26,12 @@ import com.fasterxml.jackson.annotation.JsonSubTypes;
|
|||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import com.fasterxml.jackson.annotation.JsonValue;
|
||||
import com.google.common.base.Preconditions;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.java.util.common.StringUtils;
|
||||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringDimensionSchema.class)
|
||||
@JsonSubTypes(value = {
|
||||
@JsonSubTypes.Type(name = DimensionSchema.STRING_TYPE_NAME, value = StringDimensionSchema.class),
|
||||
|
|
|
@ -28,7 +28,7 @@ import com.google.common.collect.ImmutableList;
|
|||
import com.google.common.collect.Iterables;
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.collect.Sets;
|
||||
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.java.util.common.parsers.ParserUtils;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
@ -37,7 +37,7 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
|
||||
@PublicApi
|
||||
public class DimensionsSpec
|
||||
{
|
||||
private final List<DimensionSchema> dimensions;
|
||||
|
|
|
@ -22,7 +22,9 @@ package io.druid.data.input.impl;
|
|||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import io.druid.data.input.InputRow;
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
@ExtensionPoint
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = StringInputRowParser.class)
|
||||
@JsonSubTypes(value = {
|
||||
@JsonSubTypes.Type(name = "string", value = StringInputRowParser.class),
|
||||
|
|
|
@ -22,7 +22,6 @@ package io.druid.data.input.impl;
|
|||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import io.druid.java.util.common.parsers.JSONToLowerParser;
|
||||
import io.druid.java.util.common.parsers.Parser;
|
||||
|
||||
|
|
|
@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
|
|||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonParser.Feature;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import io.druid.java.util.common.parsers.JSONPathParser;
|
||||
import io.druid.java.util.common.parsers.Parser;
|
||||
|
||||
|
|
|
@ -22,7 +22,6 @@ package io.druid.data.input.impl;
|
|||
import com.fasterxml.jackson.annotation.JacksonInject;
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
import io.druid.java.util.common.ISE;
|
||||
import io.druid.java.util.common.parsers.JavaScriptParser;
|
||||
import io.druid.java.util.common.parsers.Parser;
|
||||
|
|
|
@ -22,13 +22,12 @@ package io.druid.data.input.impl;
|
|||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
import io.druid.java.util.common.parsers.Parser;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*/
|
||||
@ExtensionPoint
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "format", defaultImpl = DelimitedParseSpec.class)
|
||||
@JsonSubTypes(value = {
|
||||
@JsonSubTypes.Type(name = "json", value = JSONParseSpec.class),
|
||||
|
|
|
@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonCreator;
|
|||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.google.common.base.Optional;
|
||||
import com.google.common.base.Preconditions;
|
||||
|
||||
import io.druid.java.util.common.parsers.Parser;
|
||||
import io.druid.java.util.common.parsers.RegexParser;
|
||||
|
||||
|
|
|
@ -22,6 +22,7 @@ package io.druid.data.input.impl;
|
|||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.google.common.base.Function;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.java.util.common.parsers.TimestampParser;
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
|
@ -31,6 +32,7 @@ import java.util.Objects;
|
|||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class TimestampSpec
|
||||
{
|
||||
private static class ParseCtx
|
||||
|
|
|
@ -22,16 +22,18 @@ package io.druid.guice;
|
|||
import com.google.inject.Binder;
|
||||
import com.google.inject.Key;
|
||||
import com.google.inject.multibindings.MapBinder;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.segment.loading.DataSegmentArchiver;
|
||||
import io.druid.segment.loading.DataSegmentFinder;
|
||||
import io.druid.segment.loading.DataSegmentMover;
|
||||
import io.druid.segment.loading.DataSegmentKiller;
|
||||
import io.druid.segment.loading.DataSegmentMover;
|
||||
import io.druid.segment.loading.DataSegmentPuller;
|
||||
import io.druid.segment.loading.DataSegmentPusher;
|
||||
import io.druid.tasklogs.TaskLogs;
|
||||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class Binders
|
||||
{
|
||||
public static MapBinder<String, DataSegmentPuller> dataSegmentPullerBinder(Binder binder)
|
||||
|
|
|
@ -23,6 +23,7 @@ import com.google.common.base.Predicate;
|
|||
import com.google.inject.Binder;
|
||||
import com.google.inject.TypeLiteral;
|
||||
import com.google.inject.multibindings.Multibinder;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.util.Properties;
|
||||
|
@ -43,6 +44,7 @@ import java.util.Properties;
|
|||
* At injection time, you will get the items that satisfy their corresponding predicates by calling
|
||||
* injector.getInstance(Key.get(new TypeLiteral<Set<Animal>>(){}))
|
||||
*/
|
||||
@PublicApi
|
||||
public class ConditionalMultibind<T>
|
||||
{
|
||||
|
||||
|
|
|
@ -21,9 +21,11 @@ package io.druid.guice;
|
|||
|
||||
import com.google.inject.Binder;
|
||||
import com.google.inject.Module;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class DruidGuiceExtensions implements Module
|
||||
{
|
||||
@Override
|
||||
|
|
|
@ -23,9 +23,11 @@ import com.google.inject.Key;
|
|||
import com.google.inject.Provider;
|
||||
import com.google.inject.Scope;
|
||||
import com.google.inject.Scopes;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class DruidScopes
|
||||
{
|
||||
public static final Scope SINGLETON = new Scope()
|
||||
|
|
|
@ -23,9 +23,11 @@ import com.google.inject.Binder;
|
|||
import com.google.inject.TypeLiteral;
|
||||
import com.google.inject.multibindings.Multibinder;
|
||||
import io.druid.guice.annotations.JSR311Resource;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class Jerseys
|
||||
{
|
||||
public static void addResource(Binder binder, Class<?> resourceClazz)
|
||||
|
|
|
@ -26,6 +26,7 @@ import com.google.inject.Inject;
|
|||
import com.google.inject.Key;
|
||||
import com.google.inject.Provider;
|
||||
import com.google.inject.util.Types;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
import java.lang.reflect.ParameterizedType;
|
||||
|
@ -76,6 +77,7 @@ import java.util.Properties;
|
|||
*
|
||||
* @param <T> type of config object to provide.
|
||||
*/
|
||||
@PublicApi
|
||||
public class JsonConfigProvider<T> implements Provider<Supplier<T>>
|
||||
{
|
||||
@SuppressWarnings("unchecked")
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package io.druid.guice;
|
||||
|
||||
import com.google.inject.ScopeAnnotation;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
|
@ -31,6 +32,7 @@ import java.lang.annotation.Target;
|
|||
@Target({ElementType.TYPE, ElementType.METHOD})
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@ScopeAnnotation
|
||||
@PublicApi
|
||||
public @interface LazySingleton
|
||||
{
|
||||
}
|
||||
|
|
|
@ -27,7 +27,6 @@ import com.google.inject.Provides;
|
|||
import com.google.inject.TypeLiteral;
|
||||
import com.google.inject.multibindings.Multibinder;
|
||||
import com.google.inject.name.Names;
|
||||
|
||||
import io.druid.java.util.common.lifecycle.Lifecycle;
|
||||
|
||||
import java.lang.annotation.Annotation;
|
||||
|
|
|
@ -23,7 +23,6 @@ import com.google.common.collect.Lists;
|
|||
import com.google.inject.Key;
|
||||
import com.google.inject.Provider;
|
||||
import com.google.inject.Scope;
|
||||
|
||||
import io.druid.java.util.common.lifecycle.Lifecycle;
|
||||
import io.druid.java.util.common.logger.Logger;
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package io.druid.guice;
|
||||
|
||||
import com.google.inject.ScopeAnnotation;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
|
@ -34,6 +35,7 @@ import java.lang.annotation.Target;
|
|||
@Target({ ElementType.TYPE, ElementType.METHOD })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@ScopeAnnotation
|
||||
@PublicApi
|
||||
public @interface ManageLifecycle
|
||||
{
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package io.druid.guice;
|
||||
|
||||
import com.google.inject.ScopeAnnotation;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
|
@ -34,6 +35,7 @@ import java.lang.annotation.Target;
|
|||
@Target({ ElementType.TYPE, ElementType.METHOD })
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@ScopeAnnotation
|
||||
@PublicApi
|
||||
public @interface ManageLifecycleLast
|
||||
{
|
||||
}
|
||||
|
|
@@ -30,6 +30,7 @@ import com.google.inject.TypeLiteral;
 import com.google.inject.binder.ScopedBindingBuilder;
 import com.google.inject.multibindings.MapBinder;
 import com.google.inject.util.Types;
+import io.druid.guice.annotations.PublicApi;
 import io.druid.java.util.common.StringUtils;

 import javax.annotation.Nullable;

@@ -45,6 +46,7 @@ import java.util.Properties;
  * returned by the optionBinder() method. Multiple different modules can call optionBinder and all options will be
  * reflected at injection time as long as equivalent interface Key objects are passed into the various methods.
  */
+@PublicApi
 public class PolyBind
 {
   /**
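A hedged sketch of the pattern this javadoc describes, in case it helps: optionBinder() is named in the javadoc above, while the createChoice call, the property name, and the example types below are assumptions and may not match the exact API.

// Hypothetical sketch; not part of this commit. The property key and types are invented.
import com.google.inject.Binder;
import com.google.inject.Key;
import com.google.inject.Module;
import io.druid.guice.PolyBind;

public class ExampleStorageModule implements Module
{
  // Placeholder types used only for this example.
  public interface ExampleStorage {}
  public static class LocalExampleStorage implements ExampleStorage {}

  @Override
  public void configure(Binder binder)
  {
    // One module declares the choice point, selected at runtime by a property (assumed name).
    PolyBind.createChoice(
        binder,
        "druid.example.storage.type",
        Key.get(ExampleStorage.class),
        Key.get(LocalExampleStorage.class)
    );

    // Any module can contribute options through the binder returned by optionBinder();
    // the property value decides which implementation is injected, as long as the same
    // interface Key is used everywhere.
    PolyBind.optionBinder(binder, Key.get(ExampleStorage.class))
            .addBinding("local")
            .to(LocalExampleStorage.class);
  }
}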
New file: io/druid/guice/annotations/ExtensionPoint.java
@@ -0,0 +1,50 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.guice.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Signifies that the annotated type is an extension point. Extension points are interfaces or non-final classes that
 * may be subclassed in extensions in order to add functionality to Druid. Extension points may change in breaking ways
 * only between major Druid release lines (e.g. 0.10.x -> 0.11.0), but otherwise must remain stable. Extension points
 * may change at any time in non-breaking ways, however, such as by adding new default methods to an interface.
 *
 * All public and protected fields, methods, and constructors of annotated classes and interfaces are considered
 * stable in this sense. If a class is not annotated, but an individual field, method, or constructor is
 * annotated, then only that particular field, method, or constructor is considered an extension API.
 *
 * Extension points are all considered public APIs in the sense of {@link PublicApi}, even if not explicitly annotated
 * as such.
 *
 * Note that there are a number of injectable interfaces that are not annotated with {@code ExtensionPoint}. You may
 * still extend these interfaces in extensions, but your extension may need to be recompiled even for a minor
 * update of Druid.
 *
 * @see PublicApi
 */
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.SOURCE)
public @interface ExtensionPoint
{
}
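As an illustration of how the new annotation is meant to be used (not part of this commit; the interface and its methods below are invented for the example):

// Hypothetical extension point; names are made up for illustration only.
package io.druid.example;

import io.druid.guice.annotations.ExtensionPoint;

@ExtensionPoint
public interface BlobStore
{
  byte[] read(String key);

  // Adding a new default method later is a non-breaking change for existing
  // extensions, which is the kind of evolution the javadoc above allows.
  default boolean exists(String key)
  {
    return read(key) != null;
  }
}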
|
@ -31,6 +31,7 @@ import java.lang.annotation.Target;
|
|||
@BindingAnnotation
|
||||
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@PublicApi
|
||||
public @interface Global
|
||||
{
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import java.lang.annotation.Target;
|
|||
@BindingAnnotation
|
||||
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@PublicApi
|
||||
public @interface JSR311Resource
|
||||
{
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import java.lang.annotation.Target;
|
|||
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@BindingAnnotation
|
||||
@PublicApi
|
||||
public @interface Json
|
||||
{
|
||||
}
|
||||
|
|
New file: io/druid/guice/annotations/PublicApi.java
@@ -0,0 +1,51 @@
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.guice.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Signifies that the annotated entity is a public API for extension authors. Public APIs may change in breaking ways
 * only between major Druid release lines (e.g. 0.10.x -> 0.11.0), but otherwise must remain stable. Public APIs may
 * change at any time in non-breaking ways, however, such as by adding new fields, methods, or constructors.
 *
 * Note that interfaces annotated with {@code PublicApi} but not with {@link ExtensionPoint} are not meant to be
 * subclassed in extensions. In this case, the annotation simply signifies that the interface is stable for callers.
 * In particular, since it is not meant to be subclassed, new non-default methods may be added to an interface and
 * new abstract methods may be added to a class.
 *
 * If a class or interface is annotated, then all public and protected fields, methods, and constructors of that class
 * or interface are considered stable in this sense. If a class is not annotated, but an individual field, method, or
 * constructor is annotated, then only that particular field, method, or constructor is considered a public API.
 *
 * Classes, fields, methods, and constructors _not_ annotated with {@code @PublicApi} may be modified or removed
 * in any Druid release, unless they are annotated with {@link ExtensionPoint} (which implies they are a public API
 * as well).
 *
 * @see ExtensionPoint
 */
@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.CONSTRUCTOR})
@Retention(RetentionPolicy.SOURCE)
public @interface PublicApi
{
}
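And a corresponding hypothetical sketch for @PublicApi, marking a class that extension authors call but are not expected to subclass (again, the class below is invented for the example):

// Hypothetical stable utility class; not part of this commit.
package io.druid.example;

import io.druid.guice.annotations.PublicApi;

@PublicApi
public final class SegmentIds
{
  private SegmentIds() {}

  // New static helpers can be added here in minor releases without breaking callers,
  // which is the non-breaking evolution the annotation's javadoc permits.
  public static String format(String dataSource, String version)
  {
    return dataSource + "_" + version;
  }
}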
|
@ -31,6 +31,7 @@ import java.lang.annotation.Target;
|
|||
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@BindingAnnotation
|
||||
@PublicApi
|
||||
public @interface Self
|
||||
{
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import java.lang.annotation.Target;
|
|||
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@BindingAnnotation
|
||||
@PublicApi
|
||||
public @interface Smile
|
||||
{
|
||||
}
|
||||
|
|
|
@ -20,11 +20,13 @@
|
|||
package io.druid.initialization;
|
||||
|
||||
import com.fasterxml.jackson.databind.Module;
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*/
|
||||
@ExtensionPoint
|
||||
public interface DruidModule extends com.google.inject.Module
|
||||
{
|
||||
public List<? extends Module> getJacksonModules();
|
||||
|
|
|
@ -21,7 +21,13 @@ package io.druid.js;
|
|||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
|
||||
/**
|
||||
* Should be used by extension filters, aggregators, etc, that use JavaScript to determine if JavaScript is enabled
|
||||
* or not.
|
||||
*/
|
||||
@PublicApi
|
||||
public class JavaScriptConfig
|
||||
{
|
||||
public static final int DEFAULT_OPTIMIZATION_LEVEL = 9;
|
||||
|
|
|
@ -21,6 +21,7 @@ package io.druid.segment;
|
|||
|
||||
import com.google.common.io.Files;
|
||||
import com.google.common.primitives.Ints;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.java.util.common.IOE;
|
||||
|
||||
import java.io.File;
|
||||
|
@ -29,7 +30,9 @@ import java.io.IOException;
|
|||
import java.io.InputStream;
|
||||
|
||||
/**
|
||||
* Utility methods useful for implementing deep storage extensions.
|
||||
*/
|
||||
@PublicApi
|
||||
public class SegmentUtils
|
||||
{
|
||||
public static int getVersionFromDir(File inDir) throws IOException
|
||||
|
|
|
@ -19,10 +19,12 @@
|
|||
|
||||
package io.druid.segment.loading;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
import io.druid.timeline.DataSegment;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
@ExtensionPoint
|
||||
public interface DataSegmentArchiver
|
||||
{
|
||||
/**
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package io.druid.segment.loading;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
import io.druid.timeline.DataSegment;
|
||||
|
||||
import java.util.Set;
|
||||
|
@ -27,6 +28,7 @@ import java.util.Set;
|
|||
* A DataSegmentFinder is responsible for finding Druid segments underneath a specified directory and optionally updates
|
||||
* all descriptor.json files on deep storage with correct loadSpec.
|
||||
*/
|
||||
@ExtensionPoint
|
||||
public interface DataSegmentFinder
|
||||
{
|
||||
/**
|
||||
|
|
|
@ -19,12 +19,14 @@
|
|||
|
||||
package io.druid.segment.loading;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
import io.druid.timeline.DataSegment;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*/
|
||||
@ExtensionPoint
|
||||
public interface DataSegmentKiller
|
||||
{
|
||||
void kill(DataSegment segments) throws SegmentLoadingException;
|
||||
|
|
|
@ -19,10 +19,12 @@
|
|||
|
||||
package io.druid.segment.loading;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
import io.druid.timeline.DataSegment;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
@ExtensionPoint
|
||||
public interface DataSegmentMover
|
||||
{
|
||||
public DataSegment move(DataSegment segment, Map<String, Object> targetLoadSpec) throws SegmentLoadingException;
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package io.druid.segment.loading;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
import io.druid.timeline.DataSegment;
|
||||
|
||||
import java.io.File;
|
||||
|
@ -26,6 +27,7 @@ import java.io.File;
|
|||
/**
|
||||
* A DataSegmentPuller is responsible for pulling data for a particular segment into a particular directory
|
||||
*/
|
||||
@ExtensionPoint
|
||||
public interface DataSegmentPuller
|
||||
{
|
||||
/**
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package io.druid.segment.loading;
|
||||
|
||||
import com.google.common.base.Joiner;
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
import io.druid.java.util.common.StringUtils;
|
||||
import io.druid.timeline.DataSegment;
|
||||
|
||||
|
@ -30,6 +31,7 @@ import java.util.Collections;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@ExtensionPoint
|
||||
public interface DataSegmentPusher
|
||||
{
|
||||
Joiner JOINER = Joiner.on("/").skipNulls();
|
||||
|
|
|
@ -20,12 +20,14 @@
|
|||
package io.druid.segment.loading;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
/**
|
||||
* A means of pulling segment files into a destination directory
|
||||
*/
|
||||
@ExtensionPoint
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
|
||||
public interface LoadSpec
|
||||
{
|
||||
|
|
|
@ -19,10 +19,12 @@
|
|||
|
||||
package io.druid.segment.loading;
|
||||
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.java.util.common.StringUtils;
|
||||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class SegmentLoadingException extends Exception
|
||||
{
|
||||
public SegmentLoadingException(
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package io.druid.segment.loading;
|
||||
|
||||
import com.google.common.base.Predicate;
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
@ -28,6 +29,7 @@ import java.net.URI;
|
|||
/**
|
||||
* A URIDataPuller has handlings for URI based data
|
||||
*/
|
||||
@ExtensionPoint
|
||||
public interface URIDataPuller
|
||||
{
|
||||
/**
|
||||
|
|
|
@ -21,7 +21,6 @@ package io.druid.tasklogs;
|
|||
|
||||
import com.google.common.base.Optional;
|
||||
import com.google.common.io.ByteSource;
|
||||
|
||||
import io.druid.java.util.common.logger.Logger;
|
||||
|
||||
import java.io.File;
|
||||
|
|
|
@ -19,10 +19,13 @@
|
|||
|
||||
package io.druid.tasklogs;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*/
|
||||
@ExtensionPoint
|
||||
public interface TaskLogKiller
|
||||
{
|
||||
void killAll() throws IOException;
|
||||
|
|
|
@ -19,12 +19,15 @@
|
|||
|
||||
package io.druid.tasklogs;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Something that knows how to persist local task logs to some form of long-term storage.
|
||||
*/
|
||||
@ExtensionPoint
|
||||
public interface TaskLogPusher
|
||||
{
|
||||
public void pushTaskLog(String taskid, File logFile) throws IOException;
|
||||
|
|
|
@ -21,12 +21,14 @@ package io.druid.tasklogs;
|
|||
|
||||
import com.google.common.base.Optional;
|
||||
import com.google.common.io.ByteSource;
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Something that knows how to stream logs for tasks.
|
||||
*/
|
||||
@ExtensionPoint
|
||||
public interface TaskLogStreamer
|
||||
{
|
||||
/**
|
||||
|
|
|
@ -19,6 +19,9 @@
|
|||
|
||||
package io.druid.tasklogs;
|
||||
|
||||
import io.druid.guice.annotations.ExtensionPoint;
|
||||
|
||||
@ExtensionPoint
|
||||
public interface TaskLogs extends TaskLogStreamer, TaskLogPusher, TaskLogKiller
|
||||
{
|
||||
}
|
||||
|
|
|
@ -31,6 +31,7 @@ import com.google.common.collect.ImmutableMap;
|
|||
import com.google.common.collect.Interner;
|
||||
import com.google.common.collect.Interners;
|
||||
import com.google.common.collect.Iterables;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.jackson.CommaListJoinDeserializer;
|
||||
import io.druid.jackson.CommaListJoinSerializer;
|
||||
import io.druid.java.util.common.granularity.Granularities;
|
||||
|
@ -46,6 +47,7 @@ import java.util.Map;
|
|||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class DataSegment implements Comparable<DataSegment>
|
||||
{
|
||||
public static String delimiter = "_";
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package io.druid.timeline;
|
||||
|
||||
import com.google.common.base.Function;
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.java.util.common.IAE;
|
||||
import io.druid.java.util.common.StringUtils;
|
||||
import io.druid.java.util.common.logger.Logger;
|
||||
|
@ -33,6 +34,7 @@ import java.util.Objects;
|
|||
/**
|
||||
* identifier to DataSegment.
|
||||
*/
|
||||
@PublicApi
|
||||
public class DataSegmentUtils
|
||||
{
|
||||
private static final Logger LOGGER = new Logger(DataSegmentUtils.class);
|
||||
|
@ -91,7 +93,7 @@ public class DataSegmentUtils
|
|||
|
||||
return new SegmentIdentifierParts(
|
||||
dataSource,
|
||||
new Interval(start.getMillis(), end.getMillis()),
|
||||
new Interval(start, end),
|
||||
version,
|
||||
trail
|
||||
);
|
||||
|
|
|
@ -28,7 +28,8 @@ import java.util.List;
|
|||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* A Marker interface that exists to combine ShardSpec objects together for Jackson
|
||||
* A Marker interface that exists to combine ShardSpec objects together for Jackson. Note that this is not an
|
||||
* extension API. Extensions are not expected to create new kinds of ShardSpecs.
|
||||
*/
|
||||
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
|
||||
@JsonSubTypes({
|
||||
|
|
|
@ -20,15 +20,17 @@
|
|||
package io.druid.utils;
|
||||
|
||||
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
import io.druid.java.util.common.logger.Logger;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
|
||||
import io.druid.java.util.common.logger.Logger;
|
||||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class CompressionUtils
|
||||
{
|
||||
private static final Logger log = new Logger(CompressionUtils.class);
|
||||
|
|
|
@ -19,8 +19,11 @@
|
|||
|
||||
package io.druid.utils;
|
||||
|
||||
import io.druid.guice.annotations.PublicApi;
|
||||
|
||||
/**
|
||||
*/
|
||||
@PublicApi
|
||||
public class Runnables
|
||||
{
|
||||
public static Runnable getNoopRunnable()
|
||||
|
|
|
@ -29,6 +29,7 @@ import com.fasterxml.jackson.databind.SerializationFeature;
|
|||
import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
|
||||
import com.fasterxml.jackson.databind.module.SimpleModule;
|
||||
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
|
||||
import io.druid.java.util.common.Intervals;
|
||||
import org.joda.time.Interval;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -61,7 +62,7 @@ public class TestObjectMapper extends ObjectMapper
|
|||
JsonParser jsonParser, DeserializationContext deserializationContext
|
||||
) throws IOException, JsonProcessingException
|
||||
{
|
||||
return new Interval(jsonParser.getText());
|
||||
return Intervals.of(jsonParser.getText());
|
||||
}
|
||||
}
|
||||
);
|
||||
|
|
|
@ -19,19 +19,18 @@
|
|||
|
||||
package io.druid.data.input;
|
||||
|
||||
import org.joda.time.DateTime;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.druid.java.util.common.DateTimes;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
|
||||
public class MapBasedRowTest
|
||||
{
|
||||
@Test
|
||||
public void testGetLongMetricFromString()
|
||||
{
|
||||
MapBasedRow row = new MapBasedRow(
|
||||
new DateTime(),
|
||||
DateTimes.nowUtc(),
|
||||
ImmutableMap.<String, Object>builder()
|
||||
.put("k0", "-1.2")
|
||||
.put("k1", "1.23")
|
||||
|
|
|
@ -27,8 +27,8 @@ import com.google.common.collect.Lists;
|
|||
import io.druid.TestObjectMapper;
|
||||
import io.druid.data.input.ByteBufferInputRowParser;
|
||||
import io.druid.data.input.InputRow;
|
||||
import io.druid.java.util.common.DateTimes;
|
||||
import io.druid.java.util.common.StringUtils;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -63,7 +63,7 @@ public class InputRowParserSerdeTest
|
|||
Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
|
||||
Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
|
||||
Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));
|
||||
Assert.assertEquals(new DateTime("2000").getMillis(), parsed.getTimestampFromEpoch());
|
||||
Assert.assertEquals(DateTimes.of("2000").getMillis(), parsed.getTimestampFromEpoch());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -79,7 +79,7 @@ public class InputRowParserSerdeTest
|
|||
Assert.assertEquals(ImmutableList.of("foo", "bar"), parsed.getDimensions());
|
||||
Assert.assertEquals(ImmutableList.of("x"), parsed.getDimension("foo"));
|
||||
Assert.assertEquals(ImmutableList.of("y"), parsed.getDimension("bar"));
|
||||
Assert.assertEquals(new DateTime("3000").getMillis(), parsed.getTimestampFromEpoch());
|
||||
Assert.assertEquals(DateTimes.of("3000").getMillis(), parsed.getTimestampFromEpoch());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -219,7 +219,7 @@ public class InputRowParserSerdeTest
|
|||
Assert.assertEquals(ImmutableList.of("asdf"), parsed.getDimension("hey0barx"));
|
||||
Assert.assertEquals(ImmutableList.of("456"), parsed.getDimension("metA"));
|
||||
Assert.assertEquals(ImmutableList.of("5"), parsed.getDimension("newmet"));
|
||||
Assert.assertEquals(new DateTime("2999").getMillis(), parsed.getTimestampFromEpoch());
|
||||
Assert.assertEquals(DateTimes.of("2999").getMillis(), parsed.getTimestampFromEpoch());
|
||||
|
||||
String testSpec = "{\"enabled\": true,\"useFieldDiscovery\": true, \"fields\": [\"parseThisRootField\"]}";
|
||||
final JSONPathSpec parsedSpec = jsonMapper.readValue(testSpec, JSONPathSpec.class);
|
||||
|
|
|
@ -20,7 +20,6 @@
|
|||
package io.druid.data.input.impl;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import io.druid.java.util.common.parsers.Parser;
|
||||
import junit.framework.Assert;
|
||||
import org.junit.Test;
|
||||
|
|
|
@ -20,9 +20,7 @@
|
|||
package io.druid.data.input.impl;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import io.druid.java.util.common.parsers.ParseException;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
|
|
@ -24,10 +24,10 @@ import com.google.common.base.Preconditions;
|
|||
import com.google.common.collect.Lists;
|
||||
import io.druid.data.input.Firehose;
|
||||
import io.druid.data.input.Row;
|
||||
import io.druid.java.util.common.DateTimes;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.io.filefilter.TrueFileFilter;
|
||||
import org.hamcrest.CoreMatchers;
|
||||
import org.joda.time.DateTime;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Assert;
|
||||
import org.junit.BeforeClass;
|
||||
|
@ -124,7 +124,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
|
|||
for (int i = 0; i < 10; i++) {
|
||||
for (int j = 0; j < 100; j++) {
|
||||
final Row row = rows.get(i * 100 + j);
|
||||
Assert.assertEquals(new DateTime(20171220 + i), row.getTimestamp());
|
||||
Assert.assertEquals(DateTimes.utc(20171220 + i), row.getTimestamp());
|
||||
Assert.assertEquals(String.valueOf(i), row.getDimension("a").get(0));
|
||||
Assert.assertEquals(String.valueOf(j), row.getDimension("b").get(0));
|
||||
}
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package io.druid.data.input.impl;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import io.druid.java.util.common.DateTimes;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.format.ISODateTimeFormat;
|
||||
import org.junit.Assert;
|
||||
|
@ -32,7 +33,7 @@ public class TimestampSpecTest
|
|||
{
|
||||
TimestampSpec spec = new TimestampSpec("TIMEstamp", "yyyy-MM-dd", null);
|
||||
Assert.assertEquals(
|
||||
new DateTime("2014-03-01"),
|
||||
DateTimes.of("2014-03-01"),
|
||||
spec.extractTimestamp(ImmutableMap.<String, Object>of("TIMEstamp", "2014-03-01"))
|
||||
);
|
||||
}
|
||||
|
@ -40,9 +41,9 @@ public class TimestampSpecTest
|
|||
@Test
|
||||
public void testExtractTimestampWithMissingTimestampColumn() throws Exception
|
||||
{
|
||||
TimestampSpec spec = new TimestampSpec(null, null, new DateTime(0));
|
||||
TimestampSpec spec = new TimestampSpec(null, null, DateTimes.EPOCH);
|
||||
Assert.assertEquals(
|
||||
new DateTime("1970-01-01"),
|
||||
DateTimes.of("1970-01-01"),
|
||||
spec.extractTimestamp(ImmutableMap.<String, Object>of("dim", "foo"))
|
||||
);
|
||||
}
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package io.druid.timeline;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
|
@ -28,11 +27,13 @@ import com.google.common.collect.Range;
|
|||
import com.google.common.collect.Sets;
|
||||
import io.druid.TestObjectMapper;
|
||||
import io.druid.data.input.InputRow;
|
||||
import io.druid.java.util.common.jackson.JacksonUtils;
|
||||
import io.druid.java.util.common.DateTimes;
|
||||
import io.druid.java.util.common.Intervals;
|
||||
import io.druid.timeline.partition.NoneShardSpec;
|
||||
import io.druid.timeline.partition.PartitionChunk;
|
||||
import io.druid.timeline.partition.ShardSpec;
|
||||
import io.druid.timeline.partition.ShardSpecLookup;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.Interval;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
@ -90,7 +91,7 @@ public class DataSegmentTest
|
|||
public void testV1Serialization() throws Exception
|
||||
{
|
||||
|
||||
final Interval interval = new Interval("2011-10-01/2011-10-02");
|
||||
final Interval interval = Intervals.of("2011-10-01/2011-10-02");
|
||||
final ImmutableMap<String, Object> loadSpec = ImmutableMap.<String, Object>of("something", "or_other");
|
||||
|
||||
DataSegment segment = new DataSegment(
|
||||
|
@ -107,9 +108,7 @@ public class DataSegmentTest
|
|||
|
||||
final Map<String, Object> objectMap = mapper.readValue(
|
||||
mapper.writeValueAsString(segment),
|
||||
new TypeReference<Map<String, Object>>()
|
||||
{
|
||||
}
|
||||
JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
|
||||
);
|
||||
|
||||
Assert.assertEquals(10, objectMap.size());
|
||||
|
@ -150,8 +149,8 @@ public class DataSegmentTest
|
|||
{
|
||||
final DataSegment segment = DataSegment.builder()
|
||||
.dataSource("foo")
|
||||
.interval(new Interval("2012-01-01/2012-01-02"))
|
||||
.version(new DateTime("2012-01-01T11:22:33.444Z").toString())
|
||||
.interval(Intervals.of("2012-01-01/2012-01-02"))
|
||||
.version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
|
||||
.shardSpec(NoneShardSpec.instance())
|
||||
.build();
|
||||
|
||||
|
@ -166,8 +165,8 @@ public class DataSegmentTest
|
|||
{
|
||||
final DataSegment segment = DataSegment.builder()
|
||||
.dataSource("foo")
|
||||
.interval(new Interval("2012-01-01/2012-01-02"))
|
||||
.version(new DateTime("2012-01-01T11:22:33.444Z").toString())
|
||||
.interval(Intervals.of("2012-01-01/2012-01-02"))
|
||||
.version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
|
||||
.shardSpec(getShardSpec(0))
|
||||
.build();
|
||||
|
||||
|
@ -182,8 +181,8 @@ public class DataSegmentTest
|
|||
{
|
||||
final DataSegment segment = DataSegment.builder()
|
||||
.dataSource("foo")
|
||||
.interval(new Interval("2012-01-01/2012-01-02"))
|
||||
.version(new DateTime("2012-01-01T11:22:33.444Z").toString())
|
||||
.interval(Intervals.of("2012-01-01/2012-01-02"))
|
||||
.version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
|
||||
.shardSpec(getShardSpec(7))
|
||||
.build();
|
||||
|
||||
|
@ -198,8 +197,8 @@ public class DataSegmentTest
|
|||
{
|
||||
final DataSegment segment = DataSegment.builder()
|
||||
.dataSource("foo")
|
||||
.interval(new Interval("2012-01-01/2012-01-02"))
|
||||
.version(new DateTime("2012-01-01T11:22:33.444Z").toString())
|
||||
.interval(Intervals.of("2012-01-01/2012-01-02"))
|
||||
.version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
|
||||
.build();
|
||||
|
||||
final DataSegment segment2 = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
|
||||
|
@ -240,7 +239,7 @@ public class DataSegmentTest
|
|||
{
|
||||
return DataSegment.builder()
|
||||
.dataSource(dataSource)
|
||||
.interval(new Interval(interval))
|
||||
.interval(Intervals.of(interval))
|
||||
.version(version)
|
||||
.size(1)
|
||||
.build();
|
||||
|
|
|
@ -19,8 +19,8 @@
|
|||
|
||||
package io.druid.timeline;
|
||||
|
||||
import io.druid.java.util.common.Intervals;
|
||||
import io.druid.timeline.DataSegmentUtils.SegmentIdentifierParts;
|
||||
import org.joda.time.Interval;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -32,19 +32,19 @@ public class DataSegmentUtilsTest
|
|||
public void testBasic()
|
||||
{
|
||||
String datasource = "datasource";
|
||||
SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", "0_0");
|
||||
SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0");
|
||||
Assert.assertEquals("datasource_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0", desc.toString());
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
|
||||
|
||||
desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D"));
|
||||
desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
|
||||
Assert.assertEquals("datasource_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0", desc.toString());
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
|
||||
|
||||
desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", null);
|
||||
desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", null);
|
||||
Assert.assertEquals("datasource_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver", desc.toString());
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
|
||||
|
||||
desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D"));
|
||||
desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
|
||||
Assert.assertEquals("datasource_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver", desc.toString());
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
|
||||
}
|
||||
|
@ -53,19 +53,19 @@ public class DataSegmentUtilsTest
|
|||
public void testDataSourceWithUnderscore1()
|
||||
{
|
||||
String datasource = "datasource_1";
|
||||
SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", "0_0");
|
||||
SegmentIdentifierParts desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0");
|
||||
Assert.assertEquals("datasource_1_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0", desc.toString());
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
|
||||
|
||||
desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D"));
|
||||
desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
|
||||
Assert.assertEquals("datasource_1_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0", desc.toString());
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
|
||||
|
||||
desc = new SegmentIdentifierParts(datasource, new Interval("2015-01-02/2015-01-03"), "ver", null);
|
||||
desc = new SegmentIdentifierParts(datasource, Intervals.of("2015-01-02/2015-01-03"), "ver", null);
|
||||
Assert.assertEquals("datasource_1_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver", desc.toString());
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
|
||||
|
||||
desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D"));
|
||||
desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
|
||||
Assert.assertEquals("datasource_1_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver", desc.toString());
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(datasource, desc.toString()));
|
||||
}
|
||||
|
@ -74,28 +74,28 @@ public class DataSegmentUtilsTest
|
|||
public void testDataSourceWithUnderscore2()
|
||||
{
|
||||
String dataSource = "datasource_2015-01-01T00:00:00.000Z";
|
||||
SegmentIdentifierParts desc = new SegmentIdentifierParts(dataSource, new Interval("2015-01-02/2015-01-03"), "ver", "0_0");
|
||||
SegmentIdentifierParts desc = new SegmentIdentifierParts(dataSource, Intervals.of("2015-01-02/2015-01-03"), "ver", "0_0");
|
||||
Assert.assertEquals(
|
||||
"datasource_2015-01-01T00:00:00.000Z_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver_0_0",
|
||||
desc.toString()
|
||||
);
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString()));
|
||||
|
||||
desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D"));
|
||||
desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
|
||||
Assert.assertEquals(
|
||||
"datasource_2015-01-01T00:00:00.000Z_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver_0_0",
|
||||
desc.toString()
|
||||
);
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString()));
|
||||
|
||||
desc = new SegmentIdentifierParts(dataSource, new Interval("2015-01-02/2015-01-03"), "ver", null);
|
||||
desc = new SegmentIdentifierParts(dataSource, Intervals.of("2015-01-02/2015-01-03"), "ver", null);
|
||||
Assert.assertEquals(
|
||||
"datasource_2015-01-01T00:00:00.000Z_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver",
|
||||
desc.toString()
|
||||
);
|
||||
Assert.assertEquals(desc, DataSegmentUtils.valueOf(dataSource, desc.toString()));
|
||||
|
||||
desc = desc.withInterval(new Interval("2014-10-20T00:00:00Z/P1D"));
|
||||
desc = desc.withInterval(Intervals.of("2014-10-20T00:00:00Z/P1D"));
|
||||
Assert.assertEquals(
|
||||
"datasource_2015-01-01T00:00:00.000Z_2014-10-20T00:00:00.000Z_2014-10-21T00:00:00.000Z_ver",
|
||||
desc.toString()
|
||||
|
|
|
@ -24,6 +24,7 @@ import com.google.common.collect.Iterables;
|
|||
import io.druid.benchmark.datagen.BenchmarkColumnSchema;
|
||||
import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
|
||||
import io.druid.benchmark.datagen.SegmentGenerator;
|
||||
import io.druid.java.util.common.Intervals;
|
||||
import io.druid.java.util.common.granularity.Granularities;
|
||||
import io.druid.java.util.common.guava.Sequence;
|
||||
import io.druid.java.util.common.guava.Sequences;
|
||||
|
@ -41,7 +42,6 @@ import io.druid.segment.VirtualColumns;
|
|||
import io.druid.segment.column.ValueType;
|
||||
import io.druid.timeline.DataSegment;
|
||||
import io.druid.timeline.partition.LinearShardSpec;
|
||||
import org.joda.time.Interval;
|
||||
import org.openjdk.jmh.annotations.Benchmark;
|
||||
import org.openjdk.jmh.annotations.BenchmarkMode;
|
||||
import org.openjdk.jmh.annotations.Fork;
|
||||
|
@ -89,7 +89,7 @@ public class ExpressionBenchmark
|
|||
BenchmarkColumnSchema.makeNormal("y", ValueType.FLOAT, false, 1, 0d, 0d, 10000d, false)
|
||||
),
|
||||
ImmutableList.of(),
|
||||
new Interval("2000/P1D"),
|
||||
Intervals.of("2000/P1D"),
|
||||
false
|
||||
);
|
||||
|
||||
|
@ -176,7 +176,7 @@ public class ExpressionBenchmark
|
|||
Sequences.map(
|
||||
cursors,
|
||||
cursor -> {
|
||||
final BufferAggregator bufferAggregator = aggregatorFactory.apply(cursor);
|
||||
final BufferAggregator bufferAggregator = aggregatorFactory.apply(cursor.getColumnSelectorFactory());
|
||||
bufferAggregator.init(aggregationBuffer, 0);
|
||||
|
||||
while (!cursor.isDone()) {
|
||||
|
|
|
@ -514,7 +514,9 @@ public class FilterPartitionBenchmark
|
|||
{
|
||||
List<String> strings = new ArrayList<String>();
|
||||
List<DimensionSelector> selectors = new ArrayList<>();
|
||||
selectors.add(input.makeDimensionSelector(new DefaultDimensionSpec("dimSequential", null)));
|
||||
selectors.add(
|
||||
input.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec("dimSequential", null))
|
||||
);
|
||||
//selectors.add(input.makeDimensionSelector(new DefaultDimensionSpec("dimB", null)));
|
||||
while (!input.isDone()) {
|
||||
for (DimensionSelector selector : selectors) {
|
||||
|
@ -540,7 +542,7 @@ public class FilterPartitionBenchmark
|
|||
public List<Long> apply(Cursor input)
|
||||
{
|
||||
List<Long> longvals = new ArrayList<Long>();
|
||||
LongColumnSelector selector = input.makeLongColumnSelector("sumLongSequential");
|
||||
LongColumnSelector selector = input.getColumnSelectorFactory().makeLongColumnSelector("sumLongSequential");
|
||||
while (!input.isDone()) {
|
||||
long rowval = selector.getLong();
|
||||
blackhole.consume(rowval);
|
||||
|
|
|
@@ -39,6 +39,7 @@ import org.openjdk.jmh.runner.options.OptionsBuilder;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.concurrent.TimeUnit;

@State(Scope.Benchmark)
@@ -60,7 +61,7 @@ public class TimeParseBenchmark
@Setup
public void setup()
{
SimpleDateFormat format = new SimpleDateFormat(DATA_FORMAT);
SimpleDateFormat format = new SimpleDateFormat(DATA_FORMAT, Locale.ENGLISH);
long start = System.currentTimeMillis();
int rowsPerBatch = numRows / numBatches;
int numRowInBatch = 0;
@@ -20,6 +20,7 @@
package io.druid.benchmark.datagen;

import com.google.common.collect.ImmutableList;
import io.druid.java.util.common.Intervals;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.DoubleMinAggregatorFactory;
@@ -84,7 +85,7 @@ public class BenchmarkSchemas
basicSchemaIngestAggs.add(new DoubleMinAggregatorFactory("minFloatZipf", "metFloatZipf"));
basicSchemaIngestAggs.add(new HyperUniquesAggregatorFactory("hyper", "dimHyperUnique"));

Interval basicSchemaDataInterval = new Interval(0, 1000000);
Interval basicSchemaDataInterval = Intervals.utc(0, 1000000);

BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo(
basicSchemaColumns,
@@ -104,7 +105,7 @@ public class BenchmarkSchemas
List<AggregatorFactory> basicSchemaIngestAggs = new ArrayList<>();
basicSchemaIngestAggs.add(new CountAggregatorFactory("rows"));

Interval basicSchemaDataInterval = new Interval(0, 1000000);
Interval basicSchemaDataInterval = Intervals.utc(0, 1000000);

BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo(
basicSchemaColumns,
@@ -125,7 +126,7 @@ public class BenchmarkSchemas
basicSchemaIngestAggs.add(new LongSumAggregatorFactory("dimSequential", "dimSequential"));
basicSchemaIngestAggs.add(new CountAggregatorFactory("rows"));

Interval basicSchemaDataInterval = new Interval(0, 1000000);
Interval basicSchemaDataInterval = Intervals.utc(0, 1000000);

BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo(
basicSchemaColumns,
@@ -146,7 +147,7 @@ public class BenchmarkSchemas
basicSchemaIngestAggs.add(new DoubleSumAggregatorFactory("dimSequential", "dimSequential"));
basicSchemaIngestAggs.add(new CountAggregatorFactory("rows"));

Interval basicSchemaDataInterval = new Interval(0, 1000000);
Interval basicSchemaDataInterval = Intervals.utc(0, 1000000);

BenchmarkSchemaInfo basicSchema = new BenchmarkSchemaInfo(
basicSchemaColumns,
@@ -144,10 +144,10 @@ public class IncrementalIndexReadBenchmark
Cursor cursor = Sequences.toList(Sequences.limit(cursors, 1), Lists.<Cursor>newArrayList()).get(0);

List<DimensionSelector> selectors = new ArrayList<>();
selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimSequential", null)));
selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimZipf", null)));
selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimUniform", null)));
selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimSequentialHalfNull", null)));
selectors.add(makeDimensionSelector(cursor, "dimSequential"));
selectors.add(makeDimensionSelector(cursor, "dimZipf"));
selectors.add(makeDimensionSelector(cursor, "dimUniform"));
selectors.add(makeDimensionSelector(cursor, "dimSequentialHalfNull"));

cursor.reset();
while (!cursor.isDone()) {
@@ -179,10 +179,10 @@ public class IncrementalIndexReadBenchmark
Cursor cursor = Sequences.toList(Sequences.limit(cursors, 1), Lists.<Cursor>newArrayList()).get(0);

List<DimensionSelector> selectors = new ArrayList<>();
selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimSequential", null)));
selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimZipf", null)));
selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimUniform", null)));
selectors.add(cursor.makeDimensionSelector(new DefaultDimensionSpec("dimSequentialHalfNull", null)));
selectors.add(makeDimensionSelector(cursor, "dimSequential"));
selectors.add(makeDimensionSelector(cursor, "dimZipf"));
selectors.add(makeDimensionSelector(cursor, "dimUniform"));
selectors.add(makeDimensionSelector(cursor, "dimSequentialHalfNull"));

cursor.reset();
while (!cursor.isDone()) {
@@ -205,4 +205,9 @@ public class IncrementalIndexReadBenchmark
null
);
}

private static DimensionSelector makeDimensionSelector(Cursor cursor, String name)
{
return cursor.getColumnSelectorFactory().makeDimensionSelector(new DefaultDimensionSpec(name, null));
}
}
@@ -25,8 +25,8 @@ import com.google.common.io.Files;
import io.druid.benchmark.datagen.BenchmarkSchemaInfo;
import io.druid.benchmark.datagen.BenchmarkSchemas;
import io.druid.benchmark.datagen.SegmentGenerator;
import io.druid.common.utils.JodaUtils;
import io.druid.data.input.Row;
import io.druid.java.util.common.Intervals;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
@@ -48,7 +48,6 @@ import io.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.LinearShardSpec;
import org.apache.commons.io.FileUtils;
import org.joda.time.Interval;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@@ -119,12 +118,13 @@ public class SqlBenchmark
CalciteTests.createMockQueryLifecycleFactory(walker),
CalciteTests.createOperatorTable(),
CalciteTests.createExprMacroTable(),
plannerConfig
plannerConfig,
CalciteTests.getJsonMapper()
);
groupByQuery = GroupByQuery
.builder()
.setDataSource("foo")
.setInterval(new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT))
.setInterval(Intervals.ETERNITY)
.setDimensions(
Arrays.<DimensionSpec>asList(
new DefaultDimensionSpec("dimZipf", "d0"),
@@ -30,6 +30,7 @@ import io.druid.concurrent.Execs;
import io.druid.data.input.InputRow;
import io.druid.hll.HyperLogLogHash;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.Intervals;
import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.guava.Sequence;
import io.druid.java.util.common.guava.Sequences;
@@ -72,7 +73,6 @@ import io.druid.segment.column.ColumnConfig;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.serde.ComplexMetrics;
import org.apache.commons.io.FileUtils;
import org.joda.time.Interval;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@@ -214,7 +214,7 @@ public class TimeseriesBenchmark
basicQueries.put("timeFilterAlphanumeric", timeFilterQuery);
}
{
QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval(200000, 300000)));
QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(Intervals.utc(200000, 300000)));
List<AggregatorFactory> queryAggs = new ArrayList<>();
LongSumAggregatorFactory lsaf = new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential");
queryAggs.add(lsaf);
@@ -19,6 +19,7 @@

package io.druid.server.coordinator;

import io.druid.java.util.common.DateTimes;
import io.druid.timeline.DataSegment;
import org.joda.time.DateTime;
import org.joda.time.Interval;
@@ -39,7 +40,7 @@ import java.util.concurrent.TimeUnit;
@State(Scope.Benchmark)
public class CostBalancerStrategyBenchmark
{
private final static DateTime t0 = new DateTime("2016-01-01T01:00:00Z");
private final static DateTime t0 = DateTimes.of("2016-01-01T01:00:00Z");

private List<DataSegment> segments;
private DataSegment segment;
@@ -22,8 +22,8 @@ package io.druid.benchmark;
import io.druid.benchmark.datagen.BenchmarkColumnSchema;
import io.druid.benchmark.datagen.BenchmarkDataGenerator;
import io.druid.data.input.InputRow;
import io.druid.java.util.common.Intervals;
import io.druid.segment.column.ValueType;
import org.joda.time.Interval;
import org.junit.Test;

import java.util.ArrayList;
@@ -378,13 +378,13 @@ public class BenchmarkDataGeneratorTest
)
);

BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 600000), 100);
BenchmarkDataGenerator dataGenerator = new BenchmarkDataGenerator(schemas, 9999, Intervals.utc(50000, 600000), 100);
for (int i = 0; i < 100; i++) {
InputRow row = dataGenerator.nextRow();
//System.out.println("S-ROW: " + row);
}

BenchmarkDataGenerator dataGenerator2 = new BenchmarkDataGenerator(schemas, 9999, new Interval(50000, 50001), 100);
BenchmarkDataGenerator dataGenerator2 = new BenchmarkDataGenerator(schemas, 9999, Intervals.utc(50000, 50001), 100);
for (int i = 0; i < 100; i++) {
InputRow row = dataGenerator2.nextRow();
//System.out.println("S2-ROW: " + row);
@@ -0,0 +1,46 @@
@defaultMessage Uses default time zone
org.joda.time.DateTime#<init>()
org.joda.time.DateTime#<init>(long)
org.joda.time.DateTime#<init>(java.lang.Object)
org.joda.time.DateTime#<init>(int, int, int, int, int)
org.joda.time.DateTime#<init>(int, int, int, int, int, int)
org.joda.time.DateTime#<init>(int, int, int, int, int, int, int)
org.joda.time.DateTime#now()
org.joda.time.Instant#toDateTime()
org.joda.time.Instant#toMutableDateTime()
org.joda.time.Instant#toMutableDateTimeISO()
org.joda.time.base.AbstractInstant#toDateTimeISO()
org.joda.time.base.AbstractInstant#toDateTime()
org.joda.time.base.AbstractInstant#toMutableDateTime()
org.joda.time.base.AbstractInstant#toMutableDateTimeISO()
org.joda.time.LocalDateTime#<init>()
org.joda.time.LocalDateTime#<init>(long)
org.joda.time.LocalDateTime#<init>(java.lang.Object)
org.joda.time.LocalDateTime#now()
org.joda.time.LocalDateTime#fromDateFields(java.util.Date)
org.joda.time.LocalDateTime#toDate()
org.joda.time.LocalDateTime#toDateTime()
org.joda.time.LocalDate#<init>()
org.joda.time.LocalDate#<init>(long)
org.joda.time.LocalDate#<init>(java.lang.Object)
org.joda.time.LocalDate#fromDateFields(java.util.Date)
org.joda.time.LocalDate#now()
org.joda.time.LocalDate#toDate()
org.joda.time.LocalDate#toDateTime(org.joda.time.LocalTime)
org.joda.time.LocalDate#toDateTimeAtCurrentTime()
org.joda.time.LocalDate#toDateTimeAtStartOfDay()
org.joda.time.LocalDate#toInterval()
org.joda.time.LocalTime#<init>()
org.joda.time.LocalTime#<init>(long)
org.joda.time.LocalTime#<init>(java.lang.Object)
org.joda.time.LocalTime#fromDateFields(java.util.Date)
org.joda.time.LocalTime#now()
org.joda.time.LocalTime#toDateTimeToday()
org.joda.time.Interval#<init>(long, long)
org.joda.time.Interval#<init>(java.lang.Object)
org.joda.time.Interval#parse(java.lang.String)
org.joda.time.Interval#parseWithOffset(java.lang.String)

@defaultMessage Doesn't handle edge cases where the start of day isn't midnight.
org.joda.time.LocalDate#toDateTimeAtMidnight()
org.joda.time.DateMidnight
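Note (not part of this commit): the signatures above forbid Joda-Time constructors and factory methods that implicitly use the JVM default time zone. A minimal, hypothetical Java sketch of the UTC-safe replacements the rest of this diff migrates to, assuming only the io.druid.java.util.common.DateTimes and Intervals helpers imported in the changed files:

import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.Intervals;
import org.joda.time.DateTime;
import org.joda.time.Interval;

public class UtcTimeExample
{
  public static void main(String[] args)
  {
    // new DateTime() / DateTime.now() are forbidden above; DateTimes.nowUtc() is explicit about UTC.
    DateTime now = DateTimes.nowUtc();

    // new DateTime(Object) is forbidden; DateTimes.of(...) parses the ISO string in UTC.
    DateTime fixed = DateTimes.of("2016-01-01T01:00:00Z");

    // new Interval(long, long) is forbidden; Intervals.utc(...) pins the chronology to UTC.
    Interval utcInterval = Intervals.utc(0, 1000000);

    // new Interval(Object) and Interval.parse(...) are forbidden; Intervals.of(...) replaces them.
    Interval parsed = Intervals.of("2011-01-01/2011-01-02");

    System.out.println(now + " " + fixed + " " + utcInterval + " " + parsed);
  }
}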
@@ -22,6 +22,7 @@ package io.druid.audit;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import io.druid.java.util.common.DateTimes;
import org.joda.time.DateTime;

/**
@@ -50,7 +51,7 @@ public class AuditEntry
this.key = key;
this.type = type;
this.auditInfo = authorInfo;
this.auditTime = auditTime == null ? DateTime.now() : auditTime;
this.auditTime = auditTime == null ? DateTimes.nowUtc() : auditTime;
this.payload = payload;
}

@@ -151,7 +152,7 @@ public class AuditEntry
this.key = null;
this.auditInfo = null;
this.payload = null;
this.auditTime = DateTime.now();
this.auditTime = DateTimes.nowUtc();
}

public Builder key(String key)
@@ -30,7 +30,9 @@ import org.joda.time.format.ISODateTimeFormat;
import java.util.List;

/**
* Do NOT remove "unused" members in this class. They are used by generated Antlr
*/
@SuppressWarnings("unused")
interface Function
{
String name();
@@ -1024,7 +1026,7 @@ interface Function
}

final String arg = args.get(0).eval(bindings).asString();
return ExprEval.of(Strings.nullToEmpty(arg).toLowerCase());
return ExprEval.of(StringUtils.toLowerCase(Strings.nullToEmpty(arg)));
}
}

@@ -1044,7 +1046,7 @@ interface Function
}

final String arg = args.get(0).eval(bindings).asString();
return ExprEval.of(Strings.nullToEmpty(arg).toUpperCase());
return ExprEval.of(StringUtils.toUpperCase(Strings.nullToEmpty(arg)));
}
}
}
@@ -21,11 +21,13 @@ package io.druid.metadata;

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import io.druid.guice.annotations.ExtensionPoint;


/**
* Implement this for different ways to (optionally securely) access secrets.
*/
@ExtensionPoint
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type", defaultImpl = DefaultPasswordProvider.class)
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = "default", value = DefaultPasswordProvider.class),
@@ -19,8 +19,10 @@

package io.druid.timeline;

import io.druid.guice.annotations.PublicApi;
import org.joda.time.Interval;

@PublicApi
public interface LogicalSegment
{
public Interval getInterval();
@@ -25,7 +25,7 @@ import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import io.druid.common.utils.JodaUtils;
import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.UOE;
import io.druid.java.util.common.guava.Comparators;
import io.druid.timeline.partition.ImmutablePartitionHolder;
@@ -288,7 +288,7 @@ public class VersionedIntervalTimeline<VersionType, ObjectType> implements Timel
}

Interval lower = completePartitionsTimeline.floorKey(
new Interval(interval.getStartMillis(), JodaUtils.MAX_INSTANT)
new Interval(interval.getStart(), DateTimes.MAX)
);

if (lower == null || !lower.overlaps(interval)) {
@ -19,6 +19,8 @@
|
|||
|
||||
package io.druid.common.utils;
|
||||
|
||||
import io.druid.java.util.common.Intervals;
|
||||
import io.druid.java.util.common.JodaUtils;
|
||||
import org.joda.time.Duration;
|
||||
import org.joda.time.Interval;
|
||||
import org.joda.time.Period;
|
||||
|
@ -37,18 +39,18 @@ public class JodaUtilsTest
|
|||
public void testUmbrellaIntervalsSimple() throws Exception
|
||||
{
|
||||
List<Interval> intervals = Arrays.asList(
|
||||
new Interval("2011-03-03/2011-03-04"),
|
||||
new Interval("2011-01-01/2011-01-02"),
|
||||
new Interval("2011-02-01/2011-02-05"),
|
||||
new Interval("2011-02-03/2011-02-08"),
|
||||
new Interval("2011-01-01/2011-01-03"),
|
||||
new Interval("2011-03-01/2011-03-02"),
|
||||
new Interval("2011-03-05/2011-03-06"),
|
||||
new Interval("2011-02-01/2011-02-02")
|
||||
Intervals.of("2011-03-03/2011-03-04"),
|
||||
Intervals.of("2011-01-01/2011-01-02"),
|
||||
Intervals.of("2011-02-01/2011-02-05"),
|
||||
Intervals.of("2011-02-03/2011-02-08"),
|
||||
Intervals.of("2011-01-01/2011-01-03"),
|
||||
Intervals.of("2011-03-01/2011-03-02"),
|
||||
Intervals.of("2011-03-05/2011-03-06"),
|
||||
Intervals.of("2011-02-01/2011-02-02")
|
||||
);
|
||||
|
||||
Assert.assertEquals(
|
||||
new Interval("2011-01-01/2011-03-06"),
|
||||
Intervals.of("2011-01-01/2011-03-06"),
|
||||
JodaUtils.umbrellaInterval(intervals)
|
||||
);
|
||||
}
|
||||
|
@ -71,23 +73,23 @@ public class JodaUtilsTest
|
|||
public void testCondenseIntervalsSimple() throws Exception
|
||||
{
|
||||
List<Interval> intervals = Arrays.asList(
|
||||
new Interval("2011-01-01/2011-01-02"),
|
||||
new Interval("2011-01-02/2011-01-03"),
|
||||
new Interval("2011-02-01/2011-02-05"),
|
||||
new Interval("2011-02-01/2011-02-02"),
|
||||
new Interval("2011-02-03/2011-02-08"),
|
||||
new Interval("2011-03-01/2011-03-02"),
|
||||
new Interval("2011-03-03/2011-03-04"),
|
||||
new Interval("2011-03-05/2011-03-06")
|
||||
Intervals.of("2011-01-01/2011-01-02"),
|
||||
Intervals.of("2011-01-02/2011-01-03"),
|
||||
Intervals.of("2011-02-01/2011-02-05"),
|
||||
Intervals.of("2011-02-01/2011-02-02"),
|
||||
Intervals.of("2011-02-03/2011-02-08"),
|
||||
Intervals.of("2011-03-01/2011-03-02"),
|
||||
Intervals.of("2011-03-03/2011-03-04"),
|
||||
Intervals.of("2011-03-05/2011-03-06")
|
||||
);
|
||||
|
||||
Assert.assertEquals(
|
||||
Arrays.asList(
|
||||
new Interval("2011-01-01/2011-01-03"),
|
||||
new Interval("2011-02-01/2011-02-08"),
|
||||
new Interval("2011-03-01/2011-03-02"),
|
||||
new Interval("2011-03-03/2011-03-04"),
|
||||
new Interval("2011-03-05/2011-03-06")
|
||||
Intervals.of("2011-01-01/2011-01-03"),
|
||||
Intervals.of("2011-02-01/2011-02-08"),
|
||||
Intervals.of("2011-03-01/2011-03-02"),
|
||||
Intervals.of("2011-03-03/2011-03-04"),
|
||||
Intervals.of("2011-03-05/2011-03-06")
|
||||
),
|
||||
JodaUtils.condenseIntervals(intervals)
|
||||
);
|
||||
|
@ -97,31 +99,31 @@ public class JodaUtilsTest
|
|||
public void testCondenseIntervalsMixedUp() throws Exception
|
||||
{
|
||||
List<Interval> intervals = Arrays.asList(
|
||||
new Interval("2011-01-01/2011-01-02"),
|
||||
new Interval("2011-01-02/2011-01-03"),
|
||||
new Interval("2011-02-01/2011-02-05"),
|
||||
new Interval("2011-02-01/2011-02-02"),
|
||||
new Interval("2011-02-03/2011-02-08"),
|
||||
new Interval("2011-03-01/2011-03-02"),
|
||||
new Interval("2011-03-03/2011-03-04"),
|
||||
new Interval("2011-03-05/2011-03-06"),
|
||||
new Interval("2011-04-01/2011-04-05"),
|
||||
new Interval("2011-04-02/2011-04-03"),
|
||||
new Interval("2011-05-01/2011-05-05"),
|
||||
new Interval("2011-05-02/2011-05-07")
|
||||
Intervals.of("2011-01-01/2011-01-02"),
|
||||
Intervals.of("2011-01-02/2011-01-03"),
|
||||
Intervals.of("2011-02-01/2011-02-05"),
|
||||
Intervals.of("2011-02-01/2011-02-02"),
|
||||
Intervals.of("2011-02-03/2011-02-08"),
|
||||
Intervals.of("2011-03-01/2011-03-02"),
|
||||
Intervals.of("2011-03-03/2011-03-04"),
|
||||
Intervals.of("2011-03-05/2011-03-06"),
|
||||
Intervals.of("2011-04-01/2011-04-05"),
|
||||
Intervals.of("2011-04-02/2011-04-03"),
|
||||
Intervals.of("2011-05-01/2011-05-05"),
|
||||
Intervals.of("2011-05-02/2011-05-07")
|
||||
);
|
||||
|
||||
for (int i = 0; i < 20; ++i) {
|
||||
Collections.shuffle(intervals);
|
||||
Assert.assertEquals(
|
||||
Arrays.asList(
|
||||
new Interval("2011-01-01/2011-01-03"),
|
||||
new Interval("2011-02-01/2011-02-08"),
|
||||
new Interval("2011-03-01/2011-03-02"),
|
||||
new Interval("2011-03-03/2011-03-04"),
|
||||
new Interval("2011-03-05/2011-03-06"),
|
||||
new Interval("2011-04-01/2011-04-05"),
|
||||
new Interval("2011-05-01/2011-05-07")
|
||||
Intervals.of("2011-01-01/2011-01-03"),
|
||||
Intervals.of("2011-02-01/2011-02-08"),
|
||||
Intervals.of("2011-03-01/2011-03-02"),
|
||||
Intervals.of("2011-03-03/2011-03-04"),
|
||||
Intervals.of("2011-03-05/2011-03-06"),
|
||||
Intervals.of("2011-04-01/2011-04-05"),
|
||||
Intervals.of("2011-05-01/2011-05-07")
|
||||
),
|
||||
JodaUtils.condenseIntervals(intervals)
|
||||
);
|
||||
|
@ -131,15 +133,13 @@ public class JodaUtilsTest
|
|||
@Test
|
||||
public void testMinMaxInterval()
|
||||
{
|
||||
final Interval interval = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT);
|
||||
Assert.assertEquals(Long.MAX_VALUE, interval.toDuration().getMillis());
|
||||
Assert.assertEquals(Long.MAX_VALUE, Intervals.ETERNITY.toDuration().getMillis());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMinMaxDuration()
|
||||
{
|
||||
final Interval interval = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT);
|
||||
final Duration duration = interval.toDuration();
|
||||
final Duration duration = Intervals.ETERNITY.toDuration();
|
||||
Assert.assertEquals(Long.MAX_VALUE, duration.getMillis());
|
||||
Assert.assertEquals("PT9223372036854775.807S", duration.toString());
|
||||
}
|
||||
|
@ -148,8 +148,7 @@ public class JodaUtilsTest
|
|||
@Test(expected = ArithmeticException.class)
|
||||
public void testMinMaxPeriod()
|
||||
{
|
||||
final Interval interval = new Interval(JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT);
|
||||
final Period period = interval.toDuration().toPeriod();
|
||||
final Period period = Intervals.ETERNITY.toDuration().toPeriod();
|
||||
Assert.assertEquals(Long.MAX_VALUE, period.getMinutes());
|
||||
}
|
||||
|
||||
|
|
|
@ -76,7 +76,7 @@ public class ExecsTest
|
|||
{
|
||||
for (int i = 0; i < nTasks; i++) {
|
||||
final int taskID = i;
|
||||
System.out.println("Produced task" + taskID);
|
||||
log.info("Produced task %d", taskID);
|
||||
blockingExecutor.submit(
|
||||
new Runnable()
|
||||
{
|
||||
|
|
|
@ -57,7 +57,7 @@ public class LifecycleLockTest
|
|||
finishLatch.countDown();
|
||||
}
|
||||
catch (InterruptedException e) {
|
||||
e.printStackTrace();
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}.start();
|
||||
|
@ -99,7 +99,7 @@ public class LifecycleLockTest
|
|||
finishLatch.countDown();
|
||||
}
|
||||
catch (InterruptedException e) {
|
||||
e.printStackTrace();
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}.start();
|
||||
|
|
|
@ -25,8 +25,8 @@ import com.google.common.collect.ImmutableSet;
|
|||
import com.google.common.collect.Iterables;
|
||||
import com.google.common.collect.Ordering;
|
||||
import com.google.common.collect.Sets;
|
||||
|
||||
import io.druid.common.utils.JodaUtils;
|
||||
import io.druid.java.util.common.DateTimes;
|
||||
import io.druid.java.util.common.Intervals;
|
||||
import io.druid.java.util.common.Pair;
|
||||
import io.druid.timeline.partition.ImmutablePartitionHolder;
|
||||
import io.druid.timeline.partition.IntegerPartitionChunk;
|
||||
|
@ -86,7 +86,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-04-02/2011-04-06", "2", 1),
|
||||
createExpected("2011-04-06/2011-04-09", "3", 4)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-04-01/2011-04-09"))
|
||||
timeline.lookup(Intervals.of("2011-04-01/2011-04-09"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -95,7 +95,7 @@ public class VersionedIntervalTimelineTest
|
|||
{
|
||||
Assert.assertEquals(
|
||||
makeSingle(1),
|
||||
timeline.remove(new Interval("2011-04-01/2011-04-09"), "2", makeSingle(1))
|
||||
timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "2", makeSingle(1))
|
||||
);
|
||||
assertValues(
|
||||
Arrays.asList(
|
||||
|
@ -104,7 +104,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-04-03/2011-04-06", "1", 3),
|
||||
createExpected("2011-04-06/2011-04-09", "3", 4)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-04-01/2011-04-09"))
|
||||
timeline.lookup(Intervals.of("2011-04-01/2011-04-09"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -113,11 +113,11 @@ public class VersionedIntervalTimelineTest
|
|||
{
|
||||
Assert.assertEquals(
|
||||
makeSingle(1),
|
||||
timeline.remove(new Interval("2011-04-01/2011-04-09"), "2", makeSingle(1))
|
||||
timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "2", makeSingle(1))
|
||||
);
|
||||
Assert.assertEquals(
|
||||
makeSingle(2),
|
||||
timeline.remove(new Interval("2011-04-01/2011-04-03"), "1", makeSingle(2))
|
||||
timeline.remove(Intervals.of("2011-04-01/2011-04-03"), "1", makeSingle(2))
|
||||
);
|
||||
assertValues(
|
||||
Arrays.asList(
|
||||
|
@ -125,7 +125,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-04-03/2011-04-06", "1", 3),
|
||||
createExpected("2011-04-06/2011-04-09", "3", 4)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-04-01/2011-04-09"))
|
||||
timeline.lookup(Intervals.of("2011-04-01/2011-04-09"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -134,7 +134,7 @@ public class VersionedIntervalTimelineTest
|
|||
{
|
||||
Assert.assertEquals(
|
||||
makeSingle(1),
|
||||
timeline.remove(new Interval("2011-04-01/2011-04-09"), "2", makeSingle(1))
|
||||
timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "2", makeSingle(1))
|
||||
);
|
||||
assertValues(
|
||||
Arrays.asList(
|
||||
|
@ -142,7 +142,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-04-02/2011-04-03", "1", 2),
|
||||
createExpected("2011-04-03/2011-04-05", "1", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-04-01/2011-04-05"))
|
||||
timeline.lookup(Intervals.of("2011-04-01/2011-04-05"))
|
||||
);
|
||||
|
||||
assertValues(
|
||||
|
@ -150,7 +150,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-04-02T18/2011-04-03", "1", 2),
|
||||
createExpected("2011-04-03/2011-04-04T01", "1", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-04-02T18/2011-04-04T01"))
|
||||
timeline.lookup(Intervals.of("2011-04-02T18/2011-04-04T01"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -161,21 +161,21 @@ public class VersionedIntervalTimelineTest
|
|||
Collections.singletonList(
|
||||
createExpected("2011-05-01/2011-05-09", "4", 9)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-05-01/2011-05-09"))
|
||||
timeline.lookup(Intervals.of("2011-05-01/2011-05-09"))
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMay2() throws Exception
|
||||
{
|
||||
Assert.assertNotNull(timeline.remove(new Interval("2011-05-01/2011-05-10"), "4", makeSingle(1)));
|
||||
Assert.assertNotNull(timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(1)));
|
||||
assertValues(
|
||||
Arrays.asList(
|
||||
createExpected("2011-05-01/2011-05-03", "2", 7),
|
||||
createExpected("2011-05-03/2011-05-04", "3", 8),
|
||||
createExpected("2011-05-04/2011-05-05", "2", 7)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-05-01/2011-05-09"))
|
||||
timeline.lookup(Intervals.of("2011-05-01/2011-05-09"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -184,25 +184,25 @@ public class VersionedIntervalTimelineTest
|
|||
{
|
||||
Assert.assertEquals(
|
||||
makeSingle(9),
|
||||
timeline.remove(new Interval("2011-05-01/2011-05-10"), "4", makeSingle(9))
|
||||
timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(9))
|
||||
);
|
||||
Assert.assertEquals(
|
||||
makeSingle(7),
|
||||
timeline.remove(new Interval("2011-05-01/2011-05-05"), "2", makeSingle(7))
|
||||
timeline.remove(Intervals.of("2011-05-01/2011-05-05"), "2", makeSingle(7))
|
||||
);
|
||||
assertValues(
|
||||
Arrays.asList(
|
||||
createExpected("2011-05-01/2011-05-02", "1", 6),
|
||||
createExpected("2011-05-03/2011-05-04", "3", 8)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-05-01/2011-05-09"))
|
||||
timeline.lookup(Intervals.of("2011-05-01/2011-05-09"))
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testInsertInWrongOrder() throws Exception
|
||||
{
|
||||
DateTime overallStart = new DateTime().minus(Hours.TWO);
|
||||
DateTime overallStart = DateTimes.nowUtc().minus(Hours.TWO);
|
||||
|
||||
Assert.assertTrue(
|
||||
"These timestamps have to be at the end AND include now for this test to work.",
|
||||
|
@ -241,32 +241,32 @@ public class VersionedIntervalTimelineTest
|
|||
{
|
||||
Assert.assertEquals(
|
||||
new ImmutablePartitionHolder<Integer>(new PartitionHolder<Integer>(makeSingle(1))),
|
||||
timeline.findEntry(new Interval("2011-10-01/2011-10-02"), "1")
|
||||
timeline.findEntry(Intervals.of("2011-10-01/2011-10-02"), "1")
|
||||
);
|
||||
|
||||
Assert.assertEquals(
|
||||
new ImmutablePartitionHolder<Integer>(new PartitionHolder<Integer>(makeSingle(1))),
|
||||
timeline.findEntry(new Interval("2011-10-01/2011-10-01T10"), "1")
|
||||
timeline.findEntry(Intervals.of("2011-10-01/2011-10-01T10"), "1")
|
||||
);
|
||||
|
||||
Assert.assertEquals(
|
||||
new ImmutablePartitionHolder<Integer>(new PartitionHolder<Integer>(makeSingle(1))),
|
||||
timeline.findEntry(new Interval("2011-10-01T02/2011-10-02"), "1")
|
||||
timeline.findEntry(Intervals.of("2011-10-01T02/2011-10-02"), "1")
|
||||
);
|
||||
|
||||
Assert.assertEquals(
|
||||
new ImmutablePartitionHolder<Integer>(new PartitionHolder<Integer>(makeSingle(1))),
|
||||
timeline.findEntry(new Interval("2011-10-01T04/2011-10-01T17"), "1")
|
||||
timeline.findEntry(Intervals.of("2011-10-01T04/2011-10-01T17"), "1")
|
||||
);
|
||||
|
||||
Assert.assertEquals(
|
||||
null,
|
||||
timeline.findEntry(new Interval("2011-10-01T04/2011-10-01T17"), "2")
|
||||
timeline.findEntry(Intervals.of("2011-10-01T04/2011-10-01T17"), "2")
|
||||
);
|
||||
|
||||
Assert.assertEquals(
|
||||
null,
|
||||
timeline.findEntry(new Interval("2011-10-01T04/2011-10-02T17"), "1")
|
||||
timeline.findEntry(Intervals.of("2011-10-01T04/2011-10-02T17"), "1")
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -280,7 +280,7 @@ public class VersionedIntervalTimelineTest
|
|||
|
||||
Assert.assertEquals(
|
||||
new ImmutablePartitionHolder<Integer>(new PartitionHolder<Integer>(makeSingle(1))),
|
||||
timeline.findEntry(new Interval("2011-01-02T02/2011-01-04"), "1")
|
||||
timeline.findEntry(Intervals.of("2011-01-02T02/2011-01-04"), "1")
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -301,7 +301,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-10-04/2011-10-05", "4", 4),
|
||||
createExpected("2011-10-05/2011-10-06", "5", 5)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-10-01/2011-10-06"))
|
||||
timeline.lookup(Intervals.of("2011-10-01/2011-10-06"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -313,14 +313,14 @@ public class VersionedIntervalTimelineTest
|
|||
add("2011-10-06/2011-10-07", "6", IntegerPartitionChunk.make(null, 10, 0, 60));
|
||||
assertValues(
|
||||
ImmutableList.of(createExpected("2011-10-05/2011-10-06", "5", 5)),
|
||||
timeline.lookup(new Interval("2011-10-05/2011-10-07"))
|
||||
timeline.lookup(Intervals.of("2011-10-05/2011-10-07"))
|
||||
);
|
||||
Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty());
|
||||
|
||||
add("2011-10-06/2011-10-07", "6", IntegerPartitionChunk.make(10, 20, 1, 61));
|
||||
assertValues(
|
||||
ImmutableList.of(createExpected("2011-10-05/2011-10-06", "5", 5)),
|
||||
timeline.lookup(new Interval("2011-10-05/2011-10-07"))
|
||||
timeline.lookup(Intervals.of("2011-10-05/2011-10-07"))
|
||||
);
|
||||
Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty());
|
||||
|
||||
|
@ -337,7 +337,7 @@ public class VersionedIntervalTimelineTest
|
|||
)
|
||||
)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-10-05/2011-10-07"))
|
||||
timeline.lookup(Intervals.of("2011-10-05/2011-10-07"))
|
||||
);
|
||||
Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty());
|
||||
}
|
||||
|
@ -368,10 +368,10 @@ public class VersionedIntervalTimelineTest
|
|||
testIncompletePartitionDoesNotOvershadow();
|
||||
|
||||
final IntegerPartitionChunk<Integer> chunk = IntegerPartitionChunk.make(null, 10, 0, 60);
|
||||
Assert.assertEquals(chunk, timeline.remove(new Interval("2011-10-05/2011-10-07"), "6", chunk));
|
||||
Assert.assertEquals(chunk, timeline.remove(Intervals.of("2011-10-05/2011-10-07"), "6", chunk));
|
||||
assertValues(
|
||||
ImmutableList.of(createExpected("2011-10-05/2011-10-06", "5", 5)),
|
||||
timeline.lookup(new Interval("2011-10-05/2011-10-07"))
|
||||
timeline.lookup(Intervals.of("2011-10-05/2011-10-07"))
|
||||
);
|
||||
Assert.assertTrue("Expected no overshadowed entries", timeline.findOvershadowed().isEmpty());
|
||||
}
|
||||
|
@ -384,18 +384,18 @@ public class VersionedIntervalTimelineTest
|
|||
Collections.singletonList(
|
||||
createExpected("2011-05-01/2011-05-09", "5", 10)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-05-01/2011-05-09"))
|
||||
timeline.lookup(Intervals.of("2011-05-01/2011-05-09"))
|
||||
);
|
||||
|
||||
Assert.assertEquals(
|
||||
makeSingle(10),
|
||||
timeline.remove(new Interval("2011-05-01/2011-05-10"), "5", makeSingle(10))
|
||||
timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "5", makeSingle(10))
|
||||
);
|
||||
assertValues(
|
||||
Collections.singletonList(
|
||||
createExpected("2011-05-01/2011-05-09", "4", 9)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-05-01/2011-05-09"))
|
||||
timeline.lookup(Intervals.of("2011-05-01/2011-05-09"))
|
||||
);
|
||||
|
||||
add("2011-05-01/2011-05-10", "5", 10);
|
||||
|
@ -403,18 +403,18 @@ public class VersionedIntervalTimelineTest
|
|||
Collections.singletonList(
|
||||
createExpected("2011-05-01/2011-05-09", "5", 10)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-05-01/2011-05-09"))
|
||||
timeline.lookup(Intervals.of("2011-05-01/2011-05-09"))
|
||||
);
|
||||
|
||||
Assert.assertEquals(
|
||||
makeSingle(9),
|
||||
timeline.remove(new Interval("2011-05-01/2011-05-10"), "4", makeSingle(9))
|
||||
timeline.remove(Intervals.of("2011-05-01/2011-05-10"), "4", makeSingle(9))
|
||||
);
|
||||
assertValues(
|
||||
Collections.singletonList(
|
||||
createExpected("2011-05-01/2011-05-09", "5", 10)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-05-01/2011-05-09"))
|
||||
timeline.lookup(Intervals.of("2011-05-01/2011-05-09"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -454,7 +454,7 @@ public class VersionedIntervalTimelineTest
|
|||
Collections.singletonList(
|
||||
createExpected("2011-01-01/2011-01-10", "2", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-10"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-10"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -475,7 +475,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-05/2011-01-15", "2", 3),
|
||||
createExpected("2011-01-15/2011-01-20", "1", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -496,7 +496,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-05/2011-01-15", "2", 3),
|
||||
createExpected("2011-01-15/2011-01-20", "1", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -515,7 +515,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-01/2011-01-05", "1", 1),
|
||||
createExpected("2011-01-05/2011-01-15", "2", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-15"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-15"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -534,7 +534,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-01/2011-01-05", "1", 1),
|
||||
createExpected("2011-01-05/2011-01-15", "2", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-15"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-15"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -553,7 +553,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-01/2011-01-10", "2", 1),
|
||||
createExpected("2011-01-10/2011-01-15", "1", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-15"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-15"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -572,7 +572,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-01/2011-01-10", "2", 1),
|
||||
createExpected("2011-01-10/2011-01-15", "1", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-15"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-15"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -592,7 +592,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-05/2011-01-15", "2", 3),
|
||||
createExpected("2011-01-15/2011-01-20", "1", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -612,7 +612,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-05/2011-01-15", "2", 3),
|
||||
createExpected("2011-01-15/2011-01-20", "1", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -632,7 +632,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-01/2011-01-15", "2", 3),
|
||||
createExpected("2011-01-15/2011-01-20", "1", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -652,7 +652,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-01/2011-01-15", "2", 3),
|
||||
createExpected("2011-01-15/2011-01-20", "1", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -674,7 +674,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-03/2011-01-06", "3", 3),
|
||||
createExpected("2011-01-06/2011-01-20", "2", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -696,7 +696,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-03/2011-01-06", "3", 3),
|
||||
createExpected("2011-01-06/2011-01-20", "2", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -718,7 +718,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-03/2011-01-06", "3", 3),
|
||||
createExpected("2011-01-06/2011-01-20", "2", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -741,7 +741,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-06/2011-01-10", "2", 2),
|
||||
createExpected("2011-01-10/2011-01-20", "1", 1)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -764,7 +764,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-06/2011-01-10", "2", 2),
|
||||
createExpected("2011-01-10/2011-01-20", "1", 1)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -787,7 +787,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-06/2011-01-10", "2", 2),
|
||||
createExpected("2011-01-10/2011-01-20", "1", 1)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -811,7 +811,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-13/2011-01-15", "1", 2),
|
||||
createExpected("2011-01-15/2011-01-20", "1", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -835,7 +835,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-13/2011-01-15", "1", 2),
|
||||
createExpected("2011-01-15/2011-01-20", "1", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -855,7 +855,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-01/2011-01-10", "2", 2),
|
||||
createExpected("2011-01-10/2011-01-20", "2", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -875,7 +875,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-01/2011-01-10", "2", 2),
|
||||
createExpected("2011-01-10/2011-01-20", "2", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -897,7 +897,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-05/2011-01-15", "2", 2),
|
||||
createExpected("2011-01-15/2011-01-25", "3", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-25"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-25"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -919,7 +919,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-05/2011-01-15", "2", 2),
|
||||
createExpected("2011-01-15/2011-01-25", "3", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-25"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-25"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -941,7 +941,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-05/2011-01-13", "1", 1),
|
||||
createExpected("2011-01-13/2011-01-20", "2", 2)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -961,7 +961,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-01/2011-01-15", "2", 2),
|
||||
createExpected("2011-01-15/2011-01-20", "1", 1)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -983,7 +983,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-13/2011-01-17", "2", 3),
|
||||
createExpected("2011-01-17/2011-01-20", "1", 1)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1005,7 +1005,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-13/2011-01-17", "2", 3),
|
||||
createExpected("2011-01-17/2011-01-20", "1", 1)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1026,7 +1026,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-05/2011-01-15", "1", 1),
|
||||
createExpected("2011-01-15/2011-01-20", "2", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1047,7 +1047,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-05/2011-01-15", "1", 1),
|
||||
createExpected("2011-01-15/2011-01-20", "2", 3)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1072,7 +1072,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-04/2011-01-05", "3", 3),
|
||||
createExpected("2011-01-05/2011-01-06", "4", 4)
|
||||
),
|
||||
timeline.lookup(new Interval("0000-01-01/3000-01-01"))
|
||||
timeline.lookup(Intervals.of("0000-01-01/3000-01-01"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1093,7 +1093,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-01T12/2011-01-02", "3", 3),
|
||||
createExpected("2011-01-02/3011-01-03", "1", 1)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/3011-01-03"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/3011-01-03"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1175,13 +1175,13 @@ public class VersionedIntervalTimelineTest
|
|||
add("2011-01-01/2011-01-20", "1", 1);
|
||||
add("2011-01-10/2011-01-15", "2", 2);
|
||||
|
||||
timeline.remove(new Interval("2011-01-10/2011-01-15"), "2", makeSingle(2));
|
||||
timeline.remove(Intervals.of("2011-01-10/2011-01-15"), "2", makeSingle(2));
|
||||
|
||||
assertValues(
|
||||
Collections.singletonList(
|
||||
createExpected("2011-01-01/2011-01-20", "1", 1)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1194,7 +1194,7 @@ public class VersionedIntervalTimelineTest
|
|||
add("2011-01-10/2011-01-20", "2", 2);
|
||||
add("2011-01-20/2011-01-30", "3", 4);
|
||||
|
||||
timeline.remove(new Interval("2011-01-10/2011-01-20"), "2", makeSingle(2));
|
||||
timeline.remove(Intervals.of("2011-01-10/2011-01-20"), "2", makeSingle(2));
|
||||
|
||||
assertValues(
|
||||
Arrays.asList(
|
||||
|
@ -1202,7 +1202,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-20/2011-01-30", "3", 4)
|
||||
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-30"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-30"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1215,15 +1215,15 @@ public class VersionedIntervalTimelineTest
|
|||
add("2011-01-02/2011-01-03", "2", 2);
|
||||
add("2011-01-10/2011-01-14", "2", 3);
|
||||
|
||||
timeline.remove(new Interval("2011-01-02/2011-01-03"), "2", makeSingle(2));
|
||||
timeline.remove(new Interval("2011-01-10/2011-01-14"), "2", makeSingle(3));
|
||||
timeline.remove(Intervals.of("2011-01-02/2011-01-03"), "2", makeSingle(2));
|
||||
timeline.remove(Intervals.of("2011-01-10/2011-01-14"), "2", makeSingle(3));
|
||||
|
||||
assertValues(
|
||||
Collections.singletonList(
|
||||
createExpected("2011-01-01/2011-01-20", "1", 1)
|
||||
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1236,7 +1236,7 @@ public class VersionedIntervalTimelineTest
|
|||
add("2011-01-10/2011-01-15", "2", 2);
|
||||
add("2011-01-15/2011-01-20", "2", 3);
|
||||
|
||||
timeline.remove(new Interval("2011-01-15/2011-01-20"), "2", makeSingle(3));
|
||||
timeline.remove(Intervals.of("2011-01-15/2011-01-20"), "2", makeSingle(3));
|
||||
|
||||
assertValues(
|
||||
Arrays.asList(
|
||||
|
@ -1244,7 +1244,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-10/2011-01-15", "2", 2),
|
||||
createExpected("2011-01-15/2011-01-20", "1", 1)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1255,14 +1255,14 @@ public class VersionedIntervalTimelineTest
|
|||
|
||||
add("2011-01-01/2011-01-20", "1", 1);
|
||||
add("2011-01-10/2011-01-15", "2", 2);
|
||||
timeline.remove(new Interval("2011-01-10/2011-01-15"), "2", makeSingle(2));
|
||||
timeline.remove(Intervals.of("2011-01-10/2011-01-15"), "2", makeSingle(2));
|
||||
add("2011-01-01/2011-01-20", "1", 1);
|
||||
|
||||
assertValues(
|
||||
Collections.singletonList(
|
||||
createExpected("2011-01-01/2011-01-20", "1", 1)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-01-01/2011-01-20"))
|
||||
timeline.lookup(Intervals.of("2011-01-01/2011-01-20"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1271,11 +1271,11 @@ public class VersionedIntervalTimelineTest
|
|||
{
|
||||
Assert.assertNull(
|
||||
"Don't have it, should be null",
|
||||
timeline.remove(new Interval("1970-01-01/2025-04-20"), "1", makeSingle(1))
|
||||
timeline.remove(Intervals.of("1970-01-01/2025-04-20"), "1", makeSingle(1))
|
||||
);
|
||||
Assert.assertNull(
|
||||
"Don't have it, should be null",
|
||||
timeline.remove(new Interval("2011-04-01/2011-04-09"), "version does not exist", makeSingle(1))
|
||||
timeline.remove(Intervals.of("2011-04-01/2011-04-09"), "version does not exist", makeSingle(1))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1289,7 +1289,7 @@ public class VersionedIntervalTimelineTest
|
|||
add("2011-01-10/2011-01-15", "3", 3);
|
||||
add("2011-01-15/2011-01-20", "4", 4);
|
||||
|
||||
timeline.remove(new Interval("2011-01-15/2011-01-20"), "4", makeSingle(4));
|
||||
timeline.remove(Intervals.of("2011-01-15/2011-01-20"), "4", makeSingle(4));
|
||||
|
||||
assertValues(
|
||||
Arrays.asList(
|
||||
|
@ -1297,7 +1297,7 @@ public class VersionedIntervalTimelineTest
|
|||
createExpected("2011-01-05/2011-01-10", "2", 2),
|
||||
createExpected("2011-01-10/2011-01-15", "3", 3)
|
||||
),
|
||||
timeline.lookup(new Interval(new DateTime(0), new DateTime(JodaUtils.MAX_INSTANT)))
|
||||
timeline.lookup(new Interval(DateTimes.EPOCH, DateTimes.MAX))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1504,7 +1504,7 @@ public class VersionedIntervalTimelineTest
|
|||
|
||||
add("2011-04-01/2011-04-09", "1", 1);
|
||||
|
||||
Assert.assertTrue(timeline.lookup(Interval.parse("1970/1980")).isEmpty());
|
||||
Assert.assertTrue(timeline.lookup(Intervals.of("1970/1980")).isEmpty());
|
||||
}
|
||||
|
||||
// https://github.com/druid-io/druid/issues/3010
|
||||
|
@ -1526,7 +1526,7 @@ public class VersionedIntervalTimelineTest
|
|||
)
|
||||
)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-04-01/2011-04-02"))
|
||||
timeline.lookup(Intervals.of("2011-04-01/2011-04-02"))
|
||||
);
|
||||
|
||||
add("2011-04-01/2011-04-02", "3", IntegerPartitionChunk.make(null, 1, 0, 110));
|
||||
|
@ -1540,7 +1540,7 @@ public class VersionedIntervalTimelineTest
|
|||
)
|
||||
)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-04-01/2011-04-02"))
|
||||
timeline.lookup(Intervals.of("2011-04-01/2011-04-02"))
|
||||
);
|
||||
assertValues(
|
||||
Sets.newHashSet(
|
||||
|
@ -1564,7 +1564,7 @@ public class VersionedIntervalTimelineTest
|
|||
)
|
||||
)
|
||||
),
|
||||
timeline.lookup(new Interval("2011-04-01/2011-04-02"))
|
||||
timeline.lookup(Intervals.of("2011-04-01/2011-04-02"))
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -1579,58 +1579,58 @@ public class VersionedIntervalTimelineTest
|
|||
add("2011-04-15/2011-04-17", "1", new SingleElementPartitionChunk<Integer>(1));
|
||||
add("2011-04-17/2011-04-19", "1", new SingleElementPartitionChunk<Integer>(1));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-03"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-05"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-06"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-07"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-08"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-30"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-03"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-05"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-06"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-07"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-08"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-30"), "0"));
|
||||
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "1"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "1"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "1"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "1"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "2"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "2"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "2"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "2"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "1"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "1"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "1"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "1"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "2"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "2"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "2"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "2"));
|
||||
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-07"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-08"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-30"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-07"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-08"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-30"), "0"));
|
||||
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-08"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-07/2011-04-30"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-08"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-07/2011-04-30"), "0"));
|
||||
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-08/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-08/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-08/2011-04-30"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-08/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-08/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-08/2011-04-30"), "0"));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-15"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-17"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-19"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-09/2011-04-30"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-15"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-17"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-19"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-09/2011-04-30"), "0"));
|
||||
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-16"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-17"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-18"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-19"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-20"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-30"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-16"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-17"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-18"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-19"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-20"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-30"), "0"));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-19/2011-04-20"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-21/2011-04-22"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-19/2011-04-20"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-21/2011-04-22"), "0"));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -1645,77 +1645,77 @@ public class VersionedIntervalTimelineTest
|
|||
add("2011-04-17/2011-04-21", "11", new SingleElementPartitionChunk<Integer>(1));
|
||||
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-03"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-05"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-06"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-07"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-08"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-11"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-01/2011-04-30"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-03"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-05"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-06"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-07"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-08"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-09"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-10"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-11"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-01/2011-04-30"), "0"));
|
||||
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-10"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-11"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-10"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-11"), "0"));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-10"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-11"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-10"), "12"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-11"), "12"));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-06"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-07"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-08"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-09"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-10"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-11"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-06"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-07"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-08"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-09"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-10"), "13"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-11"), "13"));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-12"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-15"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-16"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-12"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-15"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-16"), "0"));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-17"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-18"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-19"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-20"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-21"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-05/2011-04-22"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-17"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-18"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-19"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-20"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-21"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-05/2011-04-22"), "0"));
|
||||
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-07"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-08"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-09"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-10"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-11"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-07"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-08"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-09"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-10"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-11"), "0"));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-12"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-15"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-16"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-12"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-15"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-16"), "0"));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-17"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-18"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-19"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-20"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-21"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-06/2011-04-22"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-17"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-18"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-19"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-20"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-21"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-06/2011-04-22"), "0"));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-15"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-16"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-15"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-16"), "0"));
|
||||
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-17"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-18"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-19"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-20"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-21"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-12/2011-04-22"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-17"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-18"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-19"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-20"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-21"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-12/2011-04-22"), "0"));
|
||||
|
||||
Assert.assertTrue(timeline.isOvershadowed(new Interval("2011-04-15/2011-04-21"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(new Interval("2011-04-21/2011-04-22"), "0"));
|
||||
Assert.assertTrue(timeline.isOvershadowed(Intervals.of("2011-04-15/2011-04-21"), "0"));
|
||||
Assert.assertFalse(timeline.isOvershadowed(Intervals.of("2011-04-21/2011-04-22"), "0"));
|
||||
}
|
||||
|
||||
private Pair<Interval, Pair<String, PartitionHolder<Integer>>> createExpected(
|
||||
|
@ -1738,7 +1738,7 @@ public class VersionedIntervalTimelineTest
|
|||
)
|
||||
{
|
||||
return Pair.of(
|
||||
new Interval(intervalString),
|
||||
Intervals.of(intervalString),
|
||||
Pair.of(version, new PartitionHolder<Integer>(values))
|
||||
);
|
||||
}
|
||||
|
@ -1750,17 +1750,17 @@ public class VersionedIntervalTimelineTest
|
|||
|
||||
private void add(String interval, String version, Integer value)
|
||||
{
|
||||
add(new Interval(interval), version, value);
|
||||
add(Intervals.of(interval), version, value);
|
||||
}
|
||||
|
||||
private void add(Interval interval, String version, Integer value)
|
||||
{
|
||||
add(new Interval(interval), version, makeSingle(value));
|
||||
add(interval, version, makeSingle(value));
|
||||
}
|
||||
|
||||
private void add(String interval, String version, PartitionChunk<Integer> value)
|
||||
{
|
||||
add(new Interval(interval), version, value);
|
||||
add(Intervals.of(interval), version, value);
|
||||
}
|
||||
|
||||
private void add(Interval interval, String version, PartitionChunk<Integer> value)
|
||||
|
|
|
@ -32,10 +32,11 @@ digraph g {
|
|||
peon_12[label = "peon"]
|
||||
peon_13[label = "peon"]
|
||||
|
||||
mm1 -> peon_11 [label = "new_task"]
|
||||
mm1 -> { peon_12; peon_13 }
|
||||
mm1 -> {peon_11;peon_12}
|
||||
mm1 -> peon_13 [label = "new_task"]
|
||||
mm1 -> peon_13:e [label = "new_task_status" dir=back]
|
||||
}
|
||||
|
||||
zk_status:new_task:s -> peon_11:e [label = "new_task_status" dir = back]
|
||||
zk_status:new_task:s -> mm1:e [label = "new_task_status" dir = back]
|
||||
overlord:e -> zk_status:new_task:n [dir=back label="new_task_status"]
|
||||
}
|
||||
|
|
|
@ -71,7 +71,6 @@ The indexing service also uses its own set of paths. These configs can be includ
|`druid.zk.paths.indexer.announcementsPath`|Middle managers announce themselves here.|`${druid.zk.paths.indexer.base}/announcements`|
|`druid.zk.paths.indexer.tasksPath`|Used to assign tasks to middle managers.|`${druid.zk.paths.indexer.base}/tasks`|
|`druid.zk.paths.indexer.statusPath`|Parent path for announcement of task statuses.|`${druid.zk.paths.indexer.base}/status`|
|`druid.zk.paths.indexer.leaderLatchPath`|Used for Overlord leader election.|`${druid.zk.paths.indexer.base}/leaderLatchPath`|

If `druid.zk.paths.base` and `druid.zk.paths.indexer.base` are both set, and none of the other `druid.zk.paths.*` or `druid.zk.paths.indexer.*` values are set, then the other properties will be evaluated relative to their respective `base`.
For example, if `druid.zk.paths.base` is set to `/druid1` and `druid.zk.paths.indexer.base` is set to `/druid2` then `druid.zk.paths.announcementsPath` will default to `/druid1/announcements` while `druid.zk.paths.indexer.announcementsPath` will default to `/druid2/announcements`.
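A minimal sketch of this relative-`base` behaviour in a common runtime properties file (the `/druid1` and `/druid2` values are purely illustrative):

```
# Only the two base paths are set explicitly.
druid.zk.paths.base=/druid1
druid.zk.paths.indexer.base=/druid2

# With no other druid.zk.paths.* or druid.zk.paths.indexer.* overrides, Druid derives e.g.:
#   druid.zk.paths.announcementsPath          -> /druid1/announcements
#   druid.zk.paths.indexer.announcementsPath  -> /druid2/announcements
```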
@ -156,7 +156,7 @@ A sample worker config spec is shown below:
|
|||
```json
|
||||
{
|
||||
"selectStrategy": {
|
||||
"type": "fillCapacityWithAffinity",
|
||||
"type": "fillCapacity",
|
||||
"affinityConfig": {
|
||||
"affinity": {
|
||||
"datasource1": ["host1:port", "host2:port"],
|
||||
|
@ -193,7 +193,7 @@ Issuing a GET request at the same URL will return the current worker config spec
|
|||
|
||||
|Property|Description|Default|
|
||||
|--------|-----------|-------|
|
||||
|`selectStrategy`|How to assign tasks to middle managers. Choices are `fillCapacity`, `fillCapacityWithAffinity`, `equalDistribution`, `equalDistributionWithAffinity` and `javascript`.|equalDistribution|
|
||||
|`selectStrategy`|How to assign tasks to middle managers. Choices are `fillCapacity`, `equalDistribution`, and `javascript`.|equalDistribution|
|
||||
|`autoScaler`|Only used if autoscaling is enabled. See below.|null|
|
||||
|
||||
To view the audit history of worker configs, issue a GET request to the URL -
|
||||
|
@ -212,48 +212,31 @@ http://<OVERLORD_IP>:<port>/druid/indexer/v1/worker/history?count=<n>
|
|||
|
||||
#### Worker Select Strategy
|
||||
|
||||
##### Fill Capacity
|
||||
|
||||
Workers are assigned tasks until capacity.
|
||||
|
||||
|Property|Description|Default|
|
||||
|--------|-----------|-------|
|
||||
|`type`|`fillCapacity`.|required; must be `fillCapacity`|
|
||||
|
||||
Note that, if `druid.indexer.runner.pendingTasksRunnerNumThreads` is set to n (> 1) then it means to fill n workers upto capacity simultaneously and then moving on.
|
||||
|
||||
##### Fill Capacity With Affinity
|
||||
|
||||
An affinity config can be provided.
|
||||
|
||||
|Property|Description|Default|
|
||||
|--------|-----------|-------|
|
||||
|`type`|`fillCapacityWithAffinity`.|required; must be `fillCapacityWithAffinity`|
|
||||
|`affinity`|JSON object mapping a datasource String name to a list of indexing service middle manager host:port String values. Druid doesn't perform DNS resolution, so the 'host' value must match what is configured on the middle manager and what the middle manager announces itself as (examine the Overlord logs to see what your middle manager announces itself as).|{}|
|
||||
|
||||
Tasks will try to be assigned to preferred workers. Fill capacity strategy is used if no preference for a datasource specified.
|
||||
|
||||
Note that, if `druid.indexer.runner.pendingTasksRunnerNumThreads` is set to n (> 1) then it means to fill n preferred workers upto capacity simultaneously and then moving on.
|
||||
Worker select strategies control how Druid assigns tasks to middleManagers.
|
||||
|
||||
##### Equal Distribution
|
||||
|
||||
The workers with the least amount of tasks is assigned the task.
|
||||
Tasks are assigned to the middleManager with the most available capacity at the time the task begins running. This is
|
||||
useful if you want work evenly distributed across your middleManagers.
|
||||
|
||||
|Property|Description|Default|
|
||||
|--------|-----------|-------|
|
||||
|`type`|`equalDistribution`.|required; must be `equalDistribution`|
|
||||
|`affinityConfig`|[Affinity config](#affinity) object|null (no affinity)|
|
||||
|
||||
##### Equal Distribution With Affinity
|
||||
##### Fill Capacity
|
||||
|
||||
An affinity config can be provided.
|
||||
Tasks are assigned to the worker with the most currently-running tasks at the time the task begins running. This is
|
||||
useful in situations where you are elastically auto-scaling middleManagers, since it will tend to pack some full and
|
||||
leave others empty. The empty ones can be safely terminated.
|
||||
|
||||
Note that if `druid.indexer.runner.pendingTasksRunnerNumThreads` is set to _N_ > 1, then this strategy will fill _N_
|
||||
middleManagers up to capacity simultaneously, rather than a single middleManager.
|
||||
|
||||
|Property|Description|Default|
|
||||
|--------|-----------|-------|
|
||||
|`type`|`equalDistributionWithAffinity`.|required; must be `equalDistributionWithAffinity`|
|
||||
|`affinity`|Exactly same with `fillCapacityWithAffinity` 's affinity.|{}|
|
||||
|
||||
Tasks will try to be assigned to preferred workers. Equal Distribution strategy is used if no preference for a datasource specified.
|
||||
|
||||
|`type`|`fillCapacity`.|required; must be `fillCapacity`|
|
||||
|`affinityConfig`|[Affinity config](#affinity) object|null (no affinity)|
|
||||
|
||||
##### Javascript
|
||||
|
||||
|
@ -263,7 +246,6 @@ It can be used for rapid development of missing features where the worker select
|
|||
If the selection logic is quite complex and cannot be easily tested in a JavaScript environment,
it is better to write a Druid extension module that extends the current worker selection strategies written in Java.
|
||||
|
||||
|
||||
|Property|Description|Default|
|
||||
|--------|-----------|-------|
|
||||
|`type`|`javascript`.|required; must be `javascript`|
|
||||
|
@ -282,6 +264,16 @@ Example: a function that sends batch_index_task to workers 10.0.0.1 and 10.0.0.2
|
|||
JavaScript-based functionality is disabled by default. Please refer to the Druid <a href="../development/javascript.html">JavaScript programming guide</a> for guidelines about using Druid's JavaScript functionality, including instructions on how to enable it.
|
||||
</div>
|
||||
|
||||
##### Affinity
|
||||
|
||||
Affinity configs can be provided to the _equalDistribution_ and _fillCapacity_ strategies using the "affinityConfig"
|
||||
field. If not provided, the default is to not use affinity at all.
|
||||
|
||||
|Property|Description|Default|
|
||||
|--------|-----------|-------|
|
||||
|`affinity`|JSON object mapping a datasource String name to a list of indexing service middleManager host:port String values. Druid doesn't perform DNS resolution, so the 'host' value must match what is configured on the middleManager and what the middleManager announces itself as (examine the Overlord logs to see what your middleManager announces itself as).|{}|
|
||||
|`strong`|With weak affinity (the default), tasks for a dataSource may be assigned to other middleManagers if their affinity-mapped middleManagers are not able to run all pending tasks in the queue for that dataSource. With strong affinity, tasks for a dataSource will only ever be assigned to their affinity-mapped middleManagers, and will wait in the pending queue if necessary.|false|
|
||||
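Tying the strategy and affinity tables together, here is a hedged sketch of a dynamic worker config spec that combines the `equalDistribution` strategy with a strong affinity mapping (the datasource name and middleManager host:port are made up):

```json
{
  "selectStrategy": {
    "type": "equalDistribution",
    "affinityConfig": {
      "affinity": {
        "datasource1": ["middleManager1.example.com:8091"]
      },
      "strong": true
    }
  }
}
```

With this spec, tasks for `datasource1` only ever run on the listed middleManager (waiting in the pending queue if necessary), while tasks for all other datasources are spread across middleManagers by available capacity.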
|
||||
#### Autoscaler
|
||||
|
||||
Amazon's EC2 is currently the only supported autoscaler.
|
||||
|
@ -300,7 +292,7 @@ Middle managers pass their configurations down to their child peons. The middle
|
|||
|
||||
|Property|Description|Default|
|
||||
|--------|-----------|-------|
|
||||
|`druid.indexer.runner.allowedPrefixes`|Whitelist of prefixes for configs that can be passed down to child peons.|"com.metamx", "druid", "io.druid", "user.timezone","file.encoding"|
|
||||
|`druid.indexer.runner.allowedPrefixes`|Whitelist of prefixes for configs that can be passed down to child peons.|"com.metamx", "druid", "io.druid", "user.timezone", "file.encoding", "java.io.tmpdir", "hadoop"|
|
||||
|`druid.indexer.runner.compressZnodes`|Indicates whether or not the middle managers should compress Znodes.|true|
|
||||
|`druid.indexer.runner.classpath`|Java classpath for the peon.|System.getProperty("java.class.path")|
|
||||
|`druid.indexer.runner.javaCommand`|Command required to execute java.|java|
|
||||
|
|
|
@ -9,7 +9,8 @@ To use this extension, make sure to [include](../../operations/including-extensi
|
|||
## Introduction
|
||||
|
||||
This extension emits druid metrics to a graphite carbon server.
|
||||
Events are sent after been [pickled](http://graphite.readthedocs.org/en/latest/feeding-carbon.html#the-pickle-protocol); the size of the batch is configurable.
|
||||
Metrics can be sent by using the [plaintext](http://graphite.readthedocs.io/en/latest/feeding-carbon.html#the-plaintext-protocol) or [pickle](http://graphite.readthedocs.io/en/latest/feeding-carbon.html#the-pickle-protocol) protocol.
The pickle protocol is more efficient and supports sending batches of metrics in one request (the plaintext protocol sends only one metric per request); the batch size is configurable.
|
||||
|
||||
## Configuration
|
||||
|
||||
|
@ -19,8 +20,9 @@ All the configuration parameters for graphite emitter are under `druid.emitter.g
|
|||
|--------|-----------|---------|-------|
|
||||
|`druid.emitter.graphite.hostname`|The hostname of the graphite server.|yes|none|
|
||||
|`druid.emitter.graphite.port`|The port of the graphite server.|yes|none|
|
||||
|`druid.emitter.graphite.batchSize`|Number of events to send as one batch.|no|100|
|
||||
|`druid.emitter.graphite.eventConverter`| Filter and converter of druid events to graphite event(please see next section). |yes|none|
|
||||
|`druid.emitter.graphite.batchSize`|Number of events to send as one batch (only for pickle protocol)|no|100|
|
||||
|`druid.emitter.graphite.protocol`|Graphite protocol; available protocols: pickle, plaintext.|no|pickle|
|
||||
|`druid.emitter.graphite.eventConverter`| Filter and converter of druid events to graphite event (please see next section).|yes|none|
|
||||
|`druid.emitter.graphite.flushPeriod` | Queue flushing period in milliseconds. |no|1 minute|
|
||||
|`druid.emitter.graphite.maxQueueSize`| Maximum size of the queue used to buffer events. |no|`MAX_INT`|
|
||||
|`druid.emitter.graphite.alertEmitters`| List of emitters where alerts will be forwarded to. |no| empty list (no forwarding)|
|
||||
|
@ -52,10 +54,15 @@ The path will be in the form `<namespacePrefix>.[<druid service name>].[<druid h
|
|||
The user has control of `<namespacePrefix>.[<druid service name>].[<druid hostname>].`
|
||||
|
||||
You can omit the hostname by setting `ignoreHostname=true`
|
||||
`druid.SERVICE_NAME.dataSourceName.queryType.query.time`
|
||||
`druid.SERVICE_NAME.dataSourceName.queryType.query/time`
|
||||
|
||||
You can omit the service name by setting `ignoreServiceName=true`
|
||||
`druid.HOSTNAME.dataSourceName.queryType.query.time`
|
||||
`druid.HOSTNAME.dataSourceName.queryType.query/time`
|
||||
|
||||
Elements in the metric name are separated by "/" by default, so Graphite will create all metrics on one level. If you want metrics in a tree structure, you have to set `replaceSlashWithDot=true`.
Original: `druid.HOSTNAME.dataSourceName.queryType.query/time`
Changed: `druid.HOSTNAME.dataSourceName.queryType.query.time`
|
||||
|
||||
|
||||
```json
|
||||
|
||||
|
@ -70,7 +77,7 @@ Same as for the `all` converter user has control of `<namespacePrefix>.[<druid s
|
|||
White-list based converter comes with the following default white list map located under resources in `./src/main/resources/defaultWhiteListMap.json`
|
||||
|
||||
The user can override the default white list map by supplying a property called `mapPath`.
This property is a String containing the path of the file containing the **white list map JSON object**.
For example, the following converter will read the map from the file `/pathPrefix/fileName.json`.
|
||||
|
||||
```json
|
||||
|
|
|
@ -0,0 +1,32 @@
|
|||
---
layout: doc_page
---

Druid Redis Cache
--------------------

A cache implementation for Druid based on [Redis](https://github.com/antirez/redis).

# Configuration
Below are the configuration options known to this module:

|`runtime.properties`|Description|Default|Required|
|--------------------|-----------|-------|--------|
|`druid.cache.host`|Redis server host|None|yes|
|`druid.cache.port`|Redis server port|None|yes|
|`druid.cache.expiration`|Expiration (in milliseconds) for cache entries|24 * 3600 * 1000|no|
|`druid.cache.timeout`|Timeout (in milliseconds) for getting cache entries from Redis|2000|no|
|`druid.cache.maxTotalConnections`|Max total connections to Redis|8|no|
|`druid.cache.maxIdleConnections`|Max idle connections to Redis|8|no|
|`druid.cache.minIdleConnections`|Min idle connections to Redis|0|no|

# Enabling

To enable the redis cache, include this module on the loadList and set `druid.cache.type` to `redis` in your properties.
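A minimal sketch of the corresponding runtime properties (the host and expiration values are illustrative):

```
druid.extensions.loadList=["druid-redis-cache"]

druid.cache.type=redis
druid.cache.host=redis.example.com
druid.cache.port=6379
# 1 hour, in milliseconds
druid.cache.expiration=3600000
```

In a typical setup you would also enable cache use and population on the broker or historicals (for example `druid.historical.cache.useCache=true` and `druid.historical.cache.populateCache=true`); those switches are part of Druid's general caching configuration rather than this extension.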
# Metrics

In addition to the normal cache metrics, the redis cache implementation also reports the following in both `total` and `delta`:

|Metric|Description|Normal value|
|------|-----------|------------|
|`query/cache/redis/*/requests`|Count of requests to the Redis cache|Each request to Redis increases the request count by 1|
|
|
@ -61,6 +61,7 @@ All of these community extensions can be downloaded using *pull-deps* with the c
|
|||
|druid-orc-extensions|Support for data in Apache Orc data format.|[link](../development/extensions-contrib/orc.html)|
|
||||
|druid-parquet-extensions|Support for data in Apache Parquet data format. Requires druid-avro-extensions to be loaded.|[link](../development/extensions-contrib/parquet.html)|
|
||||
|druid-rabbitmq|RabbitMQ firehose.|[link](../development/extensions-contrib/rabbitmq.html)|
|
||||
|druid-redis-cache|A cache implementation for Druid based on Redis.|[link](../development/extensions-contrib/redis-cache.html)|
|
||||
|druid-rocketmq|RocketMQ firehose.|[link](../development/extensions-contrib/rocketmq.html)|
|
||||
|druid-time-min-max|Min/Max aggregator for timestamp.|[link](../development/extensions-contrib/time-min-max.html)|
|
||||
|druid-google-extensions|Google Cloud Storage deep storage.|[link](../development/extensions-contrib/google.html)|
|
||||
|
|
|
@ -10,12 +10,21 @@ Druid uses a module system that allows for the addition of extensions at runtime
|
|||
|
||||
Druid's extensions leverage Guice in order to add things at runtime. Basically, Guice is a framework for Dependency Injection, but we use it to hold the expected object graph of the Druid process. Extensions can make any changes they want/need to the object graph via adding Guice bindings. While the extensions actually give you the capability to change almost anything however you want, in general, we expect people to want to extend one of the things listed below. This means that we honor our [versioning strategy](./versioning.html) for changes that affect the interfaces called out on this page, but other interfaces are deemed "internal" and can be changed in an incompatible manner even between patch releases.
|
||||
|
||||
1. Add a new deep storage implementation
|
||||
1. Add a new Firehose
|
||||
1. Add Aggregators
|
||||
1. Add Complex metrics
|
||||
1. Add new Query types
|
||||
1. Add new Jersey resources
|
||||
1. Add a new deep storage implementation by extending the `io.druid.segment.loading.DataSegment*` and
|
||||
`io.druid.tasklogs.TaskLog*` classes.
|
||||
1. Add a new Firehose by extending `io.druid.data.input.FirehoseFactory`.
|
||||
1. Add a new input parser by extending `io.druid.data.input.impl.InputRowParser`.
|
||||
1. Add a new string-based input format by extending `io.druid.data.input.impl.ParseSpec`.
|
||||
1. Add Aggregators by extending `io.druid.query.aggregation.AggregatorFactory`, `io.druid.query.aggregation.Aggregator`,
|
||||
and `io.druid.query.aggregation.BufferAggregator`.
|
||||
1. Add PostAggregators by extending `io.druid.query.aggregation.PostAggregator`.
|
||||
1. Add ExtractionFns by extending `io.druid.query.extraction.ExtractionFn`.
|
||||
1. Add Complex metrics by extending `io.druid.segment.serde.ComplexMetricsSerde`.
|
||||
1. Add new Query types by extending `io.druid.query.QueryRunnerFactory`, `io.druid.query.QueryToolChest`, and
|
||||
`io.druid.query.Query`.
|
||||
1. Add new Jersey resources by calling `Jerseys.addResource(binder, clazz)`.
|
||||
1. Add new Jetty filters by extending `io.druid.server.initialization.jetty.ServletFilterHolder`.
|
||||
1. Add new secret providers by extending `io.druid.metadata.PasswordProvider`.
|
||||
1. Bundle your extension with all the other Druid extensions
|
||||
|
||||
Extensions are added to the system via an implementation of `io.druid.initialization.DruidModule`.
|
||||
|
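A hedged skeleton of such a module (the package, class, and resource names are made up; a real extension would bind one of the extension points listed above):

```java
package org.example.druid;  // hypothetical package

import com.fasterxml.jackson.databind.Module;
import com.google.inject.Binder;
import io.druid.initialization.DruidModule;

import java.util.Collections;
import java.util.List;

public class ExampleExtensionModule implements DruidModule
{
  @Override
  public List<? extends Module> getJacksonModules()
  {
    // Jackson modules registering any polymorphic types this extension adds;
    // return an empty list if there are none.
    return Collections.emptyList();
  }

  @Override
  public void configure(Binder binder)
  {
    // Guice bindings go here, e.g. Jerseys.addResource(binder, MyResource.class)
    // for a new Jersey resource, or bindings for one of the other extension points.
  }
}
```

The module is then typically made discoverable by listing its class name in a `META-INF/services/io.druid.initialization.DruidModule` file inside the extension jar.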
|
|
@ -227,12 +227,17 @@ instead of the cardinality aggregator if you do not care about the individual va
|
|||
"type": "cardinality",
|
||||
"name": "<output_name>",
|
||||
"fields": [ <dimension1>, <dimension2>, ... ],
|
||||
"byRow": <false | true> # (optional, defaults to false)
|
||||
"byRow": <false | true> # (optional, defaults to false),
|
||||
"round": <false | true> # (optional, defaults to false)
|
||||
}
|
||||
```
|
||||
|
||||
Each individual element of the "fields" list can be a String or [DimensionSpec](../querying/dimensionspecs.html). A String dimension in the fields list is equivalent to a DefaultDimensionSpec (no transformations).
|
||||
|
||||
The HyperLogLog algorithm generates decimal estimates with some error. "round" can be set to true to round off estimated
|
||||
values to whole numbers. Note that even with rounding, the cardinality is still an estimate. The "round" field only
|
||||
affects query-time behavior, and is ignored at ingestion-time.
|
||||
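For instance, a hedged example combining a plain String dimension with an extraction DimensionSpec, rounded at query time (the dimension names are made up):

```json
{
  "type": "cardinality",
  "name": "distinct_country_city_pairs",
  "fields": [
    "country",
    {
      "type": "extraction",
      "dimension": "city",
      "outputName": "city_prefix",
      "extractionFn": { "type": "substring", "index": 0, "length": 3 }
    }
  ],
  "byRow": true,
  "round": true
}
```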
|
||||
#### Cardinality by value
|
||||
|
||||
When setting `byRow` to `false` (the default) it computes the cardinality of the set composed of the union of all dimension values for all the given dimensions.
|
||||
|
@ -315,12 +320,17 @@ Uses [HyperLogLog](http://algo.inria.fr/flajolet/Publications/FlFuGaMe07.pdf) to
|
|||
"type" : "hyperUnique",
|
||||
"name" : <output_name>,
|
||||
"fieldName" : <metric_name>,
|
||||
"isInputHyperUnique" : false
|
||||
"isInputHyperUnique" : false,
|
||||
"round" : false
|
||||
}
|
||||
```
|
||||
|
||||
isInputHyperUnique can be set to true to index pre-computed HLL (Base64 encoded output from druid-hll is expected).
|
||||
The isInputHyperUnique field only affects ingestion-time behavior, and is ignored at query time.
|
||||
"isInputHyperUnique" can be set to true to index pre-computed HLL (Base64 encoded output from druid-hll is expected).
|
||||
The "isInputHyperUnique" field only affects ingestion-time behavior, and is ignored at query-time.
|
||||
|
||||
The HyperLogLog algorithm generates decimal estimates with some error. "round" can be set to true to round off estimated
|
||||
values to whole numbers. Note that even with rounding, the cardinality is still an estimate. The "round" field only
|
||||
affects query-time behavior, and is ignored at ingestion-time.
|
||||
|
||||
For more approximate aggregators, please see [theta sketches](../development/extensions-core/datasketches-aggregators.html).
|
||||
|
||||
|
|
|
@ -36,16 +36,26 @@ postAggregation : {
|
|||
}
|
||||
```
|
||||
|
||||
### Field accessor post-aggregator
|
||||
### Field accessor post-aggregators
|
||||
|
||||
This returns the value produced by the specified [aggregator](../querying/aggregations.html).
|
||||
These post-aggregators return the value produced by the specified [aggregator](../querying/aggregations.html).
|
||||
|
||||
`fieldName` refers to the output name of the aggregator given in the [aggregations](../querying/aggregations.html) portion of the query.
|
||||
For complex aggregators, like "cardinality" and "hyperUnique", the `type` of the post-aggregator determines what
|
||||
the post-aggregator will return. Use type "fieldAccess" to return the raw aggregation object, or use type
|
||||
"finalizingFieldAccess" to return a finalized value, such as an estimated cardinality.
|
||||
|
||||
```json
|
||||
{ "type" : "fieldAccess", "name": <output_name>, "fieldName" : <aggregator_name> }
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```json
|
||||
{ "type" : "finalizingFieldAccess", "name": <output_name>, "fieldName" : <aggregator_name> }
|
||||
```
|
||||
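As a hedged illustration (the aggregator names are made up), an arithmetic post-aggregator that divides a row count by a finalized `hyperUnique` estimate would reference the complex aggregator through `finalizingFieldAccess` while reading the simple count through `fieldAccess`:

```json
{
  "type": "arithmetic",
  "name": "rows_per_unique_user",
  "fn": "/",
  "fields": [
    { "type": "fieldAccess", "name": "rows", "fieldName": "rows" },
    { "type": "finalizingFieldAccess", "name": "unique_users", "fieldName": "unique_users" }
  ]
}
```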
|
||||
|
||||
### Constant post-aggregator
|
||||
|
||||
The constant post-aggregator always returns the specified value.
|
||||
|
@ -107,7 +117,11 @@ JavaScript-based functionality is disabled by default. Please refer to the Druid
|
|||
The hyperUniqueCardinality post aggregator is used to wrap a hyperUnique object such that it can be used in post aggregations.
|
||||
|
||||
```json
|
||||
{ "type" : "hyperUniqueCardinality", "name": <output name>, "fieldName" : <the name field value of the hyperUnique aggregator>}
|
||||
{
|
||||
"type" : "hyperUniqueCardinality",
|
||||
"name": <output name>,
|
||||
"fieldName" : <the name field value of the hyperUnique aggregator>
|
||||
}
|
||||
```
|
||||
|
||||
It can be used in a sample calculation as so:
|
||||
|
@ -128,6 +142,10 @@ It can be used in a sample calculation as so:
|
|||
}]
|
||||
```
|
||||
|
||||
This post-aggregator will inherit the rounding behavior of the aggregator it references. Note that this inheritance
|
||||
is only effective if you directly reference an aggregator. Going through another post-aggregator, for example, will
|
||||
cause the user-specified rounding behavior to get lost and default to "no rounding".
|
||||
|
||||
## Example Usage
|
||||
|
||||
In this example, let’s calculate a simple percentage using post aggregators. Let’s imagine our data set has a metric called "total".
|
||||
|
|
|
@ -22,6 +22,7 @@ package io.druid.emitter.ambari.metrics;
|
|||
import com.google.common.collect.Maps;
|
||||
import com.metamx.emitter.service.ServiceMetricEvent;
|
||||
import io.druid.jackson.DefaultObjectMapper;
|
||||
import io.druid.java.util.common.DateTimes;
|
||||
import junitparams.JUnitParamsRunner;
|
||||
import junitparams.Parameters;
|
||||
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
|
||||
|
@ -44,7 +45,7 @@ public class WhiteListBasedDruidToTimelineEventConverterTest
|
|||
new DefaultObjectMapper()
|
||||
);
|
||||
private ServiceMetricEvent event;
|
||||
private final DateTime createdTime = new DateTime();
|
||||
private final DateTime createdTime = DateTimes.nowUtc();
|
||||
private final String hostname = "testHost:8080";
|
||||
private final String serviceName = "historical";
|
||||
private final String defaultNamespace = prefix + "." + serviceName;
|
||||
|
|