1) Move various "api" classes to io.druid packages and make sure things compile and stuff

cheddar 2013-08-28 15:50:59 -05:00
parent fc60158273
commit 9c30ced5ea
297 changed files with 1176 additions and 783 deletions
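
The bulk of this commit is a mechanical package move: class names stay the same while their packages change from com.metamx.druid.* to io.druid.*. A sketch of the recurring import rewrite, using only renames visible in the hunks below (the enclosing class is left out, so these lines are illustrative rather than a compilable unit):

// Before: query API types lived under com.metamx.druid
import com.metamx.druid.Query;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.segment.QuerySegmentSpec;
import com.metamx.druid.query.segment.SegmentDescriptor;
import com.metamx.druid.aggregation.AggregatorFactory;

// After: the same types under io.druid, with the "segment" spec package renamed to "spec"
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.spec.QuerySegmentSpec;
import io.druid.query.SegmentDescriptor;
import io.druid.query.aggregation.AggregatorFactory;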

View File

@ -23,10 +23,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.segment.QuerySegmentSpec;
import com.metamx.druid.query.segment.QuerySegmentWalker;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.Duration;
import org.joda.time.Interval;

View File

@ -20,7 +20,6 @@
package com.metamx.druid;
import com.google.common.collect.Lists;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.query.filter.AndDimFilter;
import com.metamx.druid.query.filter.DimFilter;
@ -31,13 +30,15 @@ import com.metamx.druid.query.filter.SelectorDimFilter;
import com.metamx.druid.query.search.InsensitiveContainsSearchQuerySpec;
import com.metamx.druid.query.search.SearchQuery;
import com.metamx.druid.query.search.SearchQuerySpec;
import com.metamx.druid.query.segment.LegacySegmentSpec;
import com.metamx.druid.query.segment.QuerySegmentSpec;
import com.metamx.druid.query.timeboundary.TimeBoundaryQuery;
import com.metamx.druid.query.timeseries.TimeseriesQuery;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.SearchResultValue;
import com.metamx.druid.result.TimeBoundaryResultValue;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.spec.LegacySegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.DateTime;
import org.joda.time.Interval;

View File

@ -21,6 +21,7 @@ package com.metamx.druid;
import com.google.common.primitives.Longs;
import com.metamx.druid.result.Result;
import io.druid.granularity.QueryGranularity;
import java.util.Comparator;

View File

@ -26,6 +26,8 @@ import com.metamx.druid.query.search.SearchHit;
import com.metamx.druid.query.search.SearchSortSpec;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.SearchResultValue;
import io.druid.granularity.AllGranularity;
import io.druid.granularity.QueryGranularity;
import java.util.TreeSet;

View File

@ -20,12 +20,12 @@
package com.metamx.druid;
import com.metamx.common.guava.nary.BinaryFn;
import com.metamx.druid.AllGranularity;
import com.metamx.druid.QueryGranularity;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.TimeseriesResultValue;
import io.druid.granularity.AllGranularity;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.AggregatorFactory;
import java.util.LinkedHashMap;
import java.util.List;

View File

@ -30,9 +30,9 @@ import com.metamx.druid.client.selector.ServerSelector;
import com.metamx.druid.concurrent.Execs;
import com.metamx.druid.guice.annotations.Client;
import com.metamx.druid.partition.PartitionChunk;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.QueryToolChestWarehouse;
import com.metamx.http.client.HttpClient;
import io.druid.query.QueryRunner;
import java.util.Iterator;
import java.util.Map;

View File

@ -38,24 +38,24 @@ import com.metamx.common.guava.BaseSequence;
import com.metamx.common.guava.LazySequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.Query;
import com.metamx.druid.TimelineObjectHolder;
import com.metamx.druid.VersionedIntervalTimeline;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.client.cache.Cache;
import com.metamx.druid.client.selector.QueryableDruidServer;
import com.metamx.druid.client.selector.ServerSelector;
import com.metamx.druid.partition.PartitionChunk;
import com.metamx.druid.query.CacheStrategy;
import com.metamx.druid.query.MetricManipulationFn;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.QueryToolChest;
import com.metamx.druid.query.QueryToolChestWarehouse;
import com.metamx.druid.query.segment.MultipleSpecificSegmentSpec;
import com.metamx.druid.query.segment.SegmentDescriptor;
import com.metamx.druid.result.BySegmentResultValueClass;
import com.metamx.druid.result.Result;
import com.metamx.emitter.EmittingLogger;
import io.druid.query.CacheStrategy;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.SegmentDescriptor;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.query.spec.MultipleSpecificSegmentSpec;
import org.joda.time.DateTime;
import org.joda.time.Interval;

View File

@ -31,10 +31,9 @@ import com.google.common.collect.Iterables;
import com.metamx.common.Granularity;
import com.metamx.druid.jackson.CommaListJoinDeserializer;
import com.metamx.druid.jackson.CommaListJoinSerializer;
import com.metamx.druid.query.segment.SegmentDescriptor;
import com.metamx.druid.shard.NoneShardSpec;
import com.metamx.druid.shard.ShardSpec;
import io.druid.query.SegmentDescriptor;
import org.joda.time.DateTime;
import org.joda.time.Interval;

View File

@ -35,11 +35,6 @@ import com.metamx.common.guava.BaseSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.common.logger.Logger;
import com.metamx.druid.Query;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.query.MetricManipulationFn;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.QueryToolChest;
import com.metamx.druid.query.QueryToolChestWarehouse;
import com.metamx.druid.result.BySegmentResultValueClass;
import com.metamx.druid.result.Result;
@ -47,6 +42,11 @@ import com.metamx.http.client.HttpClient;
import com.metamx.http.client.io.AppendableByteArrayInputStream;
import com.metamx.http.client.response.ClientResponse;
import com.metamx.http.client.response.InputStreamResponseHandler;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.MetricManipulationFn;
import org.jboss.netty.handler.codec.http.HttpChunk;
import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jboss.netty.handler.codec.http.HttpResponse;

View File

@ -21,7 +21,7 @@ package com.metamx.druid.client;
import com.metamx.druid.VersionedIntervalTimeline;
import com.metamx.druid.client.selector.ServerSelector;
import com.metamx.druid.query.QueryRunner;
import io.druid.query.QueryRunner;
/**
*/

View File

@ -21,8 +21,8 @@ package com.metamx.druid.client.indexing;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.client.DataSegment;
import io.druid.query.aggregation.AggregatorFactory;
import java.util.List;

View File

@ -5,8 +5,6 @@ import com.google.inject.Binder;
import com.google.inject.Module;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.MapBinder;
import com.metamx.druid.Query;
import com.metamx.druid.query.QueryToolChest;
import com.metamx.druid.query.group.GroupByQuery;
import com.metamx.druid.query.group.GroupByQueryConfig;
import com.metamx.druid.query.group.GroupByQueryQueryToolChest;
@ -19,6 +17,8 @@ import com.metamx.druid.query.timeboundary.TimeBoundaryQuery;
import com.metamx.druid.query.timeboundary.TimeBoundaryQueryQueryToolChest;
import com.metamx.druid.query.timeseries.TimeseriesQuery;
import com.metamx.druid.query.timeseries.TimeseriesQueryQueryToolChest;
import io.druid.query.Query;
import io.druid.query.QueryToolChest;
import java.util.Map;

View File

@ -8,8 +8,8 @@ import com.metamx.druid.http.log.EmittingRequestLoggerProvider;
import com.metamx.druid.http.log.FileRequestLoggerProvider;
import com.metamx.druid.http.log.RequestLogger;
import com.metamx.druid.http.log.RequestLoggerProvider;
import com.metamx.druid.initialization.DruidModule;
import com.metamx.druid.query.segment.QuerySegmentWalker;
import io.druid.initialization.DruidModule;
import java.util.Arrays;
import java.util.List;

View File

@ -21,17 +21,17 @@ package com.metamx.druid.http;
import com.google.common.base.Function;
import com.google.inject.Inject;
import com.metamx.druid.Query;
import com.metamx.druid.client.CachingClusteredClient;
import com.metamx.druid.query.FinalizeResultsQueryRunner;
import com.metamx.druid.query.MetricsEmittingQueryRunner;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.QueryToolChest;
import com.metamx.druid.query.QueryToolChestWarehouse;
import com.metamx.druid.query.segment.QuerySegmentWalker;
import com.metamx.druid.query.segment.SegmentDescriptor;
import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.SegmentDescriptor;
import org.joda.time.Interval;
import javax.annotation.Nullable;

View File

@ -19,13 +19,13 @@
package com.metamx.druid.http;
import com.metamx.druid.Query;
import com.metamx.druid.client.DirectDruidClient;
import com.metamx.druid.query.FinalizeResultsQueryRunner;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.QueryToolChestWarehouse;
import com.metamx.druid.query.segment.QuerySegmentWalker;
import com.metamx.druid.query.segment.SegmentDescriptor;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.SegmentDescriptor;
import org.joda.time.Interval;
/**

View File

@ -29,7 +29,6 @@ import com.google.inject.Inject;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.common.logger.Logger;
import com.metamx.druid.Query;
import com.metamx.druid.guice.annotations.Json;
import com.metamx.druid.guice.annotations.Smile;
import com.metamx.druid.http.log.RequestLogger;
@ -37,6 +36,7 @@ import com.metamx.druid.query.segment.QuerySegmentWalker;
import com.metamx.emitter.service.AlertEvent;
import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.query.Query;
import org.eclipse.jetty.server.Request;
import org.joda.time.DateTime;

View File

@ -23,7 +23,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Joiner;
import com.metamx.druid.Query;
import io.druid.query.Query;
import org.joda.time.DateTime;
import java.util.Arrays;

View File

@ -22,11 +22,11 @@ package com.metamx.druid.http.log;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;
import com.google.common.collect.ImmutableMap;
import com.metamx.druid.Query;
import com.metamx.druid.http.RequestLogLine;
import com.metamx.emitter.core.Event;
import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.emitter.service.ServiceEventBuilder;
import io.druid.query.Query;
import org.joda.time.DateTime;
import java.util.Map;

View File

@ -1,10 +0,0 @@
package com.metamx.druid.initialization;
import java.util.List;
/**
*/
public interface DruidModule extends com.google.inject.Module
{
public List<com.fasterxml.jackson.databind.Module> getJacksonModules();
}
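
The interface is deleted here rather than edited in place; the imports of io.druid.initialization.DruidModule elsewhere in this commit indicate it was recreated under the new package. A sketch of the relocated interface, assuming it keeps the deleted body above unchanged:

package io.druid.initialization;

import java.util.List;

public interface DruidModule extends com.google.inject.Module
{
  public List<com.fasterxml.jackson.databind.Module> getJacksonModules();
}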

View File

@ -33,6 +33,7 @@ import com.metamx.druid.guice.DruidSecondaryModule;
import com.metamx.druid.guice.annotations.Json;
import com.metamx.druid.guice.annotations.Smile;
import com.metamx.druid.jackson.JacksonModule;
import io.druid.initialization.DruidModule;
import javax.annotation.Nullable;
import java.util.Arrays;

View File

@ -26,9 +26,10 @@ import com.metamx.common.guava.Sequences;
import com.metamx.common.guava.Yielder;
import com.metamx.common.guava.Yielders;
import com.metamx.common.guava.YieldingAccumulator;
import com.metamx.druid.Query;
import com.metamx.druid.result.BySegmentResultValueClass;
import com.metamx.druid.result.Result;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import org.joda.time.DateTime;
import java.util.List;

View File

@ -20,7 +20,8 @@
package com.metamx.druid.query;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.Query;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
/**
*/

View File

@ -30,7 +30,8 @@ import com.metamx.common.guava.MergeIterable;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.common.logger.Logger;
import com.metamx.druid.Query;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import java.util.Arrays;
import java.util.Iterator;

View File

@ -22,7 +22,8 @@ package com.metamx.druid.query;
import com.google.common.base.Function;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.Query;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
/**
*/

View File

@ -24,10 +24,13 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.Query;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.result.BySegmentResultValueClass;
import com.metamx.druid.result.Result;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.MetricManipulationFn;
import javax.annotation.Nullable;

View File

@ -24,9 +24,10 @@ import com.google.common.collect.Lists;
import com.metamx.common.guava.FunctionalIterable;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.PeriodGranularity;
import com.metamx.druid.Query;
import com.metamx.druid.query.segment.MultipleIntervalSegmentSpec;
import io.druid.granularity.PeriodGranularity;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import org.joda.time.Interval;
import org.joda.time.Period;

View File

@ -1,7 +1,8 @@
package com.metamx.druid.query;
import com.google.inject.Inject;
import com.metamx.druid.Query;
import io.druid.query.Query;
import io.druid.query.QueryToolChest;
import java.util.Map;

View File

@ -24,9 +24,10 @@ import com.metamx.common.guava.Accumulator;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Yielder;
import com.metamx.common.guava.YieldingAccumulator;
import com.metamx.druid.Query;
import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import java.io.IOException;

View File

@ -21,7 +21,8 @@ package com.metamx.druid.query;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.Query;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
/**
*/

View File

@ -23,8 +23,8 @@ import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import io.druid.query.aggregation.AggregatorFactory;
import javax.annotation.Nullable;
import java.util.List;

View File

@ -20,7 +20,7 @@
package com.metamx.druid.query;
import com.google.common.collect.Lists;
import com.metamx.druid.aggregation.AggregatorFactory;
import io.druid.query.aggregation.AggregatorFactory;
import java.nio.ByteBuffer;
import java.util.List;

View File

@ -19,7 +19,8 @@
package com.metamx.druid.query;
import com.metamx.druid.Query;
import io.druid.query.Query;
import io.druid.query.QueryToolChest;
/**
*/

View File

@ -19,7 +19,8 @@
package com.metamx.druid.query;
import com.metamx.druid.Query;
import io.druid.query.Query;
import io.druid.query.QueryToolChest;
/**
*/

View File

@ -22,8 +22,9 @@ package com.metamx.druid.query;
import com.google.common.collect.Ordering;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.nary.BinaryFn;
import com.metamx.druid.Query;
import com.metamx.druid.guava.CombiningSequence;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
/**
*/

View File

@ -31,9 +31,6 @@ import com.metamx.common.ISE;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.BaseQuery;
import com.metamx.druid.Query;
import com.metamx.druid.QueryGranularity;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.input.Row;
import com.metamx.druid.query.Queries;
@ -45,8 +42,11 @@ import com.metamx.druid.query.group.orderby.DefaultLimitSpec;
import com.metamx.druid.query.group.orderby.LimitSpec;
import com.metamx.druid.query.group.orderby.NoopLimitSpec;
import com.metamx.druid.query.group.orderby.OrderByColumnSpec;
import com.metamx.druid.query.segment.LegacySegmentSpec;
import com.metamx.druid.query.segment.QuerySegmentSpec;
import io.druid.granularity.QueryGranularity;
import io.druid.query.Query;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.spec.LegacySegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import javax.annotation.Nullable;
import java.util.List;

View File

@ -32,18 +32,18 @@ import com.metamx.common.guava.Accumulator;
import com.metamx.common.guava.ConcatSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.Query;
import com.metamx.druid.QueryGranularity;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.index.v1.IncrementalIndex;
import com.metamx.druid.input.MapBasedRow;
import com.metamx.druid.input.Row;
import com.metamx.druid.input.Rows;
import com.metamx.druid.query.MetricManipulationFn;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.QueryToolChest;
import com.metamx.druid.query.dimension.DimensionSpec;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.granularity.QueryGranularity;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.MetricManipulationFn;
import org.joda.time.Interval;
import org.joda.time.Minutes;

View File

@ -31,10 +31,10 @@ import com.google.common.primitives.Longs;
import com.metamx.common.ISE;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.input.Row;
import com.metamx.druid.query.dimension.DimensionSpec;
import io.druid.query.aggregation.AggregatorFactory;
import javax.annotation.Nullable;
import java.util.ArrayList;

View File

@ -23,10 +23,10 @@ import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.google.common.base.Function;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.input.Row;
import com.metamx.druid.query.dimension.DimensionSpec;
import io.druid.query.aggregation.AggregatorFactory;
import java.util.List;

View File

@ -3,10 +3,10 @@ package com.metamx.druid.query.group.orderby;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.input.Row;
import com.metamx.druid.query.dimension.DimensionSpec;
import io.druid.query.aggregation.AggregatorFactory;
import java.util.List;

View File

@ -21,7 +21,7 @@ package com.metamx.druid.query.metadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.metamx.druid.index.column.ValueType;
import io.druid.segment.column.ValueType;
/**
*/

View File

@ -22,8 +22,8 @@ package com.metamx.druid.query.metadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.metamx.druid.BaseQuery;
import com.metamx.druid.Query;
import com.metamx.druid.query.segment.QuerySegmentSpec;
import io.druid.query.Query;
import io.druid.query.spec.QuerySegmentSpec;
import java.util.Map;

View File

@ -31,15 +31,15 @@ import com.metamx.common.ISE;
import com.metamx.common.guava.MergeSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.nary.BinaryFn;
import com.metamx.druid.Query;
import com.metamx.druid.collect.OrderedMergeSequence;
import com.metamx.druid.query.CacheStrategy;
import com.metamx.druid.query.MetricManipulationFn;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.QueryToolChest;
import com.metamx.druid.query.ResultMergeQueryRunner;
import com.metamx.druid.utils.JodaUtils;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.query.CacheStrategy;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.aggregation.MetricManipulationFn;
import org.joda.time.Interval;
import org.joda.time.Minutes;

View File

@ -25,12 +25,12 @@ import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.metamx.druid.BaseQuery;
import com.metamx.druid.Query;
import com.metamx.druid.QueryGranularity;
import com.metamx.druid.query.filter.DimFilter;
import com.metamx.druid.query.segment.QuerySegmentSpec;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.SearchResultValue;
import io.druid.granularity.QueryGranularity;
import io.druid.query.Query;
import io.druid.query.spec.QuerySegmentSpec;
import javax.annotation.Nullable;
import java.util.List;

View File

@ -35,21 +35,21 @@ import com.metamx.common.guava.MergeSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.common.guava.nary.BinaryFn;
import com.metamx.druid.Query;
import com.metamx.druid.ResultGranularTimestampComparator;
import com.metamx.druid.SearchBinaryFn;
import com.metamx.druid.collect.OrderedMergeSequence;
import com.metamx.druid.query.CacheStrategy;
import com.metamx.druid.query.IntervalChunkingQueryRunner;
import com.metamx.druid.query.MetricManipulationFn;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.QueryToolChest;
import com.metamx.druid.query.ResultMergeQueryRunner;
import com.metamx.druid.query.filter.DimFilter;
import com.metamx.druid.result.BySegmentSearchResultValue;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.SearchResultValue;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.query.CacheStrategy;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.aggregation.MetricManipulationFn;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.Minutes;

View File

@ -19,6 +19,9 @@
package com.metamx.druid.query.segment;
import io.druid.query.spec.LegacySegmentSpec;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.Interval;
import java.util.Arrays;

View File

@ -19,8 +19,9 @@
package com.metamx.druid.query.segment;
import com.metamx.druid.Query;
import com.metamx.druid.query.QueryRunner;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.SegmentDescriptor;
import org.joda.time.Interval;
/**

View File

@ -24,8 +24,9 @@ import com.metamx.common.guava.Accumulator;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Yielder;
import com.metamx.common.guava.YieldingAccumulator;
import com.metamx.druid.Query;
import com.metamx.druid.query.QueryRunner;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.spec.QuerySegmentSpec;
import java.io.IOException;
import java.util.concurrent.Callable;

View File

@ -19,8 +19,10 @@
package com.metamx.druid.query.segment;
import com.metamx.druid.Query;
import com.metamx.druid.query.QueryRunner;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.SegmentDescriptor;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.Interval;
import java.util.Arrays;

View File

@ -25,12 +25,11 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.metamx.druid.BaseQuery;
import com.metamx.druid.Query;
import com.metamx.druid.query.segment.MultipleIntervalSegmentSpec;
import com.metamx.druid.query.segment.QuerySegmentSpec;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.TimeBoundaryResultValue;
import io.druid.query.Query;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.DateTime;
import org.joda.time.Interval;

View File

@ -29,17 +29,17 @@ import com.google.common.collect.Ordering;
import com.metamx.common.guava.MergeSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.druid.LogicalSegment;
import com.metamx.druid.Query;
import com.metamx.druid.collect.OrderedMergeSequence;
import com.metamx.druid.query.BySegmentSkippingQueryRunner;
import com.metamx.druid.query.CacheStrategy;
import com.metamx.druid.query.MetricManipulationFn;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.QueryToolChest;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.TimeBoundaryResultValue;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.query.CacheStrategy;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.segment.LogicalSegment;
import org.joda.time.DateTime;
import javax.annotation.Nullable;

View File

@ -24,15 +24,15 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.google.common.collect.ImmutableList;
import com.metamx.druid.BaseQuery;
import com.metamx.druid.Query;
import com.metamx.druid.QueryGranularity;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.query.Queries;
import com.metamx.druid.query.filter.DimFilter;
import com.metamx.druid.query.segment.QuerySegmentSpec;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.TimeseriesResultValue;
import io.druid.granularity.QueryGranularity;
import io.druid.query.Query;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.spec.QuerySegmentSpec;
import java.util.List;
import java.util.Map;

View File

@ -28,25 +28,24 @@ import com.google.common.collect.Ordering;
import com.metamx.common.guava.MergeSequence;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.nary.BinaryFn;
import com.metamx.druid.Query;
import com.metamx.druid.QueryGranularity;
import com.metamx.druid.ResultGranularTimestampComparator;
import com.metamx.druid.TimeseriesBinaryFn;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.collect.OrderedMergeSequence;
import com.metamx.druid.query.CacheStrategy;
import com.metamx.druid.query.IntervalChunkingQueryRunner;
import com.metamx.druid.query.MetricManipulationFn;
import com.metamx.druid.query.QueryCacheHelper;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.QueryToolChest;
import com.metamx.druid.query.ResultMergeQueryRunner;
import com.metamx.druid.query.filter.DimFilter;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.TimeseriesResultValue;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.granularity.QueryGranularity;
import io.druid.query.CacheStrategy;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChest;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.MetricManipulationFn;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.Minutes;

View File

@ -9,8 +9,6 @@ import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.io.Closeables;
import com.metamx.druid.Druids;
import com.metamx.druid.Query;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.input.Row;
import com.metamx.druid.jackson.DefaultObjectMapper;
import com.metamx.druid.query.dimension.DimensionSpec;
@ -19,6 +17,8 @@ import com.metamx.druid.result.Result;
import com.metamx.druid.result.TimeseriesResultValue;
import com.metamx.druid.sql.antlr4.DruidSQLLexer;
import com.metamx.druid.sql.antlr4.DruidSQLParser;
import io.druid.query.Query;
import io.druid.query.aggregation.AggregatorFactory;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonTokenStream;

View File

@ -17,12 +17,11 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.query;
package io.druid.query;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.Query;
/**
*/

View File

@ -17,20 +17,18 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid;
package io.druid.query;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.group.GroupByQuery;
import com.metamx.druid.query.metadata.SegmentMetadataQuery;
import com.metamx.druid.query.search.SearchQuery;
import com.metamx.druid.query.segment.QuerySegmentSpec;
import com.metamx.druid.query.segment.QuerySegmentWalker;
import com.metamx.druid.query.timeboundary.TimeBoundaryQuery;
import com.metamx.druid.query.timeseries.TimeseriesQuery;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.Duration;
import org.joda.time.Interval;

View File

@ -17,10 +17,9 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.query;
package io.druid.query;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.Query;
/**
*/

View File

@ -17,14 +17,15 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.query;
package io.druid.query;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Function;
import com.metamx.common.guava.Sequence;
import com.metamx.druid.LogicalSegment;
import com.metamx.druid.Query;
import com.metamx.emitter.service.ServiceMetricEvent;
import io.druid.query.aggregation.MetricManipulationFn;
import io.druid.segment.LogicalSegment;
import java.util.List;
/**

View File

@ -17,7 +17,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.query.segment;
package io.druid.query;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;

View File

@ -17,9 +17,7 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.query;
import com.metamx.druid.aggregation.AggregatorFactory;
package io.druid.query.aggregation;
/**
*/

View File

@ -17,13 +17,12 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.query.segment;
package io.druid.query.spec;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.metamx.common.IAE;
import org.joda.time.Interval;
import java.util.Arrays;

View File

@ -17,14 +17,14 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.query.segment;
package io.druid.query.spec;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.metamx.druid.Query;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.segment.QuerySegmentWalker;
import com.metamx.druid.utils.JodaUtils;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import org.joda.time.Interval;
import java.util.Collections;

View File

@ -17,16 +17,17 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.query.segment;
package io.druid.query.spec;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.metamx.druid.Query;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.segment.QuerySegmentWalker;
import com.metamx.druid.utils.JodaUtils;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import io.druid.query.SegmentDescriptor;
import org.joda.time.Interval;
import java.util.List;

View File

@ -17,13 +17,13 @@
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.query.segment;
package io.druid.query.spec;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.metamx.druid.Query;
import com.metamx.druid.query.QueryRunner;
import com.metamx.druid.query.segment.QuerySegmentWalker;
import io.druid.query.Query;
import io.druid.query.QueryRunner;
import org.joda.time.Interval;
import java.util.List;

View File

@ -20,6 +20,7 @@
package com.metamx.druid;
import com.metamx.druid.result.Result;
import io.druid.granularity.QueryGranularity;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Test;

View File

@ -25,6 +25,7 @@ import com.metamx.druid.query.search.SearchHit;
import com.metamx.druid.query.search.StrlenSearchSortSpec;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.SearchResultValue;
import io.druid.granularity.QueryGranularity;
import junit.framework.Assert;
import org.joda.time.DateTime;
import org.junit.Test;

View File

@ -21,7 +21,6 @@ package com.metamx.druid;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.CountAggregatorFactory;
import com.metamx.druid.aggregation.LongSumAggregatorFactory;
import com.metamx.druid.aggregation.post.ArithmeticPostAggregator;
@ -30,6 +29,8 @@ import com.metamx.druid.aggregation.post.FieldAccessPostAggregator;
import com.metamx.druid.aggregation.post.PostAggregator;
import com.metamx.druid.result.Result;
import com.metamx.druid.result.TimeseriesResultValue;
import io.druid.granularity.QueryGranularity;
import io.druid.query.aggregation.AggregatorFactory;
import junit.framework.Assert;
import org.joda.time.DateTime;
import org.junit.Test;

View File

@ -19,13 +19,13 @@
package com.metamx.druid.query;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.CountAggregatorFactory;
import com.metamx.druid.aggregation.DoubleSumAggregatorFactory;
import com.metamx.druid.aggregation.post.ArithmeticPostAggregator;
import com.metamx.druid.aggregation.post.ConstantPostAggregator;
import com.metamx.druid.aggregation.post.FieldAccessPostAggregator;
import com.metamx.druid.aggregation.post.PostAggregator;
import io.druid.query.aggregation.AggregatorFactory;
import junit.framework.Assert;
import org.junit.Test;

View File

@ -23,12 +23,11 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.metamx.druid.jackson.DefaultObjectMapper;
import com.metamx.druid.query.segment.LegacySegmentSpec;
import com.metamx.druid.query.segment.MultipleIntervalSegmentSpec;
import com.metamx.druid.query.segment.MultipleSpecificSegmentSpec;
import com.metamx.druid.query.segment.QuerySegmentSpec;
import com.metamx.druid.query.segment.SegmentDescriptor;
import io.druid.query.SegmentDescriptor;
import io.druid.query.spec.LegacySegmentSpec;
import io.druid.query.spec.MultipleIntervalSegmentSpec;
import io.druid.query.spec.MultipleSpecificSegmentSpec;
import io.druid.query.spec.QuerySegmentSpec;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Test;

View File

@ -19,7 +19,7 @@
package com.metamx.druid.query.timeboundary;
import com.metamx.druid.LogicalSegment;
import io.druid.segment.LogicalSegment;
import junit.framework.Assert;
import org.joda.time.Interval;
import org.junit.Test;

View File

@ -36,6 +36,13 @@
<groupId>com.metamx</groupId>
<artifactId>java-util</artifactId>
</dependency>
<!--
<dependency>
<groupId>io.druid</groupId>
<artifactId>druid-api</artifactId>
<version>1.0.0-SNAPSHOT</version>
</dependency>
-->
<dependency>
<groupId>commons-codec</groupId>

View File

@ -20,6 +20,7 @@
package com.metamx.druid;
import com.metamx.druid.partition.PartitionHolder;
import io.druid.segment.LogicalSegment;
import org.joda.time.Interval;
/**

View File

@ -19,6 +19,8 @@
package com.metamx.druid.aggregation;
import io.druid.query.aggregation.Aggregator;
import java.util.Comparator;
/**

View File

@ -24,7 +24,10 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Longs;
import com.metamx.druid.processing.ColumnSelectorFactory;
import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.MetricSelectorFactory;
import java.util.Comparator;
import java.util.List;
@ -47,13 +50,13 @@ public class CountAggregatorFactory implements AggregatorFactory
}
@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
public Aggregator factorize(MetricSelectorFactory metricFactory)
{
return new CountAggregator(name);
}
@Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
public BufferAggregator factorizeBuffered(MetricSelectorFactory metricFactory)
{
return new CountBufferAggregator();
}

View File

@ -19,6 +19,8 @@
package com.metamx.druid.aggregation;
import io.druid.query.aggregation.BufferAggregator;
import java.nio.ByteBuffer;
/**

View File

@ -21,7 +21,8 @@ package com.metamx.druid.aggregation;
import com.google.common.collect.Ordering;
import com.google.common.primitives.Doubles;
import com.metamx.druid.processing.FloatMetricSelector;
import io.druid.query.aggregation.Aggregator;
import io.druid.segment.FloatMetricSelector;
import java.util.Comparator;

View File

@ -23,7 +23,10 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Doubles;
import com.metamx.druid.processing.ColumnSelectorFactory;
import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.MetricSelectorFactory;
import java.nio.ByteBuffer;
import java.util.Arrays;
@ -53,7 +56,7 @@ public class DoubleSumAggregatorFactory implements AggregatorFactory
}
@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
public Aggregator factorize(MetricSelectorFactory metricFactory)
{
return new DoubleSumAggregator(
name,
@ -62,7 +65,7 @@ public class DoubleSumAggregatorFactory implements AggregatorFactory
}
@Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
public BufferAggregator factorizeBuffered(MetricSelectorFactory metricFactory)
{
return new DoubleSumBufferAggregator(metricFactory.makeFloatMetricSelector(fieldName));
}

View File

@ -19,7 +19,8 @@
package com.metamx.druid.aggregation;
import com.metamx.druid.processing.FloatMetricSelector;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.FloatMetricSelector;
import java.nio.ByteBuffer;

View File

@ -20,7 +20,8 @@
package com.metamx.druid.aggregation;
import com.google.common.primitives.Longs;
import com.metamx.druid.processing.FloatMetricSelector;
import io.druid.query.aggregation.Aggregator;
import io.druid.segment.FloatMetricSelector;
import java.util.Comparator;

View File

@ -26,7 +26,10 @@ import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.primitives.Floats;
import com.google.common.primitives.Longs;
import com.metamx.druid.processing.ColumnSelectorFactory;
import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.MetricSelectorFactory;
import org.apache.commons.codec.binary.Base64;
import java.nio.ByteBuffer;
@ -64,7 +67,7 @@ public class HistogramAggregatorFactory implements AggregatorFactory
}
@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
public Aggregator factorize(MetricSelectorFactory metricFactory)
{
return new HistogramAggregator(
name,
@ -74,7 +77,7 @@ public class HistogramAggregatorFactory implements AggregatorFactory
}
@Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
public BufferAggregator factorizeBuffered(MetricSelectorFactory metricFactory)
{
return new HistogramBufferAggregator(
metricFactory.makeFloatMetricSelector(fieldName),

View File

@ -21,7 +21,8 @@ package com.metamx.druid.aggregation;
import com.google.common.primitives.Floats;
import com.google.common.primitives.Longs;
import com.metamx.druid.processing.FloatMetricSelector;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.FloatMetricSelector;
import java.nio.ByteBuffer;
import java.util.Arrays;

View File

@ -20,7 +20,8 @@
package com.metamx.druid.aggregation;
import com.google.common.collect.Lists;
import com.metamx.druid.processing.ObjectColumnSelector;
import io.druid.query.aggregation.Aggregator;
import io.druid.segment.ObjectMetricSelector;
import java.util.List;
@ -28,7 +29,7 @@ public class JavaScriptAggregator implements Aggregator
{
static interface ScriptAggregator
{
public double aggregate(double current, ObjectColumnSelector[] selectorList);
public double aggregate(double current, ObjectMetricSelector[] selectorList);
public double combine(double a, double b);
@ -38,15 +39,15 @@ public class JavaScriptAggregator implements Aggregator
}
private final String name;
private final ObjectColumnSelector[] selectorList;
private final ObjectMetricSelector[] selectorList;
private final ScriptAggregator script;
private volatile double current;
public JavaScriptAggregator(String name, List<ObjectColumnSelector> selectorList, ScriptAggregator script)
public JavaScriptAggregator(String name, List<ObjectMetricSelector> selectorList, ScriptAggregator script)
{
this.name = name;
this.selectorList = Lists.newArrayList(selectorList).toArray(new ObjectColumnSelector[]{});
this.selectorList = Lists.newArrayList(selectorList).toArray(new ObjectMetricSelector[]{});
this.script = script;
this.current = script.reset();

View File

@ -26,8 +26,11 @@ import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.primitives.Doubles;
import com.metamx.druid.processing.ColumnSelectorFactory;
import com.metamx.druid.processing.ObjectColumnSelector;
import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.MetricSelectorFactory;
import io.druid.segment.ObjectMetricSelector;
import org.mozilla.javascript.Context;
import org.mozilla.javascript.ContextAction;
import org.mozilla.javascript.ContextFactory;
@ -80,16 +83,16 @@ public class JavaScriptAggregatorFactory implements AggregatorFactory
}
@Override
public Aggregator factorize(final ColumnSelectorFactory columnFactory)
public Aggregator factorize(final MetricSelectorFactory columnFactory)
{
return new JavaScriptAggregator(
name,
Lists.transform(
fieldNames,
new com.google.common.base.Function<String, ObjectColumnSelector>()
new com.google.common.base.Function<String, ObjectMetricSelector>()
{
@Override
public ObjectColumnSelector apply(@Nullable String s)
public ObjectMetricSelector apply(@Nullable String s)
{
return columnFactory.makeObjectColumnSelector(s);
}
@ -100,17 +103,17 @@ public class JavaScriptAggregatorFactory implements AggregatorFactory
}
@Override
public BufferAggregator factorizeBuffered(final ColumnSelectorFactory columnSelectorFactory)
public BufferAggregator factorizeBuffered(final MetricSelectorFactory metricSelectorFactory)
{
return new JavaScriptBufferAggregator(
Lists.transform(
fieldNames,
new com.google.common.base.Function<String, ObjectColumnSelector>()
new com.google.common.base.Function<String, ObjectMetricSelector>()
{
@Override
public ObjectColumnSelector apply(@Nullable String s)
public ObjectMetricSelector apply(@Nullable String s)
{
return columnSelectorFactory.makeObjectColumnSelector(s);
return metricSelectorFactory.makeObjectColumnSelector(s);
}
}
),
@ -254,7 +257,7 @@ public class JavaScriptAggregatorFactory implements AggregatorFactory
return new JavaScriptAggregator.ScriptAggregator()
{
@Override
public double aggregate(final double current, final ObjectColumnSelector[] selectorList)
public double aggregate(final double current, final ObjectMetricSelector[] selectorList)
{
Context cx = Context.getCurrentContext();
if (cx == null) {

View File

@ -20,24 +20,23 @@
package com.metamx.druid.aggregation;
import com.google.common.collect.Lists;
import com.metamx.druid.processing.ComplexMetricSelector;
import com.metamx.druid.processing.FloatMetricSelector;
import com.metamx.druid.processing.ObjectColumnSelector;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.ObjectMetricSelector;
import java.nio.ByteBuffer;
import java.util.List;
public class JavaScriptBufferAggregator implements BufferAggregator
{
private final ObjectColumnSelector[] selectorList;
private final ObjectMetricSelector[] selectorList;
private final JavaScriptAggregator.ScriptAggregator script;
public JavaScriptBufferAggregator(
List<ObjectColumnSelector> selectorList,
List<ObjectMetricSelector> selectorList,
JavaScriptAggregator.ScriptAggregator script
)
{
this.selectorList = Lists.newArrayList(selectorList).toArray(new ObjectColumnSelector[]{});
this.selectorList = Lists.newArrayList(selectorList).toArray(new ObjectMetricSelector[]{});
this.script = script;
}
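
The aggregator hunks above replace com.metamx.druid.processing.ColumnSelectorFactory with io.druid.segment.MetricSelectorFactory (and ObjectColumnSelector with ObjectMetricSelector). The new factory interface itself is not part of this diff; judging only from the calls and return types visible above, it presumably looks roughly like the following sketch (method and parameter names inferred, not taken from io.druid source):

package io.druid.segment;

public interface MetricSelectorFactory
{
  // invoked as metricFactory.makeFloatMetricSelector(fieldName) by the sum and histogram factories above
  public FloatMetricSelector makeFloatMetricSelector(String metricName);

  // invoked by JavaScriptAggregatorFactory above; note the method keeps its old "ColumnSelector" name
  public ObjectMetricSelector makeObjectColumnSelector(String columnName);
}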

View File

@ -20,7 +20,8 @@
package com.metamx.druid.aggregation;
import com.google.common.primitives.Longs;
import com.metamx.druid.processing.FloatMetricSelector;
import io.druid.query.aggregation.Aggregator;
import io.druid.segment.FloatMetricSelector;
import java.util.Comparator;

View File

@ -23,7 +23,10 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Longs;
import com.metamx.druid.processing.ColumnSelectorFactory;
import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.MetricSelectorFactory;
import java.nio.ByteBuffer;
import java.util.Arrays;
@ -53,7 +56,7 @@ public class LongSumAggregatorFactory implements AggregatorFactory
}
@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
public Aggregator factorize(MetricSelectorFactory metricFactory)
{
return new LongSumAggregator(
name,
@ -62,7 +65,7 @@ public class LongSumAggregatorFactory implements AggregatorFactory
}
@Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
public BufferAggregator factorizeBuffered(MetricSelectorFactory metricFactory)
{
return new LongSumBufferAggregator(metricFactory.makeFloatMetricSelector(fieldName));
}

View File

@ -19,7 +19,8 @@
package com.metamx.druid.aggregation;
import com.metamx.druid.processing.FloatMetricSelector;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.FloatMetricSelector;
import java.nio.ByteBuffer;

View File

@ -19,7 +19,8 @@
package com.metamx.druid.aggregation;
import com.metamx.druid.processing.FloatMetricSelector;
import io.druid.query.aggregation.Aggregator;
import io.druid.segment.FloatMetricSelector;
import java.util.Comparator;

View File

@ -23,7 +23,10 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Doubles;
import com.metamx.druid.processing.ColumnSelectorFactory;
import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.MetricSelectorFactory;
import java.nio.ByteBuffer;
import java.util.Arrays;
@ -53,13 +56,13 @@ public class MaxAggregatorFactory implements AggregatorFactory
}
@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
public Aggregator factorize(MetricSelectorFactory metricFactory)
{
return new MaxAggregator(name, metricFactory.makeFloatMetricSelector(fieldName));
}
@Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
public BufferAggregator factorizeBuffered(MetricSelectorFactory metricFactory)
{
return new MaxBufferAggregator(metricFactory.makeFloatMetricSelector(fieldName));
}

View File

@ -19,7 +19,8 @@
package com.metamx.druid.aggregation;
import com.metamx.druid.processing.FloatMetricSelector;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.FloatMetricSelector;
import java.nio.ByteBuffer;

View File

@ -19,7 +19,8 @@
package com.metamx.druid.aggregation;
import com.metamx.druid.processing.FloatMetricSelector;
import io.druid.query.aggregation.Aggregator;
import io.druid.segment.FloatMetricSelector;
import java.util.Comparator;

View File

@ -23,7 +23,10 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Doubles;
import com.metamx.druid.processing.ColumnSelectorFactory;
import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.MetricSelectorFactory;
import java.nio.ByteBuffer;
import java.util.Arrays;
@ -53,13 +56,13 @@ public class MinAggregatorFactory implements AggregatorFactory
}
@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
public Aggregator factorize(MetricSelectorFactory metricFactory)
{
return new MinAggregator(name, metricFactory.makeFloatMetricSelector(fieldName));
}
@Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
public BufferAggregator factorizeBuffered(MetricSelectorFactory metricFactory)
{
return new MinBufferAggregator(metricFactory.makeFloatMetricSelector(fieldName));
}

View File

@ -19,7 +19,8 @@
package com.metamx.druid.aggregation;
import com.metamx.druid.processing.FloatMetricSelector;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.FloatMetricSelector;
import java.nio.ByteBuffer;

View File

@ -19,6 +19,8 @@
package com.metamx.druid.aggregation;
import io.druid.query.aggregation.Aggregator;
/**
*/
public class NoopAggregator implements Aggregator

View File

@ -19,6 +19,8 @@
package com.metamx.druid.aggregation;
import io.druid.query.aggregation.BufferAggregator;
import java.nio.ByteBuffer;
/**

View File

@ -19,7 +19,10 @@
package com.metamx.druid.aggregation;
import com.metamx.druid.processing.ColumnSelectorFactory;
import io.druid.query.aggregation.Aggregator;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.MetricSelectorFactory;
import java.util.Comparator;
import java.util.List;
@ -36,13 +39,13 @@ public class ToLowerCaseAggregatorFactory implements AggregatorFactory
}
@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
public Aggregator factorize(MetricSelectorFactory metricFactory)
{
return baseAggregatorFactory.factorize(metricFactory);
}
@Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
public BufferAggregator factorizeBuffered(MetricSelectorFactory metricFactory)
{
return baseAggregatorFactory.factorizeBuffered(metricFactory);
}

View File

@ -0,0 +1,56 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.jackson;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.metamx.druid.aggregation.CountAggregatorFactory;
import com.metamx.druid.aggregation.DoubleSumAggregatorFactory;
import com.metamx.druid.aggregation.HistogramAggregatorFactory;
import com.metamx.druid.aggregation.JavaScriptAggregatorFactory;
import com.metamx.druid.aggregation.LongSumAggregatorFactory;
import com.metamx.druid.aggregation.MaxAggregatorFactory;
import com.metamx.druid.aggregation.MinAggregatorFactory;
import io.druid.query.aggregation.AggregatorFactory;
/**
*/
public class AggregatorFactoriesModule extends SimpleModule
{
public AggregatorFactoriesModule()
{
super("AggregatorFactories");
setMixInAnnotation(AggregatorFactory.class, AggregatorFactoryMixin.class);
}
@JsonTypeInfo(use= JsonTypeInfo.Id.NAME, property="type")
@JsonSubTypes(value={
@JsonSubTypes.Type(name="count", value=CountAggregatorFactory.class),
@JsonSubTypes.Type(name="longSum", value=LongSumAggregatorFactory.class),
@JsonSubTypes.Type(name="doubleSum", value=DoubleSumAggregatorFactory.class),
@JsonSubTypes.Type(name="max", value=MaxAggregatorFactory.class),
@JsonSubTypes.Type(name="min", value=MinAggregatorFactory.class),
@JsonSubTypes.Type(name="javascript", value=JavaScriptAggregatorFactory.class),
@JsonSubTypes.Type(name="histogram", value=HistogramAggregatorFactory.class)
})
public static interface AggregatorFactoryMixin {}
}
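
A hedged usage sketch (not part of the commit): once DefaultObjectMapper registers this module, the "type" discriminator in JSON selects the concrete AggregatorFactory subclass via the mixin above. This assumes CountAggregatorFactory exposes a JSON "name" property, which the @JsonCreator/@JsonProperty imports in its hunk above suggest:

// readValue throws IOException; handle or declare it in real code
ObjectMapper mapper = new DefaultObjectMapper();
AggregatorFactory factory = mapper.readValue(
    "{\"type\": \"count\", \"name\": \"rows\"}",
    AggregatorFactory.class
);
// expected: factory is a CountAggregatorFactory named "rows"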

View File

@ -20,31 +20,11 @@
package com.metamx.druid.jackson;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.Version;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import com.fasterxml.jackson.datatype.guava.GuavaModule;
import com.google.common.base.Throwables;
import com.metamx.common.Granularity;
import com.metamx.common.guava.Accumulator;
import com.metamx.common.guava.Sequence;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.nio.ByteOrder;
import java.util.TimeZone;
/**
*/
@ -58,103 +38,10 @@ public class DefaultObjectMapper extends ObjectMapper
public DefaultObjectMapper(JsonFactory factory)
{
super(factory);
SimpleModule serializerModule = new SimpleModule("Druid default serializers", new Version(1, 0, 0, null));
JodaStuff.register(serializerModule);
serializerModule.addDeserializer(
Granularity.class,
new JsonDeserializer<Granularity>()
{
@Override
public Granularity deserialize(JsonParser jp, DeserializationContext ctxt)
throws IOException
{
return Granularity.valueOf(jp.getText().toUpperCase());
}
}
);
serializerModule.addDeserializer(
DateTimeZone.class,
new JsonDeserializer<DateTimeZone>()
{
@Override
public DateTimeZone deserialize(JsonParser jp, DeserializationContext ctxt)
throws IOException
{
String tzId = jp.getText();
try {
return DateTimeZone.forID(tzId);
} catch(IllegalArgumentException e) {
// also support Java timezone strings
return DateTimeZone.forTimeZone(TimeZone.getTimeZone(tzId));
}
}
}
);
serializerModule.addSerializer(
DateTimeZone.class,
new JsonSerializer<DateTimeZone>()
{
@Override
public void serialize(
DateTimeZone dateTimeZone,
JsonGenerator jsonGenerator,
SerializerProvider serializerProvider
)
throws IOException, JsonProcessingException
{
jsonGenerator.writeString(dateTimeZone.getID());
}
}
);
serializerModule.addSerializer(
Sequence.class,
new JsonSerializer<Sequence>()
{
@Override
public void serialize(Sequence value, final JsonGenerator jgen, SerializerProvider provider)
throws IOException, JsonProcessingException
{
jgen.writeStartArray();
value.accumulate(
null,
new Accumulator()
{
@Override
public Object accumulate(Object o, Object o1)
{
try {
jgen.writeObject(o1);
}
catch (IOException e) {
throw Throwables.propagate(e);
}
return o;
}
}
);
jgen.writeEndArray();
}
}
);
serializerModule.addSerializer(ByteOrder.class, ToStringSerializer.instance);
serializerModule.addDeserializer(
ByteOrder.class,
new JsonDeserializer<ByteOrder>()
{
@Override
public ByteOrder deserialize(
JsonParser jp, DeserializationContext ctxt
) throws IOException, JsonProcessingException
{
if (ByteOrder.BIG_ENDIAN.toString().equals(jp.getText())) {
return ByteOrder.BIG_ENDIAN;
}
return ByteOrder.LITTLE_ENDIAN;
}
}
);
registerModule(serializerModule);
registerModule(new DruidDefaultSerializersModule());
registerModule(new GuavaModule());
registerModule(new QueryGranularityModule());
registerModule(new AggregatorFactoriesModule());
configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
configure(MapperFeature.AUTO_DETECT_GETTERS, false);
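
The constructor is now pure composition: the inline Joda, Granularity, Sequence, and ByteOrder handling has moved into DruidDefaultSerializersModule, Guava types come from GuavaModule, and the query-specific bindings come from the two new modules, with unknown JSON properties tolerated rather than fatal. A small sketch of what that last setting buys, assuming the existing no-arg DefaultObjectMapper constructor is unchanged; Spec is a hypothetical bean used only for illustration:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.metamx.druid.jackson.DefaultObjectMapper;

public class LenientMapperSketch
{
  // Hypothetical bean; only "name" is mapped.
  public static class Spec
  {
    public String name;
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new DefaultObjectMapper();

    // The extra "comment" property would normally be an error; FAIL_ON_UNKNOWN_PROPERTIES
    // is disabled in the constructor above, so it is silently ignored.
    Spec spec = mapper.readValue("{\"name\": \"x\", \"comment\": \"ignored\"}", Spec.class);
    System.out.println(spec.name);  // x
  }
}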

View File

@ -0,0 +1,144 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.jackson;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
import com.google.common.base.Throwables;
import com.metamx.common.Granularity;
import com.metamx.common.guava.Accumulator;
import com.metamx.common.guava.Sequence;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.nio.ByteOrder;
import java.util.TimeZone;
/**
*/
public class DruidDefaultSerializersModule extends SimpleModule
{
public DruidDefaultSerializersModule()
{
super("Druid default serializers");
JodaStuff.register(this);
addDeserializer(
Granularity.class,
new JsonDeserializer<Granularity>()
{
@Override
public Granularity deserialize(JsonParser jp, DeserializationContext ctxt)
throws IOException
{
return Granularity.valueOf(jp.getText().toUpperCase());
}
}
);
addDeserializer(
DateTimeZone.class,
new JsonDeserializer<DateTimeZone>()
{
@Override
public DateTimeZone deserialize(JsonParser jp, DeserializationContext ctxt)
throws IOException
{
String tzId = jp.getText();
try {
return DateTimeZone.forID(tzId);
} catch(IllegalArgumentException e) {
// also support Java timezone strings
return DateTimeZone.forTimeZone(TimeZone.getTimeZone(tzId));
}
}
}
);
addSerializer(
DateTimeZone.class,
new JsonSerializer<DateTimeZone>()
{
@Override
public void serialize(
DateTimeZone dateTimeZone,
JsonGenerator jsonGenerator,
SerializerProvider serializerProvider
)
throws IOException, JsonProcessingException
{
jsonGenerator.writeString(dateTimeZone.getID());
}
}
);
addSerializer(
Sequence.class,
new JsonSerializer<Sequence>()
{
@Override
public void serialize(Sequence value, final JsonGenerator jgen, SerializerProvider provider)
throws IOException, JsonProcessingException
{
jgen.writeStartArray();
value.accumulate(
null,
new Accumulator()
{
@Override
public Object accumulate(Object o, Object o1)
{
try {
jgen.writeObject(o1);
}
catch (IOException e) {
throw Throwables.propagate(e);
}
return o;
}
}
);
jgen.writeEndArray();
}
}
);
addSerializer(ByteOrder.class, ToStringSerializer.instance);
addDeserializer(
ByteOrder.class,
new JsonDeserializer<ByteOrder>()
{
@Override
public ByteOrder deserialize(
JsonParser jp, DeserializationContext ctxt
) throws IOException, JsonProcessingException
{
if (ByteOrder.BIG_ENDIAN.toString().equals(jp.getText())) {
return ByteOrder.BIG_ENDIAN;
}
return ByteOrder.LITTLE_ENDIAN;
}
}
);
}
}
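
Nothing above is new behavior; it is the same Granularity, DateTimeZone, Sequence, and ByteOrder handling lifted out of DefaultObjectMapper so it can be registered on its own. A quick sketch of the time-zone and byte-order round trips the module provides (illustrative only):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.metamx.druid.jackson.DruidDefaultSerializersModule;
import org.joda.time.DateTimeZone;

import java.nio.ByteOrder;

public class SerializersModuleSketch
{
  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(new DruidDefaultSerializersModule());

    // DateTimeZone writes as its ID and reads back; Java-style ids fall back to TimeZone.getTimeZone.
    System.out.println(mapper.writeValueAsString(DateTimeZone.UTC));       // "UTC"
    System.out.println(mapper.readValue("\"PST\"", DateTimeZone.class));   // America/Los_Angeles via the fallback

    // ByteOrder uses its toString form; anything other than BIG_ENDIAN reads as LITTLE_ENDIAN.
    System.out.println(mapper.writeValueAsString(ByteOrder.LITTLE_ENDIAN));
    System.out.println(mapper.readValue("\"LITTLE_ENDIAN\"", ByteOrder.class) == ByteOrder.LITTLE_ENDIAN);  // true
  }
}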

View File

@ -39,9 +39,9 @@ import java.io.IOException;
/**
*/
public class JodaStuff
class JodaStuff
{
public static SimpleModule register(SimpleModule module)
static SimpleModule register(SimpleModule module)
{
module.addKeyDeserializer(DateTime.class, new DateTimeKeyDeserializer());
module.addDeserializer(DateTime.class, new DateTimeDeserializer());
@ -58,7 +58,7 @@ public class JodaStuff
/**
*/
public static class IntervalDeserializer extends StdDeserializer<Interval>
private static class IntervalDeserializer extends StdDeserializer<Interval>
{
public IntervalDeserializer()
{
@ -82,7 +82,7 @@ public class JodaStuff
}
}
public static class DateTimeDeserializer extends StdDeserializer<DateTime>
private static class DateTimeDeserializer extends StdDeserializer<DateTime>
{
public DateTimeDeserializer() {
super(DateTime.class);
@ -109,5 +109,4 @@ public class JodaStuff
throw ctxt.mappingException(getValueClass());
}
}
}

View File

@ -0,0 +1,50 @@
/*
* Druid - a distributed column store.
* Copyright (C) 2012, 2013 Metamarkets Group Inc.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package com.metamx.druid.jackson;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.fasterxml.jackson.databind.module.SimpleModule;
import io.druid.granularity.AllGranularity;
import io.druid.granularity.DurationGranularity;
import io.druid.granularity.NoneGranularity;
import io.druid.granularity.PeriodGranularity;
import io.druid.granularity.QueryGranularity;
/**
*/
public class QueryGranularityModule extends SimpleModule
{
public QueryGranularityModule()
{
super("QueryGranularityModule");
setMixInAnnotation(QueryGranularity.class, QueryGranularityMixin.class);
registerSubtypes(
new NamedType(PeriodGranularity.class, "period"),
new NamedType(DurationGranularity.class, "duration"),
new NamedType(AllGranularity.class, "all"),
new NamedType(NoneGranularity.class, "none")
);
}
@JsonTypeInfo(use= JsonTypeInfo.Id.NAME, property = "type", defaultImpl = QueryGranularity.class)
public static interface QueryGranularityMixin {}
}
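
The mix-in registers polymorphic "type" handling on QueryGranularity and names the four concrete granularities, so typed granularity JSON lands on the right class. A minimal sketch using the no-arg granularities, assuming AllGranularity and NoneGranularity keep default constructors; the period and duration types map the same way, though their Joda-typed fields are presumably covered by the serializers module (illustrative only):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.metamx.druid.jackson.QueryGranularityModule;
import io.druid.granularity.AllGranularity;
import io.druid.granularity.NoneGranularity;
import io.druid.granularity.QueryGranularity;

public class QueryGranularityModuleSketch
{
  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(new QueryGranularityModule());

    // The "type" field picks the NamedType registered above.
    QueryGranularity all = mapper.readValue("{\"type\": \"all\"}", QueryGranularity.class);
    QueryGranularity none = mapper.readValue("{\"type\": \"none\"}", QueryGranularity.class);

    System.out.println(all instanceof AllGranularity);    // expected: true
    System.out.println(none instanceof NoneGranularity);  // expected: true
  }
}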

Some files were not shown because too many files have changed in this diff.