diff --git a/client/src/main/java/com/metamx/druid/query/dimension/DefaultDimensionSpec.java b/client/src/main/java/com/metamx/druid/query/dimension/DefaultDimensionSpec.java index 4895378cc1b..a936023550f 100644 --- a/client/src/main/java/com/metamx/druid/query/dimension/DefaultDimensionSpec.java +++ b/client/src/main/java/com/metamx/druid/query/dimension/DefaultDimensionSpec.java @@ -39,7 +39,7 @@ public class DefaultDimensionSpec implements DimensionSpec @JsonProperty("outputName") String outputName ) { - this.dimension = dimension.toLowerCase(); + this.dimension = dimension; // Do null check for legacy backwards compatibility, callers should be setting the value. this.outputName = outputName == null ? dimension : outputName; diff --git a/client/src/main/java/com/metamx/druid/query/dimension/ExtractionDimensionSpec.java b/client/src/main/java/com/metamx/druid/query/dimension/ExtractionDimensionSpec.java index ed85b840d0d..fe80c24c94f 100644 --- a/client/src/main/java/com/metamx/druid/query/dimension/ExtractionDimensionSpec.java +++ b/client/src/main/java/com/metamx/druid/query/dimension/ExtractionDimensionSpec.java @@ -42,7 +42,7 @@ public class ExtractionDimensionSpec implements DimensionSpec @JsonProperty("dimExtractionFn") DimExtractionFn dimExtractionFn ) { - this.dimension = dimension.toLowerCase(); + this.dimension = dimension; this.dimExtractionFn = dimExtractionFn; // Do null check for backwards compatibility diff --git a/client/src/main/java/com/metamx/druid/query/filter/ExtractionDimFilter.java b/client/src/main/java/com/metamx/druid/query/filter/ExtractionDimFilter.java index dd5e7ad6ddd..31e14e0de12 100644 --- a/client/src/main/java/com/metamx/druid/query/filter/ExtractionDimFilter.java +++ b/client/src/main/java/com/metamx/druid/query/filter/ExtractionDimFilter.java @@ -42,7 +42,7 @@ public class ExtractionDimFilter implements DimFilter @JsonProperty("dimExtractionFn") DimExtractionFn dimExtractionFn ) { - this.dimension = dimension.toLowerCase(); + this.dimension = dimension; this.value = value; this.dimExtractionFn = dimExtractionFn; } diff --git a/client/src/main/java/com/metamx/druid/query/filter/SelectorDimFilter.java b/client/src/main/java/com/metamx/druid/query/filter/SelectorDimFilter.java index 73097ec0cd1..e8bdc6081f4 100644 --- a/client/src/main/java/com/metamx/druid/query/filter/SelectorDimFilter.java +++ b/client/src/main/java/com/metamx/druid/query/filter/SelectorDimFilter.java @@ -39,7 +39,7 @@ public class SelectorDimFilter implements DimFilter @JsonProperty("value") String value ) { - this.dimension = dimension.toLowerCase(); + this.dimension = dimension; this.value = value; } diff --git a/client/src/main/java/com/metamx/druid/query/search/SearchQuery.java b/client/src/main/java/com/metamx/druid/query/search/SearchQuery.java index 2a721431b89..cd18060637c 100644 --- a/client/src/main/java/com/metamx/druid/query/search/SearchQuery.java +++ b/client/src/main/java/com/metamx/druid/query/search/SearchQuery.java @@ -69,7 +69,7 @@ public class SearchQuery extends BaseQuery> @Override public String apply(@Nullable String input) { - return input.toLowerCase(); + return input; } } ); diff --git a/common/src/main/java/com/metamx/druid/aggregation/DoubleSumAggregatorFactory.java b/common/src/main/java/com/metamx/druid/aggregation/DoubleSumAggregatorFactory.java index d8166f841e7..dc8bc9a9842 100644 --- a/common/src/main/java/com/metamx/druid/aggregation/DoubleSumAggregatorFactory.java +++ 
b/common/src/main/java/com/metamx/druid/aggregation/DoubleSumAggregatorFactory.java @@ -45,7 +45,7 @@ public class DoubleSumAggregatorFactory implements AggregatorFactory ) { this.name = name; - this.fieldName = fieldName.toLowerCase(); + this.fieldName = fieldName; } @Override diff --git a/common/src/main/java/com/metamx/druid/aggregation/JavaScriptAggregatorFactory.java b/common/src/main/java/com/metamx/druid/aggregation/JavaScriptAggregatorFactory.java index f2dd1604af4..48e5cf776f5 100644 --- a/common/src/main/java/com/metamx/druid/aggregation/JavaScriptAggregatorFactory.java +++ b/common/src/main/java/com/metamx/druid/aggregation/JavaScriptAggregatorFactory.java @@ -57,7 +57,7 @@ public class JavaScriptAggregatorFactory implements AggregatorFactory ) { this.name = name; - this.script = expression.toLowerCase(); + this.script = expression; this.fieldNames = fieldNames; this.combiner = compileScript(script); } diff --git a/common/src/main/java/com/metamx/druid/aggregation/LongSumAggregatorFactory.java b/common/src/main/java/com/metamx/druid/aggregation/LongSumAggregatorFactory.java index a32887c2b68..75e59b39e2c 100644 --- a/common/src/main/java/com/metamx/druid/aggregation/LongSumAggregatorFactory.java +++ b/common/src/main/java/com/metamx/druid/aggregation/LongSumAggregatorFactory.java @@ -45,7 +45,7 @@ public class LongSumAggregatorFactory implements AggregatorFactory ) { this.name = name; - this.fieldName = fieldName.toLowerCase(); + this.fieldName = fieldName; } @Override diff --git a/common/src/main/java/com/metamx/druid/aggregation/MaxAggregatorFactory.java b/common/src/main/java/com/metamx/druid/aggregation/MaxAggregatorFactory.java index cd5aaf336db..85a5466ec53 100644 --- a/common/src/main/java/com/metamx/druid/aggregation/MaxAggregatorFactory.java +++ b/common/src/main/java/com/metamx/druid/aggregation/MaxAggregatorFactory.java @@ -45,7 +45,7 @@ public class MaxAggregatorFactory implements AggregatorFactory ) { this.name = name; - this.fieldName = fieldName.toLowerCase(); + this.fieldName = fieldName; } @Override diff --git a/common/src/main/java/com/metamx/druid/aggregation/MinAggregatorFactory.java b/common/src/main/java/com/metamx/druid/aggregation/MinAggregatorFactory.java index ee6e4d25a01..49b2ad154c9 100644 --- a/common/src/main/java/com/metamx/druid/aggregation/MinAggregatorFactory.java +++ b/common/src/main/java/com/metamx/druid/aggregation/MinAggregatorFactory.java @@ -45,7 +45,7 @@ public class MinAggregatorFactory implements AggregatorFactory ) { this.name = name; - this.fieldName = fieldName.toLowerCase(); + this.fieldName = fieldName; } @Override diff --git a/common/src/main/java/com/metamx/druid/input/InputRow.java b/common/src/main/java/com/metamx/druid/input/InputRow.java index bc94173374c..c08965c340e 100644 --- a/common/src/main/java/com/metamx/druid/input/InputRow.java +++ b/common/src/main/java/com/metamx/druid/input/InputRow.java @@ -22,8 +22,25 @@ package com.metamx.druid.input; import java.util.List; /** + * An InputRow is the interface definition of an event being input into the data ingestion layer. + * + * An InputRow is a Row with a self-describing list of the dimensions available. This list is used to + * implement "schema-less" data ingestion that allows the system to add new dimensions as they appear. + * + * Note, Druid is a case-insensitive system for parts of schema (column names), this has direct implications + * for the implementation of InputRows and Rows. 
The case-insensitiveness is implemented by lowercasing all + * schema elements before looking them up, this means that calls to getDimension() and getFloatMetric() will + * have all lowercase column names passed in no matter what is returned from getDimensions or passed in as the + * fieldName of an AggregatorFactory. Implementations of InputRow and Row should expect to get values back + * in all lowercase form (i.e. they should either have already turned everything into lowercase or they + * should operate in a case-insensitive manner). */ public interface InputRow extends Row { + /** + * Returns the dimensions that exist in this row. + * + * @return the dimensions that exist in this row. + */ public List getDimensions(); } diff --git a/common/src/main/java/com/metamx/druid/input/Row.java b/common/src/main/java/com/metamx/druid/input/Row.java index 6f6f3f2c17c..add4623d498 100644 --- a/common/src/main/java/com/metamx/druid/input/Row.java +++ b/common/src/main/java/com/metamx/druid/input/Row.java @@ -25,6 +25,16 @@ import org.codehaus.jackson.annotate.JsonTypeInfo; import java.util.List; /** + * A Row of data. This can be used for both input and output into various parts of the system. It assumes + * that the user already knows the schema of the row and can query for the parts that they care about. + * + * Note, Druid is a case-insensitive system for parts of schema (column names), this has direct implications + * for the implementation of InputRows and Rows. The case-insensitiveness is implemented by lowercasing all + * schema elements before looking them up, this means that calls to getDimension() and getFloatMetric() will + * have all lowercase column names passed in no matter what is returned from getDimensions or passed in as the + * fieldName of an AggregatorFactory. Implementations of InputRow and Row should expect to get values back + * in all lowercase form (i.e. they should either have already turned everything into lowercase or they + * should operate in a case-insensitive manner). */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "version") @JsonSubTypes(value = { @@ -32,7 +42,31 @@ import java.util.List; }) public interface Row { + /** + * Returns the timestamp from the epoch in milliseconds. If the event happened _right now_, this would return the + * same thing as System.currentTimeMillis(); + * + * @return the timestamp from the epoch in milliseconds. + */ public long getTimestampFromEpoch(); + + /** + * Returns the list of dimension values for the given column name. + * + * Column names are always all lowercase in order to support case-insensitive schemas. + * + * @param dimension the lowercase column name of the dimension requested + * @return the list of values for the provided column name + */ public List getDimension(String dimension); + + /** + * Returns the float value of the given metric column. + * + * Column names are always all lowercase in order to support case-insensitive schemas. + * + * @param metric the lowercase column name of the metric requested + * @return the float value for the provided column name. 
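[Editor's illustration] The contract in the javadoc above leaves the lowercasing strategy up to each implementation. The sketch below is an illustration only (it is not part of this patch, and the real MapBasedInputRow may be organized differently): a map-backed InputRow that normalizes its keys up front so that the lowercase lookups described above always succeed. The class name and the missing-value defaults are assumptions made for the example.

import com.metamx.druid.input.InputRow;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class LowercasingMapRow implements InputRow
{
  private final long timestamp;
  private final List<String> dimensions;
  private final Map<String, Object> event;

  public LowercasingMapRow(long timestamp, List<String> dimensions, Map<String, Object> event)
  {
    this.timestamp = timestamp;

    // Normalize once at construction time so that getDimension()/getFloatMetric(),
    // which are always called with lowercase column names, can do plain map lookups.
    this.dimensions = new ArrayList<String>();
    for (String dim : dimensions) {
      this.dimensions.add(dim.toLowerCase());
    }
    this.event = new LinkedHashMap<String, Object>();
    for (Map.Entry<String, Object> entry : event.entrySet()) {
      this.event.put(entry.getKey().toLowerCase(), entry.getValue());
    }
  }

  @Override
  public List<String> getDimensions()
  {
    return dimensions;
  }

  @Override
  public long getTimestampFromEpoch()
  {
    return timestamp;
  }

  @Override
  @SuppressWarnings("unchecked")
  public List<String> getDimension(String dimension)
  {
    final Object value = event.get(dimension);
    if (value == null) {
      return Collections.<String>emptyList();
    }
    return (value instanceof List) ? (List<String>) value : Collections.singletonList(String.valueOf(value));
  }

  @Override
  public float getFloatMetric(String metric)
  {
    final Object value = event.get(metric);
    // Missing metric treated as 0.0f here; an assumption made for this sketch.
    return (value == null) ? 0.0f : Float.parseFloat(String.valueOf(value));
  }
}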
+ */ public float getFloatMetric(String metric); } diff --git a/examples/rand/src/main/java/druid/examples/RandomFirehoseFactory.java b/examples/rand/src/main/java/druid/examples/RandomFirehoseFactory.java index 0fdd1bbb9b2..6b7e1fc4a18 100644 --- a/examples/rand/src/main/java/druid/examples/RandomFirehoseFactory.java +++ b/examples/rand/src/main/java/druid/examples/RandomFirehoseFactory.java @@ -162,8 +162,8 @@ public class RandomFirehoseFactory implements FirehoseFactory private final Runnable commitRunnable = new Runnable() { public void run() {} }; private final java.util.Random rand = (seed == 0L) ? new Random() : new Random(seed); private final LinkedList dimensions = new LinkedList(); - private final boolean placeholderForAdd = dimensions.add("inColumn".toLowerCase()); - private final boolean placeholderForAdd2 = dimensions.add("target".toLowerCase()); + private final boolean placeholderForAdd = dimensions.add("inColumn"); + private final boolean placeholderForAdd2 = dimensions.add("target"); private final Map theMap = new HashMap(2); private long rowCount = 0L; @@ -200,7 +200,7 @@ public class RandomFirehoseFactory implements FirehoseFactory } } rowCount++; - theMap.put("inColumn".toLowerCase(), anotherRand((int)nth)); + theMap.put("inColumn", anotherRand((int)nth)); theMap.put("target", ("a" + nth)); return new MapBasedInputRow(System.currentTimeMillis(), dimensions, theMap); } diff --git a/index-common/pom.xml b/index-common/pom.xml index 2e467448a2f..162c232fed5 100644 --- a/index-common/pom.xml +++ b/index-common/pom.xml @@ -79,4 +79,19 @@ test + + + + + maven-jar-plugin + + + + test-jar + + + + + + \ No newline at end of file diff --git a/index-common/src/main/java/com/metamx/druid/index/v1/CompressedLongsIndexedSupplier.java b/index-common/src/main/java/com/metamx/druid/index/v1/CompressedLongsIndexedSupplier.java index b76a21176e1..2f37d393763 100644 --- a/index-common/src/main/java/com/metamx/druid/index/v1/CompressedLongsIndexedSupplier.java +++ b/index-common/src/main/java/com/metamx/druid/index/v1/CompressedLongsIndexedSupplier.java @@ -59,6 +59,11 @@ public class CompressedLongsIndexedSupplier implements Supplier this.baseLongBuffers = baseLongBuffers; } + public int size() + { + return totalSize; + } + @Override public IndexedLongs get() { diff --git a/index-common/src/main/java/com/metamx/druid/index/v1/IncrementalIndex.java b/index-common/src/main/java/com/metamx/druid/index/v1/IncrementalIndex.java index 3b05c022cfb..126df67a242 100644 --- a/index-common/src/main/java/com/metamx/druid/index/v1/IncrementalIndex.java +++ b/index-common/src/main/java/com/metamx/druid/index/v1/IncrementalIndex.java @@ -22,6 +22,7 @@ package com.metamx.druid.index.v1; import com.google.common.base.Function; import com.google.common.collect.BiMap; import com.google.common.collect.HashBiMap; +import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; @@ -52,6 +53,7 @@ import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.NavigableSet; import java.util.Set; import java.util.concurrent.ConcurrentNavigableMap; import java.util.concurrent.ConcurrentSkipListMap; @@ -65,18 +67,19 @@ public class IncrementalIndex implements Iterable private final long minTimestamp; private final QueryGranularity gran; - final AggregatorFactory[] metrics; + private final AggregatorFactory[] metrics; private final Map 
metricIndexes; private final Map metricTypes; - final LinkedHashMap dimensionOrder; - final CopyOnWriteArrayList dimensions; - final DimensionHolder dimValues; - final ConcurrentSkipListMap facts; + private final ImmutableList metricNames; + private final LinkedHashMap dimensionOrder; + private final CopyOnWriteArrayList dimensions; + private final DimensionHolder dimValues; + private final ConcurrentSkipListMap facts; private volatile int numEntries = 0; - // This is modified by the same thread. + // This is modified on add() by a (hopefully) single thread. private InputRow in; public IncrementalIndex( @@ -89,12 +92,16 @@ public class IncrementalIndex implements Iterable this.gran = gran; this.metrics = metrics; + final ImmutableList.Builder metricNamesBuilder = ImmutableList.builder(); final ImmutableMap.Builder metricIndexesBuilder = ImmutableMap.builder(); final ImmutableMap.Builder metricTypesBuilder = ImmutableMap.builder(); for (int i = 0; i < metrics.length; i++) { - metricIndexesBuilder.put(metrics[i].getName().toLowerCase(), i); - metricTypesBuilder.put(metrics[i].getName().toLowerCase(), metrics[i].getTypeName()); + final String metricName = metrics[i].getName().toLowerCase(); + metricNamesBuilder.add(metricName); + metricIndexesBuilder.put(metricName, i); + metricTypesBuilder.put(metricName, metrics[i].getTypeName()); } + metricNames = metricNamesBuilder.build(); metricIndexes = metricIndexesBuilder.build(); metricTypes = metricTypesBuilder.build(); @@ -109,7 +116,7 @@ public class IncrementalIndex implements Iterable * Adds a new row. The row might correspond with another row that already exists, in which case this will * update that row instead of inserting a new one. *

- * This is *not* thread-safe. Calls to add() must be serialized externally + * This is *not* thread-safe. Calls to add() should always happen on the same thread. * * @param row the row of data to add * @@ -126,6 +133,8 @@ public class IncrementalIndex implements Iterable List overflow = null; for (String dimension : rowDimensions) { + dimension = dimension.toLowerCase(); + final Integer index = dimensionOrder.get(dimension); if (index == null) { dimensionOrder.put(dimension, dimensionOrder.size()); @@ -134,9 +143,9 @@ public class IncrementalIndex implements Iterable if (overflow == null) { overflow = Lists.newArrayList(); } - overflow.add(getDimVals(row, dimension)); + overflow.add(getDimVals(dimValues.add(dimension), row.getDimension(dimension))); } else { - dims[index] = getDimVals(row, dimension); + dims[index] = getDimVals(dimValues.get(dimension), row.getDimension(dimension)); } } @@ -163,8 +172,9 @@ public class IncrementalIndex implements Iterable new MetricSelectorFactory() { @Override - public FloatMetricSelector makeFloatMetricSelector(final String metricName) + public FloatMetricSelector makeFloatMetricSelector(String metric) { + final String metricName = metric.toLowerCase(); return new FloatMetricSelector() { @Override @@ -176,7 +186,7 @@ public class IncrementalIndex implements Iterable } @Override - public ComplexMetricSelector makeComplexMetricSelector(final String metricName) + public ComplexMetricSelector makeComplexMetricSelector(final String metric) { final String typeName = agg.getTypeName(); final ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName); @@ -186,6 +196,7 @@ public class IncrementalIndex implements Iterable } final ComplexMetricExtractor extractor = serde.getExtractor(); + final String metricName = metric.toLowerCase(); return new ComplexMetricSelector() { @@ -227,20 +238,18 @@ public class IncrementalIndex implements Iterable return numEntries; } - private long getMinTimeMillis() + public long getMinTimeMillis() { return facts.firstKey().getTimestamp(); } - private long getMaxTimeMillis() + public long getMaxTimeMillis() { return facts.lastKey().getTimestamp(); } - private String[] getDimVals(InputRow row, String dimension) + private String[] getDimVals(final DimDim dimLookup, final List dimValues) { - final DimDim dimLookup = dimValues.getOrAdd(dimension); - final List dimValues = row.getDimension(dimension); final String[] retVal = new String[dimValues.size()]; int count = 0; @@ -309,11 +318,21 @@ public class IncrementalIndex implements Iterable return dimensionOrder.get(dimension); } + List getMetricNames() + { + return metricNames; + } + Integer getMetricIndex(String metricName) { return metricIndexes.get(metricName); } + ConcurrentSkipListMap getFacts() + { + return facts; + } + ConcurrentNavigableMap getSubMap(TimeAndDims start, TimeAndDims end) { return facts.subMap(start, end); @@ -366,13 +385,16 @@ public class IncrementalIndex implements Iterable dimensions.clear(); } - DimDim getOrAdd(String dimension) + DimDim add(String dimension) { DimDim holder = dimensions.get(dimension); if (holder == null) { holder = new DimDim(); dimensions.put(dimension, holder); } + else { + throw new ISE("dimension[%s] already existed even though add() was called!?", dimension); + } return holder; } diff --git a/index-common/src/main/java/com/metamx/druid/indexer/data/CSVDataSpec.java b/index-common/src/main/java/com/metamx/druid/indexer/data/CSVDataSpec.java index 72a683095b9..05613567b87 100644 --- 
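[Editor's illustration] To make the add() contract above concrete, the snippet below (illustration only, not code from the patch) shows both behaviors at once: the roll-up described in the javadoc and the new lowercasing of dimension names. It assumes MapBasedInputRow resolves getDimension() and getFloatMetric() by a direct map lookup on the lowercase key, which is the same assumption IncrementalIndexTest further down relies on.

import com.google.common.collect.ImmutableMap;
import com.metamx.druid.QueryGranularity;
import com.metamx.druid.aggregation.AggregatorFactory;
import com.metamx.druid.aggregation.LongSumAggregatorFactory;
import com.metamx.druid.index.v1.IncrementalIndex;
import com.metamx.druid.input.MapBasedInputRow;

import java.util.Arrays;

public class IncrementalIndexRollupExample
{
  public static void main(String[] args)
  {
    final IncrementalIndex index = new IncrementalIndex(
        0L,
        QueryGranularity.NONE,
        new AggregatorFactory[]{new LongSumAggregatorFactory("clicks", "clicks")}
    );

    final long t = System.currentTimeMillis();

    // Two events with the same timestamp and the same dimension value. The dimension name is
    // spelled differently on purpose: add() lowercases it, so both events map to the same
    // TimeAndDims key and the second add() updates the existing row instead of inserting one.
    index.add(
        new MapBasedInputRow(t, Arrays.asList("page"), ImmutableMap.<String, Object>of("page", "home", "clicks", 1L))
    );
    index.add(
        new MapBasedInputRow(t, Arrays.asList("PAGE"), ImmutableMap.<String, Object>of("page", "home", "clicks", 2L))
    );

    System.out.println(index.size()); // 1: the second event rolled up into the first row's aggregators
  }
}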
a/index-common/src/main/java/com/metamx/druid/indexer/data/CSVDataSpec.java +++ b/index-common/src/main/java/com/metamx/druid/indexer/data/CSVDataSpec.java @@ -19,17 +19,12 @@ package com.metamx.druid.indexer.data; -import com.google.common.base.Function; -import com.google.common.base.Joiner; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; import com.metamx.common.parsers.CSVParser; import com.metamx.common.parsers.Parser; -import com.metamx.common.parsers.ToLowerCaseParser; import org.codehaus.jackson.annotate.JsonCreator; import org.codehaus.jackson.annotate.JsonProperty; -import javax.annotation.Nullable; import java.util.List; /** @@ -45,33 +40,13 @@ public class CSVDataSpec implements DataSpec @JsonProperty("dimensions") List dimensions ) { - Preconditions.checkNotNull(columns); - Preconditions.checkArgument( - !Joiner.on("_").join(columns).contains(","), "Columns must not have commas in them" - ); + Preconditions.checkNotNull(columns, "columns"); + for (String column : columns) { + Preconditions.checkArgument(!column.contains(","), "Column[%s] has a comma, it cannot", column); + } - this.columns = Lists.transform( - columns, - new Function() - { - @Override - public String apply(@Nullable String input) - { - return input.toLowerCase(); - } - } - ); - this.dimensions = (dimensions == null) ? dimensions : Lists.transform( - dimensions, - new Function() - { - @Override - public String apply(@Nullable String input) - { - return input.toLowerCase(); - } - } - ); + this.columns = columns; + this.dimensions = dimensions; } @JsonProperty("columns") @@ -102,8 +77,8 @@ public class CSVDataSpec implements DataSpec } @Override - public Parser getParser() + public Parser getParser() { - return new ToLowerCaseParser(new CSVParser(columns)); + return new CSVParser(columns); } } diff --git a/index-common/src/main/java/com/metamx/druid/indexer/data/DelimitedDataSpec.java b/index-common/src/main/java/com/metamx/druid/indexer/data/DelimitedDataSpec.java index 0cad130da33..d372d46696f 100644 --- a/index-common/src/main/java/com/metamx/druid/indexer/data/DelimitedDataSpec.java +++ b/index-common/src/main/java/com/metamx/druid/indexer/data/DelimitedDataSpec.java @@ -19,17 +19,12 @@ package com.metamx.druid.indexer.data; -import com.google.common.base.Function; -import com.google.common.base.Joiner; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; import com.metamx.common.parsers.DelimitedParser; import com.metamx.common.parsers.Parser; -import com.metamx.common.parsers.ToLowerCaseParser; import org.codehaus.jackson.annotate.JsonCreator; import org.codehaus.jackson.annotate.JsonProperty; -import javax.annotation.Nullable; import java.util.List; /** @@ -48,33 +43,13 @@ public class DelimitedDataSpec implements DataSpec ) { Preconditions.checkNotNull(columns); - Preconditions.checkArgument( - !Joiner.on("_").join(columns).contains(","), "Columns must not have commas in them" - ); + for (String column : columns) { + Preconditions.checkArgument(!column.contains(","), "Column[%s] has a comma, it cannot", column); + } this.delimiter = (delimiter == null) ? DelimitedParser.DEFAULT_DELIMITER : delimiter; - this.columns = Lists.transform( - columns, - new Function() - { - @Override - public String apply(@Nullable String input) - { - return input.toLowerCase(); - } - } - ); - this.dimensions = (dimensions == null) ? 
dimensions : Lists.transform( - dimensions, - new Function() - { - @Override - public String apply(@Nullable String input) - { - return input.toLowerCase(); - } - } - ); + this.columns = columns; + this.dimensions = dimensions; } @JsonProperty("delimiter") @@ -111,10 +86,10 @@ public class DelimitedDataSpec implements DataSpec } @Override - public Parser getParser() + public Parser getParser() { - Parser retVal = new DelimitedParser(delimiter); + Parser retVal = new DelimitedParser(delimiter); retVal.setFieldNames(columns); - return new ToLowerCaseParser(retVal); + return retVal; } } diff --git a/index-common/src/main/java/com/metamx/druid/indexer/data/JSONDataSpec.java b/index-common/src/main/java/com/metamx/druid/indexer/data/JSONDataSpec.java index 4c5f3d45444..11228bc132f 100644 --- a/index-common/src/main/java/com/metamx/druid/indexer/data/JSONDataSpec.java +++ b/index-common/src/main/java/com/metamx/druid/indexer/data/JSONDataSpec.java @@ -19,14 +19,10 @@ package com.metamx.druid.indexer.data; -import com.google.common.base.Function; -import com.google.common.collect.Lists; import com.metamx.common.parsers.JSONParser; import com.metamx.common.parsers.Parser; -import com.metamx.common.parsers.ToLowerCaseParser; import org.codehaus.jackson.annotate.JsonProperty; -import javax.annotation.Nullable; import java.util.List; /** @@ -39,17 +35,7 @@ public class JSONDataSpec implements DataSpec @JsonProperty("dimensions") List dimensions ) { - this.dimensions = (dimensions == null) ? dimensions : Lists.transform( - dimensions, - new Function() - { - @Override - public String apply(@Nullable String input) - { - return input.toLowerCase(); - } - } - ); + this.dimensions = dimensions; } @JsonProperty("dimensions") @@ -71,8 +57,8 @@ public class JSONDataSpec implements DataSpec } @Override - public Parser getParser() + public Parser getParser() { - return new ToLowerCaseParser(new JSONParser()); + return new JSONParser(); } } diff --git a/index-common/src/main/java/com/metamx/druid/indexer/data/StringInputRowParser.java b/index-common/src/main/java/com/metamx/druid/indexer/data/StringInputRowParser.java index 29d12c202d0..4c5e2dcd9bb 100644 --- a/index-common/src/main/java/com/metamx/druid/indexer/data/StringInputRowParser.java +++ b/index-common/src/main/java/com/metamx/druid/indexer/data/StringInputRowParser.java @@ -23,6 +23,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.metamx.common.exception.FormattedException; import com.metamx.common.parsers.Parser; +import com.metamx.common.parsers.ToLowerCaseParser; import com.metamx.druid.input.InputRow; import com.metamx.druid.input.MapBasedInputRow; import org.codehaus.jackson.annotate.JsonCreator; @@ -59,7 +60,7 @@ public class StringInputRowParser } this.dimensionExclusions.add(timestampSpec.getTimestampColumn()); - this.parser = dataSpec.getParser(); + this.parser = new ToLowerCaseParser(dataSpec.getParser()); } public StringInputRowParser addDimensionExclusion(String dimension) diff --git a/index-common/src/main/java/com/metamx/druid/indexer/data/ToLowercaseDataSpec.java b/index-common/src/main/java/com/metamx/druid/indexer/data/ToLowercaseDataSpec.java new file mode 100644 index 00000000000..2156314b4c6 --- /dev/null +++ b/index-common/src/main/java/com/metamx/druid/indexer/data/ToLowercaseDataSpec.java @@ -0,0 +1,70 @@ +/* + * Druid - a distributed column store. + * Copyright (C) 2012 Metamarkets Group Inc. 
+ * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + */ + +package com.metamx.druid.indexer.data; + +import com.metamx.common.parsers.Parser; +import com.metamx.common.parsers.ToLowerCaseParser; +import org.codehaus.jackson.annotate.JsonValue; + +import java.util.List; + +/** + */ +public class ToLowercaseDataSpec implements DataSpec +{ + private final DataSpec delegate; + + public ToLowercaseDataSpec( + DataSpec delegate + ) + { + this.delegate = delegate; + } + + @Override + public void verify(List usedCols) + { + delegate.verify(usedCols); + } + + @Override + public boolean hasCustomDimensions() + { + return delegate.hasCustomDimensions(); + } + + @Override + public List getDimensions() + { + return delegate.getDimensions(); + } + + @Override + public Parser getParser() + { + return new ToLowerCaseParser(delegate.getParser()); + } + + @JsonValue + public DataSpec getDelegate() + { + return delegate; + } +} diff --git a/index-common/src/test/java/com/metamx/druid/index/v1/IncrementalIndexTest.java b/index-common/src/test/java/com/metamx/druid/index/v1/IncrementalIndexTest.java new file mode 100644 index 00000000000..17f089ab3ee --- /dev/null +++ b/index-common/src/test/java/com/metamx/druid/index/v1/IncrementalIndexTest.java @@ -0,0 +1,81 @@ +/* + * Druid - a distributed column store. + * Copyright (C) 2012 Metamarkets Group Inc. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
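[Editor's illustration] A quick usage sketch of the new wrapper (illustration only, not taken from the patch): lowercasing of parsed field names now happens in exactly one place, the ToLowerCaseParser returned by ToLowercaseDataSpec, instead of inside every DataSpec constructor. The same wrapping pattern appears below in HadoopDruidIndexerConfig.setDataSpec() and StringInputRowParser. The parse() call assumes the metamx Parser interface exposes Map<String, Object> parse(String); the sample JSON and dimension names are made up.

import com.metamx.common.parsers.Parser;
import com.metamx.druid.indexer.data.DataSpec;
import com.metamx.druid.indexer.data.JSONDataSpec;
import com.metamx.druid.indexer.data.ToLowercaseDataSpec;

import java.util.Arrays;
import java.util.Map;

public class ToLowercaseDataSpecExample
{
  public static void main(String[] args)
  {
    // The delegate no longer lowercases anything; the wrapper adds it back at the parser level.
    final DataSpec spec = new ToLowercaseDataSpec(new JSONDataSpec(Arrays.asList("Country", "Device")));

    // getParser() hands back a ToLowerCaseParser wrapping the delegate's JSONParser.
    final Parser<String, Object> parser = spec.getParser();

    // Assumption: parse(String) returns a Map keyed by field name, with the keys
    // lowercased by the wrapping parser ("country", "device").
    final Map<String, Object> event = parser.parse("{\"Country\": \"US\", \"Device\": \"phone\"}");
    System.out.println(event);

    // getDimensions() is a plain pass-through to the delegate, so dimension names are only
    // normalized where rows are actually consumed (e.g. IncrementalIndex.add()).
    System.out.println(spec.getDimensions());
  }
}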
+ */ + +package com.metamx.druid.index.v1; + +import com.google.common.collect.ImmutableMap; +import com.metamx.druid.QueryGranularity; +import com.metamx.druid.aggregation.AggregatorFactory; +import com.metamx.druid.input.MapBasedInputRow; +import com.metamx.druid.input.Row; +import junit.framework.Assert; +import org.junit.Test; + +import java.util.Arrays; +import java.util.Iterator; + +/** + */ +public class IncrementalIndexTest +{ + + @Test + public void testCaseInsensitivity() throws Exception + { + final long timestamp = System.currentTimeMillis(); + + IncrementalIndex index = createCaseInsensitiveIndex(timestamp); + + Assert.assertEquals(Arrays.asList("dim1", "dim2"), index.getDimensions()); + Assert.assertEquals(2, index.size()); + + final Iterator rows = index.iterator(); + Row row = rows.next(); + Assert.assertEquals(timestamp, row.getTimestampFromEpoch()); + Assert.assertEquals(Arrays.asList("1"), row.getDimension("dim1")); + Assert.assertEquals(Arrays.asList("2"), row.getDimension("dim2")); + + row = rows.next(); + Assert.assertEquals(timestamp, row.getTimestampFromEpoch()); + Assert.assertEquals(Arrays.asList("3"), row.getDimension("dim1")); + Assert.assertEquals(Arrays.asList("4"), row.getDimension("dim2")); + } + + public static IncrementalIndex createCaseInsensitiveIndex(long timestamp) + { + IncrementalIndex index = new IncrementalIndex(0L, QueryGranularity.NONE, new AggregatorFactory[]{}); + + index.add( + new MapBasedInputRow( + timestamp, + Arrays.asList("Dim1", "DiM2"), + ImmutableMap.of("dim1", "1", "dim2", "2", "DIM1", "3", "dIM2", "4") + ) + ); + + index.add( + new MapBasedInputRow( + timestamp, + Arrays.asList("diM1", "dIM2"), + ImmutableMap.of("Dim1", "1", "DiM2", "2", "dim1", "3", "dim2", "4") + ) + ); + return index; + } +} diff --git a/server/src/test/resources/druid.sample.tsv b/index-common/src/test/resources/druid.sample.tsv similarity index 100% rename from server/src/test/resources/druid.sample.tsv rename to index-common/src/test/resources/druid.sample.tsv diff --git a/server/src/test/resources/druid.sample.tsv.bottom b/index-common/src/test/resources/druid.sample.tsv.bottom similarity index 100% rename from server/src/test/resources/druid.sample.tsv.bottom rename to index-common/src/test/resources/druid.sample.tsv.bottom diff --git a/server/src/test/resources/druid.sample.tsv.top b/index-common/src/test/resources/druid.sample.tsv.top similarity index 100% rename from server/src/test/resources/druid.sample.tsv.top rename to index-common/src/test/resources/druid.sample.tsv.top diff --git a/indexer/src/main/java/com/metamx/druid/indexer/HadoopDruidIndexerConfig.java b/indexer/src/main/java/com/metamx/druid/indexer/HadoopDruidIndexerConfig.java index 51244685f77..be4b09fd9c6 100644 --- a/indexer/src/main/java/com/metamx/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexer/src/main/java/com/metamx/druid/indexer/HadoopDruidIndexerConfig.java @@ -36,6 +36,7 @@ import com.metamx.common.logger.Logger; import com.metamx.druid.RegisteringNode; import com.metamx.druid.index.v1.serde.Registererer; import com.metamx.druid.indexer.data.DataSpec; +import com.metamx.druid.indexer.data.ToLowercaseDataSpec; import com.metamx.druid.indexer.granularity.GranularitySpec; import com.metamx.druid.indexer.granularity.UniformGranularitySpec; import com.metamx.druid.indexer.path.PathSpec; @@ -219,7 +220,7 @@ public class HadoopDruidIndexerConfig public void setTimestampColumnName(String timestampColumnName) { - this.timestampColumnName = timestampColumnName.toLowerCase(); + 
this.timestampColumnName = timestampColumnName; } @JsonProperty() @@ -241,7 +242,7 @@ public class HadoopDruidIndexerConfig public void setDataSpec(DataSpec dataSpec) { - this.dataSpec = dataSpec; + this.dataSpec = new ToLowercaseDataSpec(dataSpec); } @Deprecated @@ -326,7 +327,7 @@ public class HadoopDruidIndexerConfig public void setPartitionDimension(String partitionDimension) { - this.partitionDimension = (partitionDimension == null) ? partitionDimension : partitionDimension.toLowerCase(); + this.partitionDimension = (partitionDimension == null) ? partitionDimension : partitionDimension; } public boolean partitionByDimension() diff --git a/merger/src/main/java/com/metamx/druid/merger/common/task/DeleteTask.java b/merger/src/main/java/com/metamx/druid/merger/common/task/DeleteTask.java index 3127a5c5c90..5f08298cd2d 100644 --- a/merger/src/main/java/com/metamx/druid/merger/common/task/DeleteTask.java +++ b/merger/src/main/java/com/metamx/druid/merger/common/task/DeleteTask.java @@ -76,12 +76,7 @@ public class DeleteTask extends AbstractTask { // Strategy: Create an empty segment covering the interval to be deleted final IncrementalIndex empty = new IncrementalIndex(0, QueryGranularity.NONE, new AggregatorFactory[0]); - final IndexableAdapter emptyAdapter = new IncrementalIndexAdapter( - this.getInterval(), - empty, - new ArrayList(), - new ArrayList() - ); + final IndexableAdapter emptyAdapter = new IncrementalIndexAdapter(this.getInterval(), empty); // Create DataSegment final DataSegment segment = diff --git a/merger/src/main/java/com/metamx/druid/merger/coordinator/exec/TaskConsumer.java b/merger/src/main/java/com/metamx/druid/merger/coordinator/exec/TaskConsumer.java index be2bca53fbb..c655a152852 100644 --- a/merger/src/main/java/com/metamx/druid/merger/coordinator/exec/TaskConsumer.java +++ b/merger/src/main/java/com/metamx/druid/merger/coordinator/exec/TaskConsumer.java @@ -106,7 +106,7 @@ public class TaskConsumer implements Runnable catch (Exception e) { log.makeAlert(e, "Failed to hand off task") .addData("task", task.getId()) - .addData("type", task.getType().toString().toLowerCase()) + .addData("type", task.getType().toString()) .addData("dataSource", task.getDataSource()) .addData("interval", task.getInterval()) .emit(); @@ -136,7 +136,7 @@ public class TaskConsumer implements Runnable ); final ServiceMetricEvent.Builder builder = new ServiceMetricEvent.Builder() .setUser2(task.getDataSource()) - .setUser4(task.getType().toString().toLowerCase()) + .setUser4(task.getType().toString()) .setUser5(task.getInterval().toString()); // Run preflight checks @@ -233,7 +233,7 @@ public class TaskConsumer implements Runnable bytes += segment.getSize(); } - builder.setUser3(status.getStatusCode().toString().toLowerCase()); + builder.setUser3(status.getStatusCode().toString()); emitter.emit(builder.build("indexer/time/run/millis", status.getDuration())); emitter.emit(builder.build("indexer/segment/count", status.getSegments().size())); @@ -245,7 +245,7 @@ public class TaskConsumer implements Runnable String.format("Failed to index: %s", task.getDataSource()), ImmutableMap.builder() .put("task", task.getId()) - .put("type", task.getType().toString().toLowerCase()) + .put("type", task.getType().toString()) .put("dataSource", task.getDataSource()) .put("interval", task.getInterval()) .build() diff --git a/pom.xml b/pom.xml index 2e8efcee5bd..75ff91e97f7 100644 --- a/pom.xml +++ b/pom.xml @@ -41,11 +41,11 @@ - server - client common - indexer index-common + client + indexer + server merger 
realtime examples diff --git a/server/pom.xml b/server/pom.xml index af659c2ac35..eed7d78371c 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -216,6 +216,13 @@ test-jar test + + com.metamx.druid + druid-index-common + ${project.parent.version} + test-jar + test + diff --git a/server/src/main/java/com/metamx/druid/index/brita/AndFilter.java b/server/src/main/java/com/metamx/druid/index/brita/AndFilter.java index 5132cd7bf8e..2de8823d9ca 100644 --- a/server/src/main/java/com/metamx/druid/index/brita/AndFilter.java +++ b/server/src/main/java/com/metamx/druid/index/brita/AndFilter.java @@ -38,7 +38,7 @@ public class AndFilter implements Filter } @Override - public ImmutableConciseSet goConcise(InvertedIndexSelector selector) + public ImmutableConciseSet goConcise(BitmapIndexSelector selector) { if (filters.size() == 1) { return filters.get(0).goConcise(selector); diff --git a/server/src/main/java/com/metamx/druid/index/brita/InvertedIndexSelector.java b/server/src/main/java/com/metamx/druid/index/brita/BitmapIndexSelector.java similarity index 83% rename from server/src/main/java/com/metamx/druid/index/brita/InvertedIndexSelector.java rename to server/src/main/java/com/metamx/druid/index/brita/BitmapIndexSelector.java index 5361951075a..d2e73353a1b 100644 --- a/server/src/main/java/com/metamx/druid/index/brita/InvertedIndexSelector.java +++ b/server/src/main/java/com/metamx/druid/index/brita/BitmapIndexSelector.java @@ -19,17 +19,14 @@ package com.metamx.druid.index.brita; -import com.metamx.druid.index.v1.processing.Offset; import com.metamx.druid.kv.Indexed; import it.uniroma3.mat.extendedset.intset.ImmutableConciseSet; /** */ -public interface InvertedIndexSelector +public interface BitmapIndexSelector { public Indexed getDimensionValues(String dimension); public int getNumRows(); - public int[] getInvertedIndex(String dimension, String value); public ImmutableConciseSet getConciseInvertedIndex(String dimension, String value); - public Offset getInvertedIndexOffset(String dimension, String value); } diff --git a/server/src/main/java/com/metamx/druid/index/brita/DimensionPredicateFilter.java b/server/src/main/java/com/metamx/druid/index/brita/DimensionPredicateFilter.java index e588a8441d1..69c636ac6a1 100644 --- a/server/src/main/java/com/metamx/druid/index/brita/DimensionPredicateFilter.java +++ b/server/src/main/java/com/metamx/druid/index/brita/DimensionPredicateFilter.java @@ -43,7 +43,7 @@ class DimensionPredicateFilter implements Filter } @Override - public ImmutableConciseSet goConcise(final InvertedIndexSelector selector) + public ImmutableConciseSet goConcise(final BitmapIndexSelector selector) { return ImmutableConciseSet.union( FunctionalIterable.create(selector.getDimensionValues(dimension)) diff --git a/server/src/main/java/com/metamx/druid/index/brita/ExtractionFilter.java b/server/src/main/java/com/metamx/druid/index/brita/ExtractionFilter.java index 7eeb106936f..883e19226e8 100644 --- a/server/src/main/java/com/metamx/druid/index/brita/ExtractionFilter.java +++ b/server/src/main/java/com/metamx/druid/index/brita/ExtractionFilter.java @@ -47,7 +47,7 @@ public class ExtractionFilter implements Filter this.fn = fn; } - private List makeFilters(InvertedIndexSelector selector) + private List makeFilters(BitmapIndexSelector selector) { final Indexed allDimVals = selector.getDimensionValues(dimension); final List filters = Lists.newArrayList(); @@ -63,7 +63,7 @@ public class ExtractionFilter implements Filter } @Override - public ImmutableConciseSet 
goConcise(InvertedIndexSelector selector) + public ImmutableConciseSet goConcise(BitmapIndexSelector selector) { return new OrFilter(makeFilters(selector)).goConcise(selector); } diff --git a/server/src/main/java/com/metamx/druid/index/brita/Filter.java b/server/src/main/java/com/metamx/druid/index/brita/Filter.java index 36a22468471..fcfde0be18a 100644 --- a/server/src/main/java/com/metamx/druid/index/brita/Filter.java +++ b/server/src/main/java/com/metamx/druid/index/brita/Filter.java @@ -25,6 +25,6 @@ import it.uniroma3.mat.extendedset.intset.ImmutableConciseSet; */ public interface Filter { - public ImmutableConciseSet goConcise(InvertedIndexSelector selector); + public ImmutableConciseSet goConcise(BitmapIndexSelector selector); public ValueMatcher makeMatcher(ValueMatcherFactory factory); } diff --git a/server/src/main/java/com/metamx/druid/index/brita/NotFilter.java b/server/src/main/java/com/metamx/druid/index/brita/NotFilter.java index 4c3646fae91..d4d335add0c 100644 --- a/server/src/main/java/com/metamx/druid/index/brita/NotFilter.java +++ b/server/src/main/java/com/metamx/druid/index/brita/NotFilter.java @@ -35,7 +35,7 @@ public class NotFilter implements Filter } @Override - public ImmutableConciseSet goConcise(InvertedIndexSelector selector) + public ImmutableConciseSet goConcise(BitmapIndexSelector selector) { return ImmutableConciseSet.complement( baseFilter.goConcise(selector), diff --git a/server/src/main/java/com/metamx/druid/index/brita/OrFilter.java b/server/src/main/java/com/metamx/druid/index/brita/OrFilter.java index 3de948682a9..c9312b0b6f1 100644 --- a/server/src/main/java/com/metamx/druid/index/brita/OrFilter.java +++ b/server/src/main/java/com/metamx/druid/index/brita/OrFilter.java @@ -42,7 +42,7 @@ public class OrFilter implements Filter } @Override - public ImmutableConciseSet goConcise(InvertedIndexSelector selector) + public ImmutableConciseSet goConcise(BitmapIndexSelector selector) { if (filters.size() == 1) { return filters.get(0).goConcise(selector); diff --git a/server/src/main/java/com/metamx/druid/index/brita/SelectorFilter.java b/server/src/main/java/com/metamx/druid/index/brita/SelectorFilter.java index 6533a847d0c..5f711c63817 100644 --- a/server/src/main/java/com/metamx/druid/index/brita/SelectorFilter.java +++ b/server/src/main/java/com/metamx/druid/index/brita/SelectorFilter.java @@ -39,7 +39,7 @@ public class SelectorFilter implements Filter } @Override - public ImmutableConciseSet goConcise(InvertedIndexSelector selector) + public ImmutableConciseSet goConcise(BitmapIndexSelector selector) { return selector.getConciseInvertedIndex(dimension, value); } diff --git a/server/src/main/java/com/metamx/druid/index/v1/IncrementalIndexAdapter.java b/server/src/main/java/com/metamx/druid/index/v1/IncrementalIndexAdapter.java index d975770f560..3da50e3eaea 100644 --- a/server/src/main/java/com/metamx/druid/index/v1/IncrementalIndexAdapter.java +++ b/server/src/main/java/com/metamx/druid/index/v1/IncrementalIndexAdapter.java @@ -24,6 +24,7 @@ import com.google.common.collect.Maps; import com.metamx.common.guava.FunctionalIterable; import com.metamx.common.logger.Logger; import com.metamx.druid.aggregation.Aggregator; +import com.metamx.druid.input.Row; import com.metamx.druid.kv.EmptyIndexedInts; import com.metamx.druid.kv.Indexed; import com.metamx.druid.kv.IndexedInts; @@ -46,39 +47,36 @@ public class IncrementalIndexAdapter implements IndexableAdapter private final Interval dataInterval; private final IncrementalIndex index; - private final List 
dimensions; - private final List metrics; private final Map> invertedIndexes; public IncrementalIndexAdapter( - Interval dataInterval, IncrementalIndex index, List dimensions, List metrics + Interval dataInterval, IncrementalIndex index ) { this.dataInterval = dataInterval; this.index = index; - this.dimensions = dimensions; - this.metrics = metrics; this.invertedIndexes = Maps.newHashMap(); - for (String dimension : dimensions) { + for (String dimension : index.getDimensions()) { invertedIndexes.put(dimension, Maps.newHashMap()); } int rowNum = 0; - for (IncrementalIndex.TimeAndDims timeAndDims : index.facts.keySet()) { + for (Row row : index) { + + } + for (IncrementalIndex.TimeAndDims timeAndDims : index.getFacts().keySet()) { final String[][] dims = timeAndDims.getDims(); - for (String dimension : dimensions) { - if (index.dimensionOrder == null || invertedIndexes == null) { - log.error("wtf, dimensionOrder and indvertedIndexes are null"); - } - int dimIndex = index.dimensionOrder.get(dimension); + for (String dimension : index.getDimensions()) { + int dimIndex = index.getDimensionIndex(dimension); Map conciseSets = invertedIndexes.get(dimension); if (conciseSets == null || dims == null) { log.error("conciseSets and dims are null!"); + continue; } if (dimIndex >= dims.length || dims[dimIndex] == null) { continue; @@ -120,19 +118,19 @@ public class IncrementalIndexAdapter implements IndexableAdapter @Override public Indexed getAvailableDimensions() { - return new ListIndexed(dimensions, String.class); + return new ListIndexed(index.getDimensions(), String.class); } @Override public Indexed getAvailableMetrics() { - return new ListIndexed(metrics, String.class); + return new ListIndexed(index.getMetricNames(), String.class); } @Override public Indexed getDimValueLookup(String dimension) { - final IncrementalIndex.DimDim dimDim = index.dimValues.get(dimension); + final IncrementalIndex.DimDim dimDim = index.getDimension(dimension); dimDim.sort(); return new Indexed() @@ -173,7 +171,7 @@ public class IncrementalIndexAdapter implements IndexableAdapter public Iterable getRows() { return FunctionalIterable - .create(index.facts.entrySet()) + .create(index.getFacts().entrySet()) .transform( new Function, Rowboat>() { @@ -189,9 +187,9 @@ public class IncrementalIndexAdapter implements IndexableAdapter final Aggregator[] aggs = input.getValue(); int[][] dims = new int[dimValues.length][]; - for (String dimension : dimensions) { - int dimIndex = index.dimensionOrder.get(dimension); - final IncrementalIndex.DimDim dimDim = index.dimValues.get(dimension); + for (String dimension : index.getDimensions()) { + int dimIndex = index.getDimensionIndex(dimension); + final IncrementalIndex.DimDim dimDim = index.getDimension(dimension); dimDim.sort(); if (dimIndex >= dimValues.length || dimValues[dimIndex] == null) { diff --git a/server/src/main/java/com/metamx/druid/index/v1/IncrementalIndexStorageAdapter.java b/server/src/main/java/com/metamx/druid/index/v1/IncrementalIndexStorageAdapter.java index d2486aa585f..153ef923042 100644 --- a/server/src/main/java/com/metamx/druid/index/v1/IncrementalIndexStorageAdapter.java +++ b/server/src/main/java/com/metamx/druid/index/v1/IncrementalIndexStorageAdapter.java @@ -85,7 +85,7 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter @Override public int getDimensionCardinality(String dimension) { - IncrementalIndex.DimDim dimDim = index.getDimension(dimension); + IncrementalIndex.DimDim dimDim = index.getDimension(dimension.toLowerCase()); if 
(dimDim == null) { return 0; } @@ -232,8 +232,9 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter } @Override - public DimensionSelector makeDimensionSelector(String dimensionName) + public DimensionSelector makeDimensionSelector(String dimension) { + final String dimensionName = dimension.toLowerCase(); final IncrementalIndex.DimDim dimValLookup = index.getDimension(dimensionName); if (dimValLookup == null) { return null; @@ -303,8 +304,9 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter } @Override - public FloatMetricSelector makeFloatMetricSelector(String metricName) + public FloatMetricSelector makeFloatMetricSelector(String metric) { + final String metricName = metric.toLowerCase(); final Integer metricIndexInt = index.getMetricIndex(metricName); if (metricIndexInt == null) { return new FloatMetricSelector() @@ -330,8 +332,9 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter } @Override - public ComplexMetricSelector makeComplexMetricSelector(String metricName) + public ComplexMetricSelector makeComplexMetricSelector(String metric) { + final String metricName = metric.toLowerCase(); final Integer metricIndexInt = index.getMetricIndex(metricName); if (metricIndexInt == null) { return null; @@ -385,7 +388,7 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter String[] tmpDimensionNames = new String[dimensions.size()]; int i = 0; for (String dimension : dimensions) { - Integer dimIndex = index.getDimensionIndex(dimension); + Integer dimIndex = index.getDimensionIndex(dimension.toLowerCase()); if (dimIndex != null) { tmpDimensionNames[i] = dimension; tmpDimensionIndexes[i] = dimIndex; @@ -495,11 +498,11 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter @Override public ValueMatcher makeValueMatcher(String dimension, String value) { - Integer dimIndexObject = index.getDimensionIndex(dimension); + Integer dimIndexObject = index.getDimensionIndex(dimension.toLowerCase()); if (dimIndexObject == null) { return new BooleanValueMatcher(false); } - String idObject = index.getDimension(dimension).get(value); + String idObject = index.getDimension(dimension.toLowerCase()).get(value); if (idObject == null) { return new BooleanValueMatcher(false); } @@ -530,7 +533,7 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter @Override public ValueMatcher makeValueMatcher(String dimension, final Predicate predicate) { - Integer dimIndexObject = index.getDimensionIndex(dimension); + Integer dimIndexObject = index.getDimensionIndex(dimension.toLowerCase()); if (dimIndexObject == null) { return new BooleanValueMatcher(false); } diff --git a/server/src/main/java/com/metamx/druid/index/v1/IndexMerger.java b/server/src/main/java/com/metamx/druid/index/v1/IndexMerger.java index 5e6987ccb6f..23c6dbb6842 100644 --- a/server/src/main/java/com/metamx/druid/index/v1/IndexMerger.java +++ b/server/src/main/java/com/metamx/druid/index/v1/IndexMerger.java @@ -112,8 +112,8 @@ public class IndexMerger final IncrementalIndex index, final Interval dataInterval, File outDir, ProgressIndicator progress ) throws IOException { - final long firstTimestamp = index.facts.firstKey().getTimestamp(); - final long lastTimestamp = index.facts.lastKey().getTimestamp(); + final long firstTimestamp = index.getMinTime().getMillis(); + final long lastTimestamp = index.getMaxTime().getMillis(); if (!(dataInterval.contains(firstTimestamp) && dataInterval.contains(lastTimestamp))) { throw new IAE( "interval[%s] does 
not encapsulate the full range of timestamps[%s, %s]", @@ -130,26 +130,10 @@ public class IndexMerger throw new ISE("Can only persist to directories, [%s] wasn't a directory", outDir); } - final List dimensions = Lists.transform( - Lists.newArrayList(index.dimensionOrder.keySet()), - new Function() - { - @Override - public String apply(@Nullable String input) - { - return input.toLowerCase(); - } - } - ); - final List metrics = Lists.newArrayListWithCapacity(index.metrics.length); - for (int i = 0; i < index.metrics.length; ++i) { - metrics.add(index.metrics[i].getName().toLowerCase()); - } - log.info("Starting persist for interval[%s], rows[%,d]", dataInterval, index.size()); return merge( - Arrays.asList(new IncrementalIndexAdapter(dataInterval, index, dimensions, metrics)), - index.metrics, + Arrays.asList(new IncrementalIndexAdapter(dataInterval, index)), + index.getMetricAggs(), outDir, progress ); diff --git a/server/src/main/java/com/metamx/druid/index/v1/IndexStorageAdapter.java b/server/src/main/java/com/metamx/druid/index/v1/IndexStorageAdapter.java index 8d4c7691ee4..2d41ce338aa 100644 --- a/server/src/main/java/com/metamx/druid/index/v1/IndexStorageAdapter.java +++ b/server/src/main/java/com/metamx/druid/index/v1/IndexStorageAdapter.java @@ -31,8 +31,8 @@ import com.metamx.common.logger.Logger; import com.metamx.druid.BaseStorageAdapter; import com.metamx.druid.Capabilities; import com.metamx.druid.QueryGranularity; +import com.metamx.druid.index.brita.BitmapIndexSelector; import com.metamx.druid.index.brita.Filter; -import com.metamx.druid.index.brita.InvertedIndexSelector; import com.metamx.druid.index.v1.processing.ArrayBasedOffset; import com.metamx.druid.index.v1.processing.Cursor; import com.metamx.druid.index.v1.processing.DimensionSelector; @@ -143,7 +143,7 @@ public class IndexStorageAdapter extends BaseStorageAdapter baseOffset = new ArrayBasedOffset(ids, intervalStartAndEnd.lhs); } else { baseOffset = new StartLimitedOffset( - new ConciseOffset(filter.goConcise(new IndexBasedInvertedIndexSelector(index))), + new ConciseOffset(filter.goConcise(new IndexBasedBitmapIndexSelector(index))), intervalStartAndEnd.lhs ); } @@ -199,8 +199,9 @@ public class IndexStorageAdapter extends BaseStorageAdapter } @Override - public DimensionSelector makeDimensionSelector(final String dimensionName) + public DimensionSelector makeDimensionSelector(String dimension) { + final String dimensionName = dimension.toLowerCase(); final String[] nameLookup = index.reverseDimLookup.get(dimensionName); if (nameLookup == null) { return null; @@ -242,9 +243,10 @@ public class IndexStorageAdapter extends BaseStorageAdapter } @Override - public FloatMetricSelector makeFloatMetricSelector(String metricName) + public FloatMetricSelector makeFloatMetricSelector(String metric) { - IndexedFloats cachedFloats = (IndexedFloats) metricHolderCache.get(metricName); + String metricName = metric.toLowerCase(); + IndexedFloats cachedFloats = (IndexedFloats) metricHolderCache.get(metric); if (cachedFloats == null) { MetricHolder holder = index.metricVals.get(metricName); if (holder == null) { @@ -274,8 +276,9 @@ public class IndexStorageAdapter extends BaseStorageAdapter } @Override - public ComplexMetricSelector makeComplexMetricSelector(String metricName) + public ComplexMetricSelector makeComplexMetricSelector(String metric) { + final String metricName = metric.toLowerCase(); Indexed cachedComplex = (Indexed) metricHolderCache.get(metricName); if (cachedComplex == null) { MetricHolder holder = 
index.metricVals.get(metricName); @@ -335,23 +338,21 @@ public class IndexStorageAdapter extends BaseStorageAdapter @Override public Indexed getDimValueLookup(String dimension) { - return new ListIndexed(Lists.newArrayList(index.dimIdLookup.get(dimension).keySet()), String.class); + return new ListIndexed( + Lists.newArrayList(index.dimIdLookup.get(dimension.toLowerCase()).keySet()), String.class + ); } @Override public ImmutableConciseSet getInvertedIndex(String dimension, String dimVal) { - return index.getInvertedIndex(dimension, dimVal); + return index.getInvertedIndex(dimension.toLowerCase(), dimVal); } @Override public Offset getFilterOffset(Filter filter) { - return new ConciseOffset( - filter.goConcise( - new IndexBasedInvertedIndexSelector(index) - ) - ); + return new ConciseOffset(filter.goConcise(new IndexBasedBitmapIndexSelector(index))); } @Override @@ -459,11 +460,11 @@ public class IndexStorageAdapter extends BaseStorageAdapter } } - private static class IndexBasedInvertedIndexSelector implements InvertedIndexSelector + private static class IndexBasedBitmapIndexSelector implements BitmapIndexSelector { private final Index index; - public IndexBasedInvertedIndexSelector(final Index index) + public IndexBasedBitmapIndexSelector(final Index index) { this.index = index; } @@ -473,7 +474,7 @@ public class IndexStorageAdapter extends BaseStorageAdapter { return new Indexed() { - private final String[] dimVals = index.reverseDimLookup.get(dimension); + private final String[] dimVals = index.reverseDimLookup.get(dimension.toLowerCase()); @Override public Class getClazz() @@ -513,22 +514,10 @@ public class IndexStorageAdapter extends BaseStorageAdapter return index.timeOffsets.length; } - @Override - public int[] getInvertedIndex(String dimension, String value) - { - throw new UnsupportedOperationException(); - } - - @Override - public Offset getInvertedIndexOffset(String dimension, String value) - { - return new ArrayBasedOffset(getInvertedIndex(dimension, value)); - } - @Override public ImmutableConciseSet getConciseInvertedIndex(String dimension, String value) { - return index.getInvertedIndex(dimension, value); + return index.getInvertedIndex(dimension.toLowerCase(), value); } } } diff --git a/server/src/main/java/com/metamx/druid/index/v1/MMappedIndexStorageAdapter.java b/server/src/main/java/com/metamx/druid/index/v1/MMappedIndexStorageAdapter.java index 537f5cd55fb..61d2d895fc5 100644 --- a/server/src/main/java/com/metamx/druid/index/v1/MMappedIndexStorageAdapter.java +++ b/server/src/main/java/com/metamx/druid/index/v1/MMappedIndexStorageAdapter.java @@ -30,8 +30,8 @@ import com.metamx.common.guava.FunctionalIterator; import com.metamx.druid.BaseStorageAdapter; import com.metamx.druid.Capabilities; import com.metamx.druid.QueryGranularity; +import com.metamx.druid.index.brita.BitmapIndexSelector; import com.metamx.druid.index.brita.Filter; -import com.metamx.druid.index.brita.InvertedIndexSelector; import com.metamx.druid.index.v1.processing.Cursor; import com.metamx.druid.index.v1.processing.DimensionSelector; import com.metamx.druid.index.v1.processing.Offset; @@ -77,7 +77,7 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter @Override public int getDimensionCardinality(String dimension) { - final Indexed dimValueLookup = index.getDimValueLookup(dimension); + final Indexed dimValueLookup = index.getDimValueLookup(dimension.toLowerCase()); if (dimValueLookup == null) { return 0; } @@ -127,7 +127,7 @@ public class MMappedIndexStorageAdapter extends 
BaseStorageAdapter if (filter == null) { iterable = new NoFilterCursorIterable(index, actualInterval, gran); } else { - Offset offset = new ConciseOffset(filter.goConcise(new MMappedInvertedIndexSelector(index))); + Offset offset = new ConciseOffset(filter.goConcise(new MMappedBitmapIndexSelector(index))); iterable = new CursorIterable(index, actualInterval, gran, offset); } @@ -144,13 +144,13 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter @Override public Indexed getDimValueLookup(String dimension) { - return index.getDimValueLookup(dimension); + return index.getDimValueLookup(dimension.toLowerCase()); } @Override public ImmutableConciseSet getInvertedIndex(String dimension, String dimVal) { - return index.getInvertedIndex(dimension, dimVal); + return index.getInvertedIndex(dimension.toLowerCase(), dimVal); } @Override @@ -158,7 +158,7 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter { return new ConciseOffset( filter.goConcise( - new MMappedInvertedIndexSelector(index) + new MMappedBitmapIndexSelector(index) ) ); } @@ -241,8 +241,9 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter } @Override - public DimensionSelector makeDimensionSelector(final String dimensionName) + public DimensionSelector makeDimensionSelector(String dimension) { + final String dimensionName = dimension; final Indexed rowVals = index.getDimColumn(dimensionName); final Indexed dimValueLookup = index.getDimValueLookup(dimensionName); @@ -280,8 +281,9 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter } @Override - public FloatMetricSelector makeFloatMetricSelector(String metricName) + public FloatMetricSelector makeFloatMetricSelector(String metric) { + final String metricName = metric.toLowerCase(); IndexedFloats cachedMetricVals = (IndexedFloats) metricHolderCache.get(metricName); if (cachedMetricVals == null) { @@ -315,8 +317,9 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter } @Override - public ComplexMetricSelector makeComplexMetricSelector(String metricName) + public ComplexMetricSelector makeComplexMetricSelector(String metric) { + final String metricName = metric.toLowerCase(); Indexed cachedMetricVals = (Indexed) metricHolderCache.get(metricName); if (cachedMetricVals == null) { @@ -490,8 +493,9 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter } @Override - public DimensionSelector makeDimensionSelector(final String dimensionName) + public DimensionSelector makeDimensionSelector(final String dimension) { + final String dimensionName = dimension.toLowerCase(); final Indexed rowVals = index.getDimColumn(dimensionName); final Indexed dimValueLookup = index.getDimValueLookup(dimensionName); @@ -529,8 +533,9 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter } @Override - public FloatMetricSelector makeFloatMetricSelector(String metricName) + public FloatMetricSelector makeFloatMetricSelector(String metric) { + final String metricName = metric.toLowerCase(); IndexedFloats cachedMetricVals = (IndexedFloats) metricCacheMap.get(metricName); if (cachedMetricVals == null) { @@ -566,8 +571,9 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter } @Override - public ComplexMetricSelector makeComplexMetricSelector(String metricName) + public ComplexMetricSelector makeComplexMetricSelector(String metric) { + final String metricName = metric.toLowerCase(); Indexed cachedMetricVals = (Indexed) metricCacheMap.get(metricName); if (cachedMetricVals == null) { @@ 
-625,11 +631,11 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter } } - private static class MMappedInvertedIndexSelector implements InvertedIndexSelector + private static class MMappedBitmapIndexSelector implements BitmapIndexSelector { private final MMappedIndex index; - public MMappedInvertedIndexSelector(final MMappedIndex index) + public MMappedBitmapIndexSelector(final MMappedIndex index) { this.index = index; } @@ -637,7 +643,7 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter @Override public Indexed getDimensionValues(String dimension) { - return index.getDimValueLookup(dimension); + return index.getDimValueLookup(dimension.toLowerCase()); } @Override @@ -646,22 +652,10 @@ public class MMappedIndexStorageAdapter extends BaseStorageAdapter return index.getReadOnlyTimestamps().size(); } - @Override - public int[] getInvertedIndex(String dimension, String value) - { - throw new UnsupportedOperationException(); - } - - @Override - public Offset getInvertedIndexOffset(String dimension, String value) - { - return new ConciseOffset(index.getInvertedIndex(dimension, value)); - } - @Override public ImmutableConciseSet getConciseInvertedIndex(String dimension, String value) { - return index.getInvertedIndex(dimension, value); + return index.getInvertedIndex(dimension.toLowerCase(), value); } } } diff --git a/server/src/test/java/com/metamx/druid/index/v1/EmptyIndexTest.java b/server/src/test/java/com/metamx/druid/index/v1/EmptyIndexTest.java index 26faef365fa..01dd32a3097 100644 --- a/server/src/test/java/com/metamx/druid/index/v1/EmptyIndexTest.java +++ b/server/src/test/java/com/metamx/druid/index/v1/EmptyIndexTest.java @@ -45,12 +45,7 @@ public class EmptyIndexTest tmpDir.deleteOnExit(); IncrementalIndex emptyIndex = new IncrementalIndex(0, QueryGranularity.NONE, new AggregatorFactory[0]); - IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter( - new Interval("2012-08-01/P3D"), - emptyIndex, - new ArrayList(), - new ArrayList() - ); + IncrementalIndexAdapter emptyIndexAdapter = new IncrementalIndexAdapter(new Interval("2012-08-01/P3D"), emptyIndex); IndexMerger.merge(Lists.newArrayList(emptyIndexAdapter), new AggregatorFactory[0], tmpDir); MMappedIndex emptyIndexMMapped = IndexIO.mapDir(tmpDir); diff --git a/server/src/test/java/com/metamx/druid/index/v1/IndexMergerTest.java b/server/src/test/java/com/metamx/druid/index/v1/IndexMergerTest.java new file mode 100644 index 00000000000..c3afa5eb94e --- /dev/null +++ b/server/src/test/java/com/metamx/druid/index/v1/IndexMergerTest.java @@ -0,0 +1,113 @@ +/* + * Druid - a distributed column store. + * Copyright (C) 2012 Metamarkets Group Inc. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License + * as published by the Free Software Foundation; either version 2 + * of the License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ */ + +package com.metamx.druid.index.v1; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.io.Files; +import com.metamx.druid.QueryGranularity; +import com.metamx.druid.aggregation.AggregatorFactory; +import com.metamx.druid.input.MapBasedInputRow; +import junit.framework.Assert; +import org.apache.commons.io.FileUtils; +import org.junit.Test; + +import java.io.File; +import java.util.Arrays; + +/** + */ +public class IndexMergerTest +{ + @Test + public void testPersistCaseInsensitive() throws Exception + { + final long timestamp = System.currentTimeMillis(); + IncrementalIndex toPersist = IncrementalIndexTest.createCaseInsensitiveIndex(timestamp); + + final File tempDir = Files.createTempDir(); + try { + MMappedIndex index = IndexIO.mapDir(IndexMerger.persist(toPersist, tempDir)); + + Assert.assertEquals(2, index.getTimestamps().size()); + Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index.getAvailableDimensions())); + Assert.assertEquals(0, index.getAvailableMetrics().size()); + } + finally { + tempDir.delete(); + } + } + + @Test + public void testPersistMergeCaseInsensitive() throws Exception + { + final long timestamp = System.currentTimeMillis(); + IncrementalIndex toPersist1 = IncrementalIndexTest.createCaseInsensitiveIndex(timestamp); + + IncrementalIndex toPersist2 = new IncrementalIndex(0L, QueryGranularity.NONE, new AggregatorFactory[]{}); + + toPersist2.add( + new MapBasedInputRow( + timestamp, + Arrays.asList("DIm1", "DIM2"), + ImmutableMap.of("dim1", "1", "dim2", "2", "DIm1", "10000", "DIM2", "100000000") + ) + ); + + toPersist2.add( + new MapBasedInputRow( + timestamp, + Arrays.asList("dIM1", "dIm2"), + ImmutableMap.of("DIm1", "1", "DIM2", "2", "dim1", "5", "dim2", "6") + ) + ); + + + final File tempDir1 = Files.createTempDir(); + final File tempDir2 = Files.createTempDir(); + final File mergedDir = Files.createTempDir(); + try { + MMappedIndex index1 = IndexIO.mapDir(IndexMerger.persist(toPersist1, tempDir1)); + + Assert.assertEquals(2, index1.getTimestamps().size()); + Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index1.getAvailableDimensions())); + Assert.assertEquals(0, index1.getAvailableMetrics().size()); + + MMappedIndex index2 = IndexIO.mapDir(IndexMerger.persist(toPersist2, tempDir2)); + + Assert.assertEquals(2, index2.getTimestamps().size()); + Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(index2.getAvailableDimensions())); + Assert.assertEquals(0, index2.getAvailableMetrics().size()); + + MMappedIndex merged = IndexIO.mapDir( + IndexMerger.mergeMMapped(Arrays.asList(index1, index2), new AggregatorFactory[]{}, mergedDir) + ); + + Assert.assertEquals(3, merged.getTimestamps().size()); + Assert.assertEquals(Arrays.asList("dim1", "dim2"), Lists.newArrayList(merged.getAvailableDimensions())); + Assert.assertEquals(0, merged.getAvailableMetrics().size()); + } + finally { + FileUtils.deleteQuietly(tempDir1); + FileUtils.deleteQuietly(tempDir2); + FileUtils.deleteQuietly(mergedDir); + } + } +} diff --git a/server/src/test/java/com/metamx/druid/index/v1/TestIndex.java b/server/src/test/java/com/metamx/druid/index/v1/TestIndex.java index 67e653daa0d..2a2013b68fc 100644 --- a/server/src/test/java/com/metamx/druid/index/v1/TestIndex.java +++ b/server/src/test/java/com/metamx/druid/index/v1/TestIndex.java @@ -19,16 +19,21 @@ package com.metamx.druid.index.v1; +import com.google.common.base.Charsets; import 
com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.Maps; import com.google.common.io.CharStreams; import com.google.common.io.Closeables; +import com.google.common.io.InputSupplier; import com.google.common.io.LineProcessor; import com.google.common.primitives.Ints; import com.metamx.common.ISE; import com.metamx.common.logger.Logger; +import com.metamx.common.parsers.DelimitedParser; +import com.metamx.common.parsers.Parser; +import com.metamx.common.parsers.ToLowerCaseParser; import com.metamx.druid.QueryGranularity; import com.metamx.druid.aggregation.AggregatorFactory; import com.metamx.druid.aggregation.DoubleSumAggregatorFactory; @@ -36,7 +41,11 @@ import com.metamx.druid.client.RangeIterable; import com.metamx.druid.guava.GuavaUtils; import com.metamx.druid.index.v1.serde.ComplexMetricSerde; import com.metamx.druid.index.v1.serde.ComplexMetrics; +import com.metamx.druid.indexer.data.DelimitedDataSpec; +import com.metamx.druid.indexer.data.StringInputRowParser; +import com.metamx.druid.indexer.data.TimestampSpec; import com.metamx.druid.input.InputRow; +import com.metamx.druid.input.MapBasedInputRow; import com.metamx.druid.kv.ArrayIndexed; import com.metamx.druid.kv.Indexed; import com.metamx.druid.kv.IndexedFloats; @@ -50,6 +59,7 @@ import org.joda.time.Interval; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; +import java.io.InputStream; import java.net.URL; import java.nio.ByteOrder; import java.nio.FloatBuffer; @@ -71,8 +81,9 @@ public class TestIndex private static MMappedIndex mmappedIndex = null; private static MMappedIndex mergedRealtime = null; - public static final String[] DIMENSIONS = new String[]{"provider", "quality", "placement", "placementish"}; - public static final String[] METRICS = new String[]{"index"}; + public static final String[] COLUMNS = new String[]{"ts", "provider", "quALIty", "plAcEmEnT", "pLacementish", "iNdEx"}; + public static final String[] DIMENSIONS = new String[]{"provider", "quALIty", "plAcEmEnT", "pLacementish"}; + public static final String[] METRICS = new String[]{"iNdEx"}; public static final Map dimIds = Maps.uniqueIndex( new RangeIterable(4), new Function() @@ -310,7 +321,9 @@ public class TestIndex Arrays.asList( com.metamx.druid.index.v1.IndexIO.mapDir(topFile), com.metamx.druid.index.v1.IndexIO.mapDir(bottomFile) - ), METRIC_AGGS, mergedFile + ), + METRIC_AGGS, + mergedFile ) ); @@ -324,9 +337,8 @@ public class TestIndex private static IncrementalIndex makeRealtimeIndex(final String resourceFilename) { - URL resource = TestIndex.class.getClassLoader().getResource(resourceFilename); - String filename = resource.getFile(); - log.info("Realtime loading index file[%s]", filename); + final URL resource = TestIndex.class.getClassLoader().getResource(resourceFilename); + log.info("Realtime loading index file[%s]", resource); final IncrementalIndex retVal = new IncrementalIndex( new DateTime("2011-01-12T00:00:00.000Z").getMillis(), QueryGranularity.NONE, METRIC_AGGS @@ -336,9 +348,24 @@ public class TestIndex int lineCount; try { lineCount = CharStreams.readLines( - GuavaUtils.joinFiles(new File(filename)), + CharStreams.newReaderSupplier( + new InputSupplier() + { + @Override + public InputStream getInput() throws IOException + { + return resource.openStream(); + } + }, + Charsets.UTF_8 + ), new LineProcessor() { + StringInputRowParser parser = new StringInputRowParser( + new TimestampSpec("ts", "iso"), + new 
DelimitedDataSpec("\t", Arrays.asList(COLUMNS), Arrays.asList(DIMENSIONS)), + Arrays.asList() + ); boolean runOnce = false; int lineCount = 0; @@ -352,35 +379,7 @@ public class TestIndex final String[] splits = line.split("\t"); - retVal.add( - new InputRow() - { - @Override - public long getTimestampFromEpoch() - { - return new DateTime(splits[0]).getMillis(); - } - - @Override - public List getDimensions() - { - return Arrays.asList(DIMENSIONS); - } - - @Override - public List getDimension(String dimension) - { - return Arrays.asList(splits[dimIds.get(dimension) + 1].split("\u0001")); - } - - @Override - public float getFloatMetric(String metric) - { - Preconditions.checkArgument(METRICS[0].equals(metric), "WTF!?"); - return Float.parseFloat(splits[5]); - } - } - ); + retVal.add(parser.parse(line)); ++lineCount; return true; diff --git a/server/src/test/java/com/metamx/druid/query/QueryRunnerTestHelper.java b/server/src/test/java/com/metamx/druid/query/QueryRunnerTestHelper.java index 6ded490829e..110ba41f562 100644 --- a/server/src/test/java/com/metamx/druid/query/QueryRunnerTestHelper.java +++ b/server/src/test/java/com/metamx/druid/query/QueryRunnerTestHelper.java @@ -54,7 +54,7 @@ public class QueryRunnerTestHelper public static final String dataSource = "testing"; public static final QueryGranularity gran = QueryGranularity.DAY; public static final QueryGranularity allGran = QueryGranularity.ALL; - public static final String providerDimension = "provider"; + public static final String providerDimension = "proVider"; public static final String qualityDimension = "quality"; public static final String placementishDimension = "placementish"; public static final String indexMetric = "index"; diff --git a/server/src/test/java/com/metamx/druid/query/search/SearchQueryRunnerTest.java b/server/src/test/java/com/metamx/druid/query/search/SearchQueryRunnerTest.java index f4d6f62ff1c..0802a99bb9a 100644 --- a/server/src/test/java/com/metamx/druid/query/search/SearchQueryRunnerTest.java +++ b/server/src/test/java/com/metamx/druid/query/search/SearchQueryRunnerTest.java @@ -21,6 +21,7 @@ package com.metamx.druid.query.search; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.metamx.common.guava.Sequences; import com.metamx.druid.Druids; @@ -42,6 +43,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.TreeMap; /** */ @@ -75,12 +77,12 @@ public class SearchQueryRunnerTest .query("a") .build(); - Map> expectedResults = new HashMap>(); + Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); expectedResults.put( QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("automotive", "mezzanine", "travel", "health", "entertainment") ); - expectedResults.put(QueryRunnerTestHelper.providerDimension, Sets.newHashSet("total_market")); + expectedResults.put(QueryRunnerTestHelper.providerDimension.toLowerCase(), Sets.newHashSet("total_market")); expectedResults.put(QueryRunnerTestHelper.placementishDimension, Sets.newHashSet("a")); checkSearchQuery(searchQuery, expectedResults); @@ -96,7 +98,7 @@ public class SearchQueryRunnerTest .query(new FragmentSearchQuerySpec(Arrays.asList("auto", "ve"), null)) .build(); - Map> expectedResults = new HashMap>(); + Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); expectedResults.put(QueryRunnerTestHelper.qualityDimension, 
Sets.newHashSet("automotive")); checkSearchQuery(searchQuery, expectedResults); @@ -129,7 +131,7 @@ public class SearchQueryRunnerTest @Test public void testSearchWithDimension2() { - Map> expectedResults = new HashMap>(); + Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); expectedResults.put(QueryRunnerTestHelper.providerDimension, new HashSet(Arrays.asList("total_market"))); checkSearchQuery( @@ -147,7 +149,7 @@ public class SearchQueryRunnerTest @Test public void testSearchWithDimensions1() { - Map> expectedResults = new HashMap>(); + Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); expectedResults.putAll( ImmutableMap.>of( QueryRunnerTestHelper.qualityDimension, @@ -167,7 +169,12 @@ public class SearchQueryRunnerTest Druids.newSearchQueryBuilder() .dataSource(QueryRunnerTestHelper.dataSource) .granularity(QueryRunnerTestHelper.allGran) - .dimensions(Arrays.asList(QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.providerDimension)) + .dimensions( + Arrays.asList( + QueryRunnerTestHelper.qualityDimension, + QueryRunnerTestHelper.providerDimension + ) + ) .intervals(QueryRunnerTestHelper.fullOnInterval) .query("a") .build(), @@ -178,14 +185,19 @@ public class SearchQueryRunnerTest @Test public void testSearchWithDimensions2() { - Map> expectedResults = new HashMap>(); + Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); expectedResults.put(QueryRunnerTestHelper.providerDimension, new HashSet(Arrays.asList("total_market"))); checkSearchQuery( Druids.newSearchQueryBuilder() .dataSource(QueryRunnerTestHelper.dataSource) .granularity(QueryRunnerTestHelper.allGran) - .dimensions(Arrays.asList(QueryRunnerTestHelper.placementishDimension, QueryRunnerTestHelper.providerDimension)) + .dimensions( + Arrays.asList( + QueryRunnerTestHelper.placementishDimension, + QueryRunnerTestHelper.providerDimension + ) + ) .intervals(QueryRunnerTestHelper.fullOnInterval) .query("mark") .build(), @@ -196,7 +208,7 @@ public class SearchQueryRunnerTest @Test public void testSearchWithSingleFilter1() { - Map> expectedResults = new HashMap>(); + Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); expectedResults.put( QueryRunnerTestHelper.qualityDimension, new HashSet(Arrays.asList("automotive")) ); @@ -217,7 +229,7 @@ public class SearchQueryRunnerTest @Test public void testSearchWithSingleFilter2() { - Map> expectedResults = new HashMap>(); + Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); expectedResults.put(QueryRunnerTestHelper.providerDimension, new HashSet(Arrays.asList("total_market"))); checkSearchQuery( @@ -236,7 +248,7 @@ public class SearchQueryRunnerTest @Test public void testSearchMultiAndFilter() { - Map> expectedResults = new HashMap>(); + Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); expectedResults.put(QueryRunnerTestHelper.qualityDimension, new HashSet(Arrays.asList("automotive"))); DimFilter filter = Druids.newAndDimFilterBuilder() @@ -270,7 +282,7 @@ public class SearchQueryRunnerTest @Test public void testSearchWithMultiOrFilter() { - Map> expectedResults = new HashMap>(); + Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); expectedResults.put(QueryRunnerTestHelper.qualityDimension, new HashSet(Arrays.asList("automotive"))); DimFilter filter = Druids.newOrDimFilterBuilder() @@ -304,7 +316,7 @@ public class SearchQueryRunnerTest @Test public void testSearchWithEmptyResults() { - Map> expectedResults = new HashMap>(); + Map> 
expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); checkSearchQuery( Druids.newSearchQueryBuilder() @@ -320,7 +332,7 @@ public class SearchQueryRunnerTest @Test public void testSearchWithFilterEmptyResults() { - Map> expectedResults = new HashMap>(); + Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); DimFilter filter = Druids.newAndDimFilterBuilder() .fields( @@ -364,7 +376,10 @@ public class SearchQueryRunnerTest for (SearchHit resultValue : resultValues) { String dimension = resultValue.getDimension(); String theValue = resultValue.getValue(); - Assert.assertTrue(expectedResults.containsKey(dimension)); + Assert.assertTrue( + String.format("Result had unknown dimension[%s]", dimension), + expectedResults.containsKey(dimension) + ); Set expectedSet = expectedResults.get(dimension); Assert.assertTrue( diff --git a/server/src/test/java/com/metamx/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/server/src/test/java/com/metamx/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 97d459f8f4f..ca42f63da1f 100644 --- a/server/src/test/java/com/metamx/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/server/src/test/java/com/metamx/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -61,9 +61,7 @@ public class TimeseriesQueryRunnerTest @Parameterized.Parameters public static Collection constructorFeeder() throws IOException { - return QueryRunnerTestHelper.makeQueryRunners( - new TimeseriesQueryRunnerFactory() - ); + return QueryRunnerTestHelper.makeQueryRunners(new TimeseriesQueryRunnerFactory()); } private final QueryRunner runner;
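
Note (not part of the patch): the test changes above repeatedly swap a plain HashMap for Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER). A minimal standalone sketch of that idiom follows, assuming only Guava's Maps/Sets helpers and java.lang.String.CASE_INSENSITIVE_ORDER (both real APIs used elsewhere in this diff); the class name and main method are hypothetical and exist purely for illustration. The point it demonstrates: once the query and aggregator constructors stop lower-casing their inputs, expected results keyed with mixed case (e.g. "proVider") must still match the lower-cased dimension names the storage adapters return.

import java.util.Map;
import java.util.Set;

import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

// Illustrative sketch only -- mirrors the map construction used in SearchQueryRunnerTest.
public class CaseInsensitiveExpectedResultsSketch
{
  public static void main(String[] args)
  {
    // A TreeMap ordered by a case-insensitive comparator, instead of a plain HashMap.
    final Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);

    // Key inserted with the mixed-case spelling QueryRunnerTestHelper now uses.
    expectedResults.put("proVider", Sets.newHashSet("total_market"));

    // Lookups succeed regardless of the casing reported back in query results.
    System.out.println(expectedResults.containsKey("provider"));  // true
    System.out.println(expectedResults.containsKey("PROVIDER"));  // true
    System.out.println(expectedResults.get("provider"));          // [total_market]
  }
}

This is the same lowercase-at-lookup convention the storage adapter changes adopt (dimension.toLowerCase() before consulting dimIdLookup or getDimValueLookup): callers may pass dimension and metric names in any case, while the stored column names remain lower-cased.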