diff --git a/processing/src/main/java/io/druid/query/Druids.java b/processing/src/main/java/io/druid/query/Druids.java index 775ff99285e..8ab3bbe5454 100644 --- a/processing/src/main/java/io/druid/query/Druids.java +++ b/processing/src/main/java/io/druid/query/Druids.java @@ -17,17 +17,22 @@ package io.druid.query; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.granularity.QueryGranularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; +import io.druid.query.datasourcemetadata.DataSourceMetadataQuery; +import io.druid.query.dimension.DefaultDimensionSpec; +import io.druid.query.dimension.DimensionSpec; import io.druid.query.filter.AndDimFilter; import io.druid.query.filter.DimFilter; import io.druid.query.filter.NoopDimFilter; import io.druid.query.filter.NotDimFilter; import io.druid.query.filter.OrDimFilter; import io.druid.query.filter.SelectorDimFilter; -import io.druid.query.datasourcemetadata.DataSourceMetadataQuery; import io.druid.query.metadata.metadata.ColumnIncluderator; import io.druid.query.metadata.metadata.SegmentMetadataQuery; import io.druid.query.search.SearchResultValue; @@ -44,6 +49,7 @@ import io.druid.query.timeseries.TimeseriesQuery; import org.joda.time.DateTime; import org.joda.time.Interval; +import javax.annotation.Nullable; import java.util.List; import java.util.Map; @@ -51,6 +57,16 @@ import java.util.Map; */ public class Druids { + public static final Function DIMENSION_IDENTITY = new Function() + { + @Nullable + @Override + public DimensionSpec apply(String input) + { + return new DefaultDimensionSpec(input, input); + } + }; + private Druids() { throw new AssertionError(); @@ -60,7 +76,7 @@ public class Druids * A Builder for AndDimFilter. * * Required: fields() must be called before build() - * + * * Usage example: *

    *   AndDimFilter andDimFilter = Druids.newAndDimFilterBuilder()
@@ -104,9 +120,9 @@ public class Druids
 
   /**
    * A Builder for OrDimFilter.
-   * 
+   *
    * Required: fields() must be called before build()
-   * 
+   *
    * Usage example:
    * 

    *   OrDimFilter orDimFilter = Druids.newOrDimFilterBuilder()
@@ -159,9 +175,9 @@ public class Druids
 
   /**
    * A Builder for NotDimFilter.
-   * 
+   *
    * Required: field() must be called before build()
-   * 
+   *
    * Usage example:
    * 

    *   NotDimFilter notDimFilter = Druids.newNotDimFilterBuilder()
@@ -205,9 +221,9 @@ public class Druids
 
   /**
    * A Builder for SelectorDimFilter.
-   * 
+   *
    * Required: dimension() and value() must be called before build()
-   * 
+   *
    * Usage example:
    * 

    *   Selector selDimFilter = Druids.newSelectorDimFilterBuilder()
@@ -284,10 +300,10 @@ public class Druids
 
   /**
    * A Builder for TimeseriesQuery.
-   * 
+   *
    * Required: dataSource(), intervals(), and aggregators() must be called before build()
    * Optional: filters(), granularity(), postAggregators(), and context() can be called before build()
-   * 
+   *
    * Usage example:
    * 

    *   TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
@@ -482,11 +498,11 @@ public class Druids
 
   /**
    * A Builder for SearchQuery.
-   * 
+   *
    * Required: dataSource(), intervals(), dimensions() and query() must be called before build()
-   * 
+   *
    * Optional: filters(), granularity(), and context() can be called before build()
-   * 
+   *
    * Usage example:
    * 

    *   SearchQuery query = Druids.newSearchQueryBuilder()
@@ -506,7 +522,7 @@ public class Druids
     private QueryGranularity granularity;
     private int limit;
     private QuerySegmentSpec querySegmentSpec;
-    private List<String> dimensions;
+    private List<DimensionSpec> dimensions;
     private SearchQuerySpec querySpec;
     private Map context;
 
@@ -634,12 +650,24 @@ public class Druids
     }
 
     public SearchQueryBuilder dimensions(String d)
+    {
+      dimensions = ImmutableList.of(DIMENSION_IDENTITY.apply(d));
+      return this;
+    }
+
+    public SearchQueryBuilder dimensions(Iterable<String> d)
+    {
+      dimensions = ImmutableList.copyOf(Iterables.transform(d, DIMENSION_IDENTITY));
+      return this;
+    }
+
+    public SearchQueryBuilder dimensions(DimensionSpec d)
     {
       dimensions = Lists.newArrayList(d);
       return this;
     }
 
-    public SearchQueryBuilder dimensions(List<String> d)
+    public SearchQueryBuilder dimensions(List<DimensionSpec> d)
     {
       dimensions = d;
       return this;
@@ -677,9 +705,9 @@ public class Druids
 
   /**
    * A Builder for TimeBoundaryQuery.
-   * 
+   *
    * Required: dataSource() must be called before build()
-   * 
+   *
    * Usage example:
    * 

    *   TimeBoundaryQuery query = new MaxTimeQueryBuilder()
@@ -773,9 +801,9 @@ public class Druids
 
   /**
    * A Builder for Result.
-   * 
+   *
    * Required: timestamp() and value() must be called before build()
-   * 
+   *
    * Usage example:
    * 

    *   Result<T> result = Druids.newResultBuilder()
@@ -839,9 +867,9 @@ public class Druids
 
   /**
    * A Builder for SegmentMetadataQuery.
-   * 
+   *
    * Required: dataSource(), intervals() must be called before build()
-   * 
+   *
    * Usage example:
    * 

    *   SegmentMetadataQuery query = new SegmentMetadataQueryBuilder()
@@ -947,9 +975,9 @@ public class Druids
 
   /**
    * A Builder for SelectQuery.
-   * 
+   *
    * Required: dataSource(), intervals() must be called before build()
-   * 
+   *
    * Usage example:
    * 

    *   SelectQuery query = new SelectQueryBuilder()
diff --git a/processing/src/main/java/io/druid/query/extraction/ExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/ExtractionFn.java
index a1a05de3034..422b60e37c4 100644
--- a/processing/src/main/java/io/druid/query/extraction/ExtractionFn.java
+++ b/processing/src/main/java/io/druid/query/extraction/ExtractionFn.java
@@ -30,7 +30,8 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
     @JsonSubTypes.Type(name = "searchQuery", value = SearchQuerySpecDimExtractionFn.class),
     @JsonSubTypes.Type(name = "javascript", value = JavascriptExtractionFn.class),
     @JsonSubTypes.Type(name = "timeFormat", value = TimeFormatExtractionFn.class),
-    @JsonSubTypes.Type(name = "lookup", value = LookupExtractionFn.class)
+    @JsonSubTypes.Type(name = "lookup", value = LookupExtractionFn.class),
+    @JsonSubTypes.Type(name = "identity", value = IdentityExtractionFn.class)
 })
 /**
  * An ExtractionFn is a function that can be used to transform the values of a column (typically a dimension)
diff --git a/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java
new file mode 100644
index 00000000000..0a21c1c90ea
--- /dev/null
+++ b/processing/src/main/java/io/druid/query/extraction/IdentityExtractionFn.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.extraction;
+
+import com.google.common.base.Strings;
+
+public class IdentityExtractionFn implements ExtractionFn
+{
+  private static final byte CACHE_TYPE_ID = 0x6;
+  @Override
+  public byte[] getCacheKey()
+  {
+    return new byte[]{CACHE_TYPE_ID};
+  }
+
+  @Override
+  public String apply(Object value)
+  {
+    return value == null ? null : Strings.emptyToNull(value.toString());
+  }
+
+  @Override
+  public String apply(String value)
+  {
+    return Strings.emptyToNull(value);
+  }
+
+  @Override
+  public String apply(long value)
+  {
+    return Long.toString(value);
+  }
+
+  @Override
+  public boolean preservesOrdering()
+  {
+    return true;
+  }
+
+  @Override
+  public ExtractionType getExtractionType()
+  {
+    return ExtractionType.ONE_TO_ONE;
+  }
+}
diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java
index d7dde83a657..347a976d02e 100644
--- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java
+++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java
@@ -20,10 +20,10 @@ package io.druid.query.search;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.google.common.base.Function;
 import com.google.common.base.Functions;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Ordering;
-import com.google.common.collect.Sets;
 import com.google.common.primitives.Ints;
 import com.google.inject.Inject;
 import com.metamx.common.IAE;
@@ -32,19 +32,19 @@ import com.metamx.common.guava.MergeSequence;
 import com.metamx.common.guava.Sequence;
 import com.metamx.common.guava.Sequences;
 import com.metamx.common.guava.nary.BinaryFn;
-import com.metamx.common.StringUtils;
 import com.metamx.emitter.service.ServiceMetricEvent;
 import io.druid.collections.OrderedMergeSequence;
 import io.druid.query.CacheStrategy;
+import io.druid.query.DruidMetrics;
 import io.druid.query.IntervalChunkingQueryRunnerDecorator;
 import io.druid.query.Query;
-import io.druid.query.DruidMetrics;
 import io.druid.query.QueryRunner;
 import io.druid.query.QueryToolChest;
 import io.druid.query.Result;
 import io.druid.query.ResultGranularTimestampComparator;
 import io.druid.query.ResultMergeQueryRunner;
 import io.druid.query.aggregation.MetricManipulationFn;
+import io.druid.query.dimension.DimensionSpec;
 import io.druid.query.filter.DimFilter;
 import io.druid.query.search.search.SearchHit;
 import io.druid.query.search.search.SearchQuery;
@@ -53,9 +53,9 @@ import org.joda.time.DateTime;
 
 import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 /**
  */
@@ -152,16 +152,15 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResultValue>, SearchQuery>
-        final Set<String> dimensions = Sets.newTreeSet();
-        if (query.getDimensions() != null) {
-          dimensions.addAll(query.getDimensions());
-        }
+        final Collection<DimensionSpec> dimensions = query.getDimensions() == null
+                                                     ? ImmutableList.<DimensionSpec>of()
+                                                     : query.getDimensions();
 
         final byte[][] dimensionsBytes = new byte[dimensions.size()][];
         int dimensionsBytesSize = 0;
         int index = 0;
-        for (String dimension : dimensions) {
-          dimensionsBytes[index] = StringUtils.toUtf8(dimension);
+        for (DimensionSpec dimension : dimensions) {
+          dimensionsBytes[index] = dimension.getCacheKey();
           dimensionsBytesSize += dimensionsBytes[index].length;
           ++index;
         }
@@ -215,7 +214,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResultValue>, SearchQuery>
             List<Object> result = (List) input;
 
             return new Result<>(
-                new DateTime(((Number)result.get(0)).longValue()),
+                new DateTime(((Number) result.get(0)).longValue()),
                 new SearchResultValue(
                     Lists.transform(
                         (List) result.get(1),
diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
index e6167813154..22443f726f3 100644
--- a/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
+++ b/processing/src/main/java/io/druid/query/search/SearchQueryRunner.java
@@ -17,8 +17,11 @@
 
 package io.druid.query.search;
 
+import com.google.common.base.Function;
+import com.google.common.base.Functions;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
@@ -31,9 +34,13 @@ import com.metamx.common.guava.Sequence;
 import com.metamx.common.guava.Sequences;
 import com.metamx.emitter.EmittingLogger;
 import io.druid.granularity.QueryGranularity;
+import io.druid.query.Druids;
 import io.druid.query.Query;
 import io.druid.query.QueryRunner;
 import io.druid.query.Result;
+import io.druid.query.dimension.DimensionSpec;
+import io.druid.query.extraction.ExtractionFn;
+import io.druid.query.extraction.IdentityExtractionFn;
 import io.druid.query.filter.Filter;
 import io.druid.query.search.search.SearchHit;
 import io.druid.query.search.search.SearchQuery;
@@ -49,6 +56,7 @@ import io.druid.segment.column.Column;
 import io.druid.segment.data.IndexedInts;
 import io.druid.segment.filter.Filters;
 
+import javax.annotation.Nullable;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -59,7 +67,6 @@ import java.util.TreeSet;
 public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
 {
   private static final EmittingLogger log = new EmittingLogger(SearchQueryRunner.class);
-
   private final Segment segment;
 
   public SearchQueryRunner(Segment segment)
@@ -79,44 +86,50 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
 
     final SearchQuery query = (SearchQuery) input;
     final Filter filter = Filters.convertDimensionFilters(query.getDimensionsFilter());
-    final List<String> dimensions = query.getDimensions();
+    final List<DimensionSpec> dimensions = query.getDimensions();
     final SearchQuerySpec searchQuerySpec = query.getQuery();
     final int limit = query.getLimit();
 
+    // Closing this will cause segfaults in unit tests.
     final QueryableIndex index = segment.asQueryableIndex();
+
     if (index != null) {
       final TreeSet<SearchHit> retVal = Sets.newTreeSet(query.getSort().getComparator());
 
-      Iterable<String> dimsToSearch;
+      Iterable<DimensionSpec> dimsToSearch;
       if (dimensions == null || dimensions.isEmpty()) {
-        dimsToSearch = index.getAvailableDimensions();
+        dimsToSearch = Iterables.transform(index.getAvailableDimensions(), Druids.DIMENSION_IDENTITY);
       } else {
         dimsToSearch = dimensions;
       }
 
-      BitmapFactory bitmapFactory = index.getBitmapFactoryForDimensions();
+      final BitmapFactory bitmapFactory = index.getBitmapFactoryForDimensions();
 
       final ImmutableBitmap baseFilter;
       if (filter == null) {
         baseFilter = bitmapFactory.complement(bitmapFactory.makeEmptyImmutableBitmap(), index.getNumRows());
       } else {
-        ColumnSelectorBitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(bitmapFactory, index);
+        final ColumnSelectorBitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(bitmapFactory, index);
         baseFilter = filter.getBitmapIndex(selector);
       }
 
-      for (String dimension : dimsToSearch) {
-        final Column column = index.getColumn(dimension);
+      for (DimensionSpec dimension : dimsToSearch) {
+        final Column column = index.getColumn(dimension.getDimension());
         if (column == null) {
           continue;
         }
 
         final BitmapIndex bitmapIndex = column.getBitmapIndex();
+        ExtractionFn extractionFn = dimension.getExtractionFn();
+        if (extractionFn == null) {
+          extractionFn = new IdentityExtractionFn();
+        }
         if (bitmapIndex != null) {
           for (int i = 0; i < bitmapIndex.getCardinality(); ++i) {
-            String dimVal = Strings.nullToEmpty(bitmapIndex.getValue(i));
+            String dimVal = Strings.nullToEmpty(extractionFn.apply(bitmapIndex.getValue(i)));
             if (searchQuerySpec.accept(dimVal) &&
                 bitmapFactory.intersection(Arrays.asList(baseFilter, bitmapIndex.getBitmap(i))).size() > 0) {
-              retVal.add(new SearchHit(dimension, dimVal));
+              retVal.add(new SearchHit(dimension.getOutputName(), dimVal));
               if (retVal.size() >= limit) {
                 return makeReturnResult(limit, retVal);
               }
@@ -139,9 +152,9 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
       );
     }
 
-    final Iterable<String> dimsToSearch;
+    final Iterable<DimensionSpec> dimsToSearch;
     if (dimensions == null || dimensions.isEmpty()) {
-      dimsToSearch = adapter.getAvailableDimensions();
+      dimsToSearch = Iterables.transform(adapter.getAvailableDimensions(), Druids.DIMENSION_IDENTITY);
     } else {
       dimsToSearch = dimensions;
     }
@@ -160,9 +173,9 @@ public class SearchQueryRunner implements QueryRunner<Result<SearchResultValue>>
             }
 
             Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
-            for (String dim : dimsToSearch) {
-              // switching to using DimensionSpec for search would allow the use of extractionFn here.
-              dimSelectors.put(dim, cursor.makeDimensionSelector(dim, null));
+            for (DimensionSpec dim : dimsToSearch) {
+              dimSelectors.put(
+                  dim.getOutputName(),
+                  cursor.makeDimensionSelector(dim.getDimension(), dim.getExtractionFn())
+              );
             }
 
             while (!cursor.isDone()) {
diff --git a/processing/src/main/java/io/druid/query/search/search/SearchQuery.java b/processing/src/main/java/io/druid/query/search/search/SearchQuery.java
index 07d0f217d3c..4f969f60143 100644
--- a/processing/src/main/java/io/druid/query/search/search/SearchQuery.java
+++ b/processing/src/main/java/io/druid/query/search/search/SearchQuery.java
@@ -27,6 +27,7 @@ import io.druid.query.BaseQuery;
 import io.druid.query.DataSource;
 import io.druid.query.Query;
 import io.druid.query.Result;
+import io.druid.query.dimension.DimensionSpec;
 import io.druid.query.filter.DimFilter;
 import io.druid.query.search.SearchResultValue;
 import io.druid.query.spec.QuerySegmentSpec;
@@ -42,7 +43,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
   private final DimFilter dimFilter;
   private final SearchSortSpec sortSpec;
   private final QueryGranularity granularity;
-  private final List<String> dimensions;
+  private final List<DimensionSpec> dimensions;
   private final SearchQuerySpec querySpec;
   private final int limit;
 
@@ -53,7 +54,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
       @JsonProperty("granularity") QueryGranularity granularity,
       @JsonProperty("limit") int limit,
       @JsonProperty("intervals") QuerySegmentSpec querySegmentSpec,
-      @JsonProperty("searchDimensions") List<String> dimensions,
+      @JsonProperty("searchDimensions") List<DimensionSpec> dimensions,
       @JsonProperty("query") SearchQuerySpec querySpec,
       @JsonProperty("sort") SearchSortSpec sortSpec,
       @JsonProperty("context") Map context
@@ -64,17 +65,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
     this.sortSpec = sortSpec == null ? new LexicographicSearchSortSpec() : sortSpec;
     this.granularity = granularity == null ? QueryGranularity.ALL : granularity;
     this.limit = (limit == 0) ? 1000 : limit;
-    this.dimensions = (dimensions == null) ? null : Lists.transform(
-        dimensions,
-        new Function<String, String>()
-        {
-          @Override
-          public String apply(@Nullable String input)
-          {
-            return input;
-          }
-        }
-    );
+    this.dimensions = dimensions;
     this.querySpec = querySpec;
 
     Preconditions.checkNotNull(querySegmentSpec, "Must specify an interval");
@@ -160,7 +151,7 @@ public class SearchQuery extends BaseQuery<Result<SearchResultValue>>
   }
 
   @JsonProperty("searchDimensions")
-  public List<String> getDimensions()
+  public List<DimensionSpec> getDimensions()
   {
     return dimensions;
   }
diff --git a/processing/src/test/java/io/druid/query/dimension/DefaultDimensionSpecTest.java b/processing/src/test/java/io/druid/query/dimension/DefaultDimensionSpecTest.java
new file mode 100644
index 00000000000..339cb227050
--- /dev/null
+++ b/processing/src/test/java/io/druid/query/dimension/DefaultDimensionSpecTest.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.dimension;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.druid.jackson.DefaultObjectMapper;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+
+public class DefaultDimensionSpecTest
+{
+
+  private final ObjectMapper mapper = new DefaultObjectMapper();
+
+  @Test
+  public void testEqualsSerde() throws IOException
+  {
+    final String name = "foo";
+    final DimensionSpec spec = new DefaultDimensionSpec(name, name);
+    final String json = mapper.writeValueAsString(spec);
+    final DimensionSpec other = mapper.readValue(json, DimensionSpec.class);
+    Assert.assertEquals(spec.toString(), other.toString());
+    Assert.assertEquals(spec, other);
+    Assert.assertEquals(spec.hashCode(), other.hashCode());
+  }
+}
diff --git a/processing/src/test/java/io/druid/query/dimension/LegacyDimensionSpecTest.java b/processing/src/test/java/io/druid/query/dimension/LegacyDimensionSpecTest.java
new file mode 100644
index 00000000000..dc708b802de
--- /dev/null
+++ b/processing/src/test/java/io/druid/query/dimension/LegacyDimensionSpecTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package io.druid.query.dimension;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import io.druid.jackson.DefaultObjectMapper;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.List;
+
+public class LegacyDimensionSpecTest
+{
+  private final ObjectMapper mapper = new DefaultObjectMapper();
+
+  @Test
+  public void testEqualsSerde() throws IOException
+  {
+    final String dimension = "testDimension";
+    final List<DimensionSpec> deserializedSpecs = mapper.readValue(
+        String.format("[\"%s\"]", dimension), new TypeReference<List<DimensionSpec>>()
+        {
+        }
+    );
+    Assert.assertEquals(dimension, deserializedSpecs.get(0).getDimension());
+    Assert.assertEquals(dimension, deserializedSpecs.get(0).getOutputName());
+    Assert.assertEquals(new LegacyDimensionSpec(dimension), deserializedSpecs.get(0));
+  }
+}
diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java
index c493b9e8b3f..9005f0c398d 100644
--- a/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java
+++ b/processing/src/test/java/io/druid/query/search/SearchQueryQueryToolChestTest.java
@@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableList;
 import io.druid.granularity.QueryGranularity;
 import io.druid.jackson.DefaultObjectMapper;
 import io.druid.query.CacheStrategy;
+import io.druid.query.Druids;
 import io.druid.query.Result;
 import io.druid.query.TableDataSource;
 import io.druid.query.search.search.FragmentSearchQuerySpec;
@@ -55,7 +56,7 @@ public class SearchQueryQueryToolChestTest
                         )
                     )
                 ),
-                ImmutableList.of("dim1"),
+                ImmutableList.of(Druids.DIMENSION_IDENTITY.apply("dim1")),
                 new FragmentSearchQuerySpec(ImmutableList.of("a", "b")),
                 null,
                 null
diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java
index 63e9a09a041..d72d477ec96 100644
--- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java
+++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerTest.java
@@ -17,7 +17,6 @@
 
 package io.druid.query.search;
 
-import com.google.common.base.Function;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -27,7 +26,11 @@ import io.druid.query.Druids;
 import io.druid.query.QueryRunner;
 import io.druid.query.QueryRunnerTestHelper;
 import io.druid.query.Result;
+import io.druid.query.dimension.ExtractionDimensionSpec;
+import io.druid.query.extraction.LookupExtractionFn;
+import io.druid.query.extraction.MapLookupExtractor;
 import io.druid.query.filter.DimFilter;
+import io.druid.query.filter.ExtractionDimFilter;
 import io.druid.query.search.search.FragmentSearchQuerySpec;
 import io.druid.query.search.search.SearchHit;
 import io.druid.query.search.search.SearchQuery;
@@ -38,10 +41,8 @@ import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
-import javax.annotation.Nullable;
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -239,6 +240,44 @@ public class SearchQueryRunnerTest
     );
   }
 
+
+  @Test
+  public void testSearchWithExtractionFilter1()
+  {
+    final String automotiveSnowman = "automotive☃";
+    Map<String, Set<String>> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
+    expectedResults.put(
+        QueryRunnerTestHelper.qualityDimension, new HashSet<String>(Arrays.asList(automotiveSnowman))
+    );
+
+
+    final LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(
+      new MapLookupExtractor(ImmutableMap.of("automotive", automotiveSnowman)),
+      true,
+      null,
+      true
+    );
+
+    checkSearchQuery(
+        Druids.newSearchQueryBuilder()
+              .dataSource(QueryRunnerTestHelper.dataSource)
+              .granularity(QueryRunnerTestHelper.allGran)
+              .filters(new ExtractionDimFilter(QueryRunnerTestHelper.qualityDimension, automotiveSnowman, lookupExtractionFn, null))
+              .intervals(QueryRunnerTestHelper.fullOnInterval)
+              .dimensions(
+                  new ExtractionDimensionSpec(
+                      QueryRunnerTestHelper.qualityDimension,
+                      null,
+                      lookupExtractionFn,
+                      null
+                  )
+              )
+              .query("☃")
+              .build(),
+        expectedResults
+    );
+  }
+
   @Test
   public void testSearchWithSingleFilter1()
   {
diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryTest.java
index 1f785425cdc..ee812fa5bb0 100644
--- a/processing/src/test/java/io/druid/query/search/SearchQueryTest.java
+++ b/processing/src/test/java/io/druid/query/search/SearchQueryTest.java
@@ -22,6 +22,8 @@ import io.druid.jackson.DefaultObjectMapper;
 import io.druid.query.Druids;
 import io.druid.query.Query;
 import io.druid.query.QueryRunnerTestHelper;
+import io.druid.query.dimension.DefaultDimensionSpec;
+import io.druid.query.dimension.LegacyDimensionSpec;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -35,11 +37,11 @@ public class SearchQueryTest
   public void testQuerySerialization() throws IOException
   {
     Query query = Druids.newSearchQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.allGran)
-        .intervals(QueryRunnerTestHelper.fullOnInterval)
-        .query("a")
-        .build();
+                        .dataSource(QueryRunnerTestHelper.dataSource)
+                        .granularity(QueryRunnerTestHelper.allGran)
+                        .intervals(QueryRunnerTestHelper.fullOnInterval)
+                        .query("a")
+                        .build();
 
     String json = jsonMapper.writeValueAsString(query);
     Query serdeQuery = jsonMapper.readValue(json, Query.class);
@@ -47,4 +49,60 @@ public class SearchQueryTest
     Assert.assertEquals(query, serdeQuery);
   }
 
+  @Test
+  public void testEquals()
+  {
+    Query query1 = Druids.newSearchQueryBuilder()
+                         .dataSource(QueryRunnerTestHelper.dataSource)
+                         .granularity(QueryRunnerTestHelper.allGran)
+                         .intervals(QueryRunnerTestHelper.fullOnInterval)
+                         .dimensions(
+                             new DefaultDimensionSpec(
+                                 QueryRunnerTestHelper.qualityDimension,
+                                 QueryRunnerTestHelper.qualityDimension
+                             )
+                         )
+                         .query("a")
+                         .build();
+    Query query2 = Druids.newSearchQueryBuilder()
+                         .dataSource(QueryRunnerTestHelper.dataSource)
+                         .granularity(QueryRunnerTestHelper.allGran)
+                         .intervals(QueryRunnerTestHelper.fullOnInterval)
+                         .dimensions(
+                             new DefaultDimensionSpec(
+                                 QueryRunnerTestHelper.qualityDimension,
+                                 QueryRunnerTestHelper.qualityDimension
+                             )
+                         )
+                         .query("a")
+                         .build();
+
+    Assert.assertEquals(query1, query2);
+  }
+
+  @Test
+  public void testSerDe() throws IOException
+  {
+    Query query = Druids.newSearchQueryBuilder()
+                        .dataSource(QueryRunnerTestHelper.dataSource)
+                        .granularity(QueryRunnerTestHelper.allGran)
+                        .intervals(QueryRunnerTestHelper.fullOnInterval)
+                        .dimensions(new LegacyDimensionSpec(QueryRunnerTestHelper.qualityDimension))
+                        .query("a")
+                        .build();
+    final String json =
+        "{\"queryType\":\"search\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"filter\":null,\"granularity\":{\"type\":\"all\"},\"limit\":1000,\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z\"]},\"searchDimensions\":[\""
+        + QueryRunnerTestHelper.qualityDimension
+        + "\"],\"query\":{\"type\":\"insensitive_contains\",\"value\":\"a\"},\"sort\":{\"type\":\"lexicographic\"},\"context\":null}";
+    final Query serdeQuery = jsonMapper.readValue(json, Query.class);
+    Assert.assertEquals(query.toString(), serdeQuery.toString());
+    Assert.assertEquals(query, serdeQuery);
+
+    final String json2 =
+        "{\"queryType\":\"search\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"filter\":null,\"granularity\":{\"type\":\"all\"},\"limit\":1000,\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z\"]},\"searchDimensions\":[\"quality\"],\"query\":{\"type\":\"insensitive_contains\",\"value\":\"a\"},\"sort\":{\"type\":\"lexicographic\"},\"context\":null}";
+    final Query serdeQuery2 = jsonMapper.readValue(json2, Query.class);
+
+    Assert.assertEquals(query.toString(), serdeQuery2.toString());
+    Assert.assertEquals(query, serdeQuery2);
+  }
 }