diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
index 90ee42297ca..015375c7931 100644
--- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
+++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
@@ -181,9 +181,11 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAnalysis, SegmentMetadataQuery>
 {
diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java
--- a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java
+++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java
+  /* The SegmentMetadataQuery cache key may contain UTF-8 column name strings.
+   * Prepend 0xFF before the analysisTypes as a separator to avoid
+   * any potential confusion with string values.
+   */
+  public static final byte[] ANALYSIS_TYPES_CACHE_PREFIX = new byte[]{(byte) 0xFF};
+
   public enum AnalysisType
   {
     CARDINALITY,
@@ -43,18 +52,23 @@ public class SegmentMetadataQuery extends BaseQuery<SegmentAnalysis>
 
     @JsonValue
     @Override
-    public String toString() {
+    public String toString()
+    {
       return this.name().toLowerCase();
     }
 
     @JsonCreator
-    public static AnalysisType fromString(String name) {
+    public static AnalysisType fromString(String name)
+    {
       return valueOf(name.toUpperCase());
     }
+
+    public byte[] getCacheKey()
+    {
+      return new byte[]{(byte) this.ordinal()};
+    }
   }
-
-
 
   public static final Interval DEFAULT_INTERVAL = new Interval(
       JodaUtils.MIN_INSTANT, JodaUtils.MAX_INSTANT
   );
@@ -67,7 +81,7 @@ public class SegmentMetadataQuery extends BaseQuery<SegmentAnalysis>
   private final ColumnIncluderator toInclude;
   private final boolean merge;
   private final boolean usingDefaultInterval;
-  private final EnumSet<AnalysisType> analysisTypes;
+  private final EnumSet<AnalysisType> analysisTypes;
 
   @JsonCreator
   public SegmentMetadataQuery(
@@ -147,6 +161,26 @@ public class SegmentMetadataQuery extends BaseQuery<SegmentAnalysis>
     return analysisTypes.contains(AnalysisType.SIZE);
   }
 
+  public byte[] getAnalysisTypesCacheKey()
+  {
+    int size = 1;
+    List<byte[]> typeBytesList = Lists.newArrayListWithExpectedSize(analysisTypes.size());
+    for (AnalysisType analysisType : analysisTypes) {
+      final byte[] bytes = analysisType.getCacheKey();
+      typeBytesList.add(bytes);
+      size += bytes.length;
+    }
+
+    final ByteBuffer bytes = ByteBuffer.allocate(size);
+    bytes.put(ANALYSIS_TYPES_CACHE_PREFIX);
+    for (byte[] typeBytes : typeBytesList) {
+      bytes.put(typeBytes);
+    }
+
+    return bytes.array();
+  }
+
+
   @Override
   public Query<SegmentAnalysis> withOverriddenContext(Map<String, Object> contextOverride)
   {
diff --git a/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java
new file mode 100644
index 00000000000..1de554a69d7
--- /dev/null
+++ b/processing/src/test/java/io/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java
@@ -0,0 +1,90 @@
+/*
+* Licensed to Metamarkets Group Inc. (Metamarkets) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. Metamarkets licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied. See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*/
+
+package io.druid.query.metadata;
+
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import io.druid.jackson.DefaultObjectMapper;
+import io.druid.query.CacheStrategy;
+
+import io.druid.query.TableDataSource;
+import io.druid.query.metadata.metadata.ColumnAnalysis;
+import io.druid.query.metadata.metadata.SegmentAnalysis;
+import io.druid.query.metadata.metadata.SegmentMetadataQuery;
+import io.druid.query.spec.QuerySegmentSpecs;
+import io.druid.segment.column.ValueType;
+import org.joda.time.Interval;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class SegmentMetadataQueryQueryToolChestTest
+{
+  @Test
+  public void testCacheStrategy() throws Exception
+  {
+    SegmentMetadataQuery query = new SegmentMetadataQuery(
+        new TableDataSource("dummy"),
+        QuerySegmentSpecs.create("2015-01-01/2015-01-02"),
+        null,
+        null,
+        null,
+        null,
+        false
+    );
+
+    CacheStrategy<SegmentAnalysis, SegmentAnalysis, SegmentMetadataQuery> strategy =
+        new SegmentMetadataQueryQueryToolChest(null).getCacheStrategy(query);
+
+    // Test cache key generation
+    byte[] expectedKey = {0x04, 0x01, (byte) 0xFF, 0x00, 0x01};
+    byte[] actualKey = strategy.computeCacheKey(query);
+    Assert.assertArrayEquals(expectedKey, actualKey);
+
+    SegmentAnalysis result = new SegmentAnalysis(
+        "testSegment",
+        ImmutableList.of(
+            new Interval("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")
+        ),
+        ImmutableMap.of(
+            "placement",
+            new ColumnAnalysis(
+                ValueType.STRING.toString(),
+                10881,
+                1,
+                null
+            )
+        ), 71982
+    );
+
+    Object preparedValue = strategy.prepareForCache().apply(result);
+
+    ObjectMapper objectMapper = new DefaultObjectMapper();
+    SegmentAnalysis fromCacheValue = objectMapper.readValue(
+        objectMapper.writeValueAsBytes(preparedValue),
+        strategy.getCacheObjectClazz()
+    );
+
+    SegmentAnalysis fromCacheResult = strategy.pullFromCache().apply(fromCacheValue);
+
+    Assert.assertEquals(result, fromCacheResult);
+  }
+}
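As a reading aid, and not part of the patch itself: below is a minimal, standalone sketch of the analysis-types fragment produced by the new getAnalysisTypesCacheKey() above. The class and method names in the sketch are hypothetical. The reading of the test's expected key {0x04, 0x01, 0xFF, 0x00, 0x01} as a segment-metadata query-type prefix (0x04) plus a column-includerator key (0x01) in front of the analysis-types fragment is inferred from the test, since the tool chest's computeCacheKey body is not shown in this excerpt.

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.EnumSet;

// Hypothetical illustration class; only the 0xFF prefix and the ordinal-byte
// layout come from the patch above.
public class AnalysisTypesCacheKeySketch
{
  // Mirrors the first two constants of SegmentMetadataQuery.AnalysisType (ordinals 0 and 1).
  enum AnalysisType { CARDINALITY, SIZE }

  // Separator from the patch: an ordinal byte can never be confused with the
  // UTF-8 column name bytes that may precede it in the full cache key.
  static final byte[] ANALYSIS_TYPES_CACHE_PREFIX = new byte[]{(byte) 0xFF};

  // Same layout as SegmentMetadataQuery.getAnalysisTypesCacheKey(): one prefix byte,
  // then one ordinal byte per requested analysis type.
  static byte[] analysisTypesCacheKey(EnumSet<AnalysisType> analysisTypes)
  {
    ByteBuffer bytes = ByteBuffer.allocate(1 + analysisTypes.size());
    bytes.put(ANALYSIS_TYPES_CACHE_PREFIX);
    for (AnalysisType analysisType : analysisTypes) {
      bytes.put((byte) analysisType.ordinal());
    }
    return bytes.array();
  }

  public static void main(String[] args)
  {
    // The unit test passes analysisTypes = null and still expects ordinals 0x00 and 0x01,
    // so the default analysis types there resolve to CARDINALITY and SIZE.
    byte[] fragment = analysisTypesCacheKey(EnumSet.of(AnalysisType.CARDINALITY, AnalysisType.SIZE));
    System.out.println(Arrays.toString(fragment)); // prints [-1, 0, 1], i.e. 0xFF 0x00 0x01
  }
}

The 0xFF separator works because 0xFF is never a valid byte in UTF-8 encoded text, so a column name included earlier in the cache key cannot run into, or be mistaken for, the analysis-type ordinals that follow it.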