From 0d427923c01cd27fff4a270e9e72393a3ae9cdc5 Mon Sep 17 00:00:00 2001
From: Nishant
Date: Fri, 10 Jun 2016 06:19:47 +0530
Subject: [PATCH] fix caching for search results (#3119)

* fix caching for search results

properly read count when reading from cache.

* fix NPE during merging search count and add test

* Update cache key to invalidate prev results
---
 .../io/druid/query/search/SearchBinaryFn.java  |  7 ++-
 .../search/SearchQueryQueryToolChest.java      |  7 +--
 .../query/search/SearchBinaryFnTest.java       | 45 +++++++++++++++--
 .../client/CachingClusteredClientTest.java     | 50 +++++++++----------
 4 files changed, 76 insertions(+), 33 deletions(-)

diff --git a/processing/src/main/java/io/druid/query/search/SearchBinaryFn.java b/processing/src/main/java/io/druid/query/search/SearchBinaryFn.java
index beca47bbeed..734bdbd2260 100644
--- a/processing/src/main/java/io/druid/query/search/SearchBinaryFn.java
+++ b/processing/src/main/java/io/druid/query/search/SearchBinaryFn.java
@@ -86,12 +86,17 @@ public class SearchBinaryFn
         continue;
       }
       if (prev.equals(searchHit)) {
-        if (prev.getCount() != null) {
+        if (prev.getCount() != null && searchHit.getCount() != null) {
           prev = new SearchHit(
               prev.getDimension(),
               prev.getValue(),
               prev.getCount() + searchHit.getCount()
           );
+        } else {
+          prev = new SearchHit(
+              prev.getDimension(),
+              prev.getValue()
+          );
         }
       } else {
         results.add(prev);
diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java
index 0c37d478e60..e7c008b8050 100644
--- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java
+++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java
@@ -62,7 +62,7 @@ import java.util.Map;
  */
 public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResultValue>, SearchQuery>
 {
-  private static final byte SEARCH_QUERY = 0x2;
+  private static final byte SEARCH_QUERY = 0x15;
   private static final TypeReference<Result<SearchResultValue>> TYPE_REFERENCE = new TypeReference<Result<SearchResultValue>>()
   {
   };
@@ -221,8 +221,9 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResultValue>, SearchQuery>
diff --git a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java
--- a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java
+++ b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java
     Iterator<SearchHit> i1 = ((Iterable<SearchHit>) o1).iterator();
     Iterator<SearchHit> i2 = ((Iterable<SearchHit>) o2).iterator();
     while (i1.hasNext() && i2.hasNext()) {
-      Assert.assertEquals(i1.next(), i2.next());
+      SearchHit s1 = i1.next();
+      SearchHit s2 = i2.next();
+      Assert.assertEquals(s1, s2);
+      Assert.assertEquals(s1.getCount(), s2.getCount());
     }
     Assert.assertTrue(!i1.hasNext() && !i2.hasNext());
   }
@@ -336,4 +339,38 @@ public class SearchBinaryFnTest
     Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp());
     assertSearchMergeResult(expected.getValue(), actual.getValue());
   }
+
+  @Test
+  public void testMergeCountWithNull() {
+    Result<SearchResultValue> r1 = new Result<SearchResultValue>(
+        currTime,
+        new SearchResultValue(
+            ImmutableList.of(
+                new SearchHit(
+                    "blah",
+                    "foo"
+                )
+            )
+        )
+    );
+
+    Result<SearchResultValue> r2 = new Result<SearchResultValue>(
+        currTime,
+        new SearchResultValue(
+            ImmutableList.of(
+                new SearchHit(
+                    "blah",
+                    "foo",
+                    3
+                )
+            )
+        )
+    );
+
+    Result<SearchResultValue> expected = r1;
+
+    Result<SearchResultValue> actual = new SearchBinaryFn(new LexicographicSearchSortSpec(), QueryGranularities.ALL, Integer.MAX_VALUE).apply(r1, r2);
+    Assert.assertEquals(expected.getTimestamp(), actual.getTimestamp());
+    assertSearchMergeResult(expected.getValue(), actual.getValue());
+  }
 }
diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java
index 277669ca06e..1ca49b2a1c4 100644
--- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java
+++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java
@@ -1107,27 +1107,27 @@ public class CachingClusteredClientTest
         client,
         builder.build(),
         new Interval("2011-01-01/2011-01-02"),
-        makeSearchResults(new DateTime("2011-01-01"), "how", "howdy", "howwwwww", "howwy"),
+        makeSearchResults(new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4),
         new Interval("2011-01-02/2011-01-03"),
-        makeSearchResults(new DateTime("2011-01-02"), "how1", "howdy1", "howwwwww1", "howwy1"),
+        makeSearchResults(new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4),
         new Interval("2011-01-05/2011-01-10"),
         makeSearchResults(
-            new DateTime("2011-01-05"), "how2", "howdy2", "howwwwww2", "howww2",
-            new DateTime("2011-01-06"), "how3", "howdy3", "howwwwww3", "howww3",
-            new DateTime("2011-01-07"), "how4", "howdy4", "howwwwww4", "howww4",
-            new DateTime("2011-01-08"), "how5", "howdy5", "howwwwww5", "howww5",
-            new DateTime("2011-01-09"), "how6", "howdy6", "howwwwww6", "howww6"
+            new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
+            new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
+            new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
+            new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
+            new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
         ),
         new Interval("2011-01-05/2011-01-10"),
         makeSearchResults(
-            new DateTime("2011-01-05T01"), "how2", "howdy2", "howwwwww2", "howww2",
-            new DateTime("2011-01-06T01"), "how3", "howdy3", "howwwwww3", "howww3",
-            new DateTime("2011-01-07T01"), "how4", "howdy4", "howwwwww4", "howww4",
-            new DateTime("2011-01-08T01"), "how5", "howdy5", "howwwwww5", "howww5",
-            new DateTime("2011-01-09T01"), "how6", "howdy6", "howwwwww6", "howww6"
+            new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
+            new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
+            new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
+            new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
+            new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
         )
     );
@@ -1140,18 +1140,18 @@ public class CachingClusteredClientTest
     HashMap context = new HashMap();
     TestHelper.assertExpectedResults(
         makeSearchResults(
-            new DateTime("2011-01-01"), "how", "howdy", "howwwwww", "howwy",
-            new DateTime("2011-01-02"), "how1", "howdy1", "howwwwww1", "howwy1",
-            new DateTime("2011-01-05"), "how2", "howdy2", "howwwwww2", "howww2",
-            new DateTime("2011-01-05T01"), "how2", "howdy2", "howwwwww2", "howww2",
-            new DateTime("2011-01-06"), "how3", "howdy3", "howwwwww3", "howww3",
-            new DateTime("2011-01-06T01"), "how3", "howdy3", "howwwwww3", "howww3",
-            new DateTime("2011-01-07"), "how4", "howdy4", "howwwwww4", "howww4",
-            new DateTime("2011-01-07T01"), "how4", "howdy4", "howwwwww4", "howww4",
-            new DateTime("2011-01-08"), "how5", "howdy5", "howwwwww5", "howww5",
-            new DateTime("2011-01-08T01"), "how5", "howdy5", "howwwwww5", "howww5",
-            new DateTime("2011-01-09"), "how6", "howdy6", "howwwwww6", "howww6",
-            new DateTime("2011-01-09T01"), "how6", "howdy6", "howwwwww6", "howww6"
+            new DateTime("2011-01-01"), "how", 1, "howdy", 2, "howwwwww", 3, "howwy", 4,
+            new DateTime("2011-01-02"), "how1", 1, "howdy1", 2, "howwwwww1", 3, "howwy1", 4,
+            new DateTime("2011-01-05"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
+            new DateTime("2011-01-05T01"), "how2", 1, "howdy2", 2, "howwwwww2", 3, "howww2", 4,
+            new DateTime("2011-01-06"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
+            new DateTime("2011-01-06T01"), "how3", 1, "howdy3", 2, "howwwwww3", 3, "howww3", 4,
+            new DateTime("2011-01-07"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
+            new DateTime("2011-01-07T01"), "how4", 1, "howdy4", 2, "howwwwww4", 3, "howww4", 4,
+            new DateTime("2011-01-08"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
+            new DateTime("2011-01-08T01"), "how5", 1, "howdy5", 2, "howwwwww5", 3, "howww5", 4,
+            new DateTime("2011-01-09"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4,
+            new DateTime("2011-01-09T01"), "how6", 1, "howdy6", 2, "howwwwww6", 3, "howww6", 4
        ),
        runner.run(
            builder.intervals("2011-01-01/2011-01-10")
@@ -2118,7 +2118,7 @@ public class CachingClusteredClientTest
       List<SearchHit> values = Lists.newArrayList();
       while (index < objects.length && !(objects[index] instanceof DateTime)) {
-        values.add(new SearchHit(TOP_DIM, objects[index++].toString()));
+        values.add(new SearchHit(TOP_DIM, objects[index++].toString(), (Integer) objects[index++]));
       }
       retVal.add(new Result<>(timestamp, new SearchResultValue(values)));