[Rollup] Validate timezone in range queries (#30338)

When validating the search request, we make sure any date_histogram
aggregations have timezones that match the jobs.  But we didn't
do any such validation on range queries.

While it wouldn't produce incorrect results, it would be confusing
to the user as no documents would match the aggregation (because we
add a filter clause on the timezone for the agg).

Now the user gets an exception up front, along with helpful text explaining why the range
query didn't match and which timezones are acceptable.
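As an illustration (the field name and job settings below are assumptions for the example,
not taken from this commit): against a rollup job whose date_histogram group is configured
with time_zone "UTC", a range query can either omit the timezone, which is treated as UTC,
or must supply a compatible one; otherwise the rollup search now fails immediately.

import org.elasticsearch.index.query.RangeQueryBuilder;

class RollupRangeTimeZoneSketch {
    // Hypothetical field "timestamp"; assumes the job was rolled up with time_zone "UTC".
    static void sketch() {
        // Accepted: the explicit timezone matches the job's.
        RangeQueryBuilder accepted = new RangeQueryBuilder("timestamp").gte("2018-01-01").timeZone("UTC");
        // Also accepted: no timezone set, which the validation treats as the default, UTC.
        RangeQueryBuilder alsoAccepted = new RangeQueryBuilder("timestamp").gte("2018-01-01");
        // Rejected up front with "... requested timezone is not compatible. Options include: [UTC]".
        RangeQueryBuilder rejected = new RangeQueryBuilder("timestamp").gte("2018-01-01").timeZone("EST");
    }
}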
Zachary Tong 2018-05-04 10:45:16 -07:00 committed by GitHub
parent 21bc87a65b
commit 1c0d339904
3 changed files with 68 additions and 25 deletions

docs/CHANGELOG.asciidoc

@@ -107,6 +107,10 @@ ones that the user is authorized to access in case field level security is enabl
 Fixed prerelease version of elasticsearch in the `deb` package to sort before GA versions
 ({pull}29000[#29000])
 
+Rollup::
+* Validate timezone in range queries to ensure they match the selected job when
+searching ({pull}30338[#30338])
+
 [float]
 === Regressions
 Fail snapshot operations early when creating or deleting a snapshot on a repository that has been
@@ -167,6 +171,10 @@ Machine Learning::
 * Account for gaps in data counts after job is reopened ({pull}30294[#30294])
 
+Rollup::
+* Validate timezone in range queries to ensure they match the selected job when
+searching ({pull}30338[#30338])
+
 //[float]
 //=== Regressions

TransportRollupSearchAction.java

@@ -56,10 +56,12 @@ import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.xpack.core.rollup.RollupField;
 import org.elasticsearch.xpack.core.rollup.action.RollupJobCaps;
 import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction;
+import org.elasticsearch.xpack.core.rollup.job.DateHistoGroupConfig;
 import org.elasticsearch.xpack.rollup.Rollup;
 import org.elasticsearch.xpack.rollup.RollupJobIdentifierUtils;
 import org.elasticsearch.xpack.rollup.RollupRequestTranslator;
 import org.elasticsearch.xpack.rollup.RollupResponseTranslator;
+import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -277,6 +279,7 @@ public class TransportRollupSearchAction extends TransportAction<SearchRequest,
                 ? ((RangeQueryBuilder)builder).fieldName()
                 : ((TermQueryBuilder)builder).fieldName();
 
+            List<String> incorrectTimeZones = new ArrayList<>();
             List<String> rewrittenFieldName = jobCaps.stream()
                 // We only care about job caps that have the query's target field
                 .filter(caps -> caps.getFieldCaps().keySet().contains(fieldName))
@@ -286,6 +289,24 @@ public class TransportRollupSearchAction extends TransportAction<SearchRequest,
                     // For now, we only allow filtering on grouping fields
                     .filter(agg -> {
                         String type = (String)agg.get(RollupField.AGG);
+
+                        // If the cap is for a date_histo, and the query is a range, the timezones need to match
+                        if (type.equals(DateHistogramAggregationBuilder.NAME) && builder instanceof RangeQueryBuilder) {
+                            String timeZone = ((RangeQueryBuilder)builder).timeZone();
+
+                            // Many range queries don't include the timezone because the default is UTC, but the query
+                            // builder will return null so we need to set it here
+                            if (timeZone == null) {
+                                timeZone = DateTimeZone.UTC.toString();
+                            }
+                            boolean matchingTZ = ((String)agg.get(DateHistoGroupConfig.TIME_ZONE.getPreferredName()))
+                                    .equalsIgnoreCase(timeZone);
+                            if (matchingTZ == false) {
+                                incorrectTimeZones.add((String)agg.get(DateHistoGroupConfig.TIME_ZONE.getPreferredName()));
+                            }
+                            return matchingTZ;
+                        }
+                        // Otherwise just make sure it's one of the three groups
                         return type.equals(TermsAggregationBuilder.NAME)
                                 || type.equals(DateHistogramAggregationBuilder.NAME)
                                 || type.equals(HistogramAggregationBuilder.NAME);
@@ -304,8 +325,14 @@ public class TransportRollupSearchAction extends TransportAction<SearchRequest,
                 .collect(ArrayList::new, List::addAll, List::addAll);
 
             if (rewrittenFieldName.isEmpty()) {
-                throw new IllegalArgumentException("Field [" + fieldName + "] in [" + builder.getWriteableName()
-                        + "] query is not available in selected rollup indices, cannot query.");
+                if (incorrectTimeZones.isEmpty()) {
+                    throw new IllegalArgumentException("Field [" + fieldName + "] in [" + builder.getWriteableName()
+                            + "] query is not available in selected rollup indices, cannot query.");
+                } else {
+                    throw new IllegalArgumentException("Field [" + fieldName + "] in [" + builder.getWriteableName()
+                            + "] query was found in rollup indices, but requested timezone is not compatible. Options include: "
+                            + incorrectTimeZones);
+                }
             }
 
             if (rewrittenFieldName.size() > 1) {
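The core of the check above, distilled into a standalone sketch (illustrative only, not the
committed code verbatim): a null timezone on the range query defaults to UTC, the comparison
against the job's configured time_zone is case-insensitive, and mismatching zones are
collected so the exception can list the acceptable options.

import java.util.List;

final class TimeZoneMatchSketch {
    // Performed for each job cap whose date_histogram group covers the queried field.
    static boolean timeZoneMatches(String queryTimeZone, String jobTimeZone, List<String> incorrectTimeZones) {
        // Range queries without an explicit timezone default to UTC.
        String tz = queryTimeZone == null ? "UTC" : queryTimeZone;
        boolean matching = jobTimeZone.equalsIgnoreCase(tz);
        if (matching == false) {
            // Remembered so the error message can say which timezones would have worked.
            incorrectTimeZones.add(jobTimeZone);
        }
        return matching;
    }
}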

SearchActionTests.java

@@ -121,16 +121,38 @@ public class SearchActionTests extends ESTestCase {
         RollupJobCaps cap = new RollupJobCaps(job.build());
         Set<RollupJobCaps> caps = new HashSet<>();
         caps.add(cap);
-        QueryBuilder rewritten = null;
-        try {
-            rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1), caps);
-        } catch (Exception e) {
-            fail("Should not have thrown exception when parsing query.");
-        }
+        QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("UTC"), caps);
         assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
         assertThat(((RangeQueryBuilder)rewritten).fieldName(), equalTo("foo.date_histogram.timestamp"));
     }
 
+    public void testRangeNullTimeZone() {
+        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
+        GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig();
+        group.setDateHisto(new DateHistoGroupConfig.Builder().setField("foo").setInterval(new DateHistogramInterval("1h")).build());
+        job.setGroupConfig(group.build());
+        RollupJobCaps cap = new RollupJobCaps(job.build());
+        Set<RollupJobCaps> caps = new HashSet<>();
+        caps.add(cap);
+        QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1), caps);
+        assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
+        assertThat(((RangeQueryBuilder)rewritten).fieldName(), equalTo("foo.date_histogram.timestamp"));
+    }
+
+    public void testRangeWrongTZ() {
+        RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
+        GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig();
+        group.setDateHisto(new DateHistoGroupConfig.Builder().setField("foo").setInterval(new DateHistogramInterval("1h")).build());
+        job.setGroupConfig(group.build());
+        RollupJobCaps cap = new RollupJobCaps(job.build());
+        Set<RollupJobCaps> caps = new HashSet<>();
+        caps.add(cap);
+        Exception e = expectThrows(IllegalArgumentException.class,
+                () -> TransportRollupSearchAction.rewriteQuery(new RangeQueryBuilder("foo").gt(1).timeZone("EST"), caps));
+        assertThat(e.getMessage(), equalTo("Field [foo] in [range] query was found in rollup indices, but requested timezone is not " +
+                "compatible. Options include: [UTC]"));
+    }
+
     public void testTerms() {
         RollupJobConfig.Builder job = ConfigTestHelpers.getRollupJob("foo");
         GroupConfig.Builder group = ConfigTestHelpers.getGroupConfig();
@@ -139,12 +161,7 @@ public class SearchActionTests extends ESTestCase {
         RollupJobCaps cap = new RollupJobCaps(job.build());
         Set<RollupJobCaps> caps = new HashSet<>();
         caps.add(cap);
-        QueryBuilder rewritten = null;
-        try {
-            rewritten = TransportRollupSearchAction.rewriteQuery(new TermQueryBuilder("foo", "bar"), caps);
-        } catch (Exception e) {
-            fail("Should not have thrown exception when parsing query.");
-        }
+        QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new TermQueryBuilder("foo", "bar"), caps);
         assertThat(rewritten, instanceOf(TermQueryBuilder.class));
         assertThat(((TermQueryBuilder)rewritten).fieldName(), equalTo("foo.terms.value"));
     }
@@ -160,12 +177,7 @@ public class SearchActionTests extends ESTestCase {
         BoolQueryBuilder builder = new BoolQueryBuilder();
         builder.must(getQueryBuilder(2));
-        QueryBuilder rewritten = null;
-        try {
-            rewritten = TransportRollupSearchAction.rewriteQuery(builder, caps);
-        } catch (Exception e) {
-            fail("Should not have thrown exception when parsing query.");
-        }
+        QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(builder, caps);
         assertThat(rewritten, instanceOf(BoolQueryBuilder.class));
         assertThat(((BoolQueryBuilder)rewritten).must().size(), equalTo(1));
     }
@@ -178,12 +190,8 @@ public class SearchActionTests extends ESTestCase {
         RollupJobCaps cap = new RollupJobCaps(job.build());
         Set<RollupJobCaps> caps = new HashSet<>();
         caps.add(cap);
-        try {
-            QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new MatchAllQueryBuilder(), caps);
-            assertThat(rewritten, instanceOf(MatchAllQueryBuilder.class));
-        } catch (Exception e) {
-            fail("Should not have thrown exception when parsing query.");
-        }
+        QueryBuilder rewritten = TransportRollupSearchAction.rewriteQuery(new MatchAllQueryBuilder(), caps);
+        assertThat(rewritten, instanceOf(MatchAllQueryBuilder.class));
     }
 
     public void testAmbiguousResolution() {