Rename epochStart/epochEnd to start/end in result query builders

Original commit: elastic/x-pack-elasticsearch@f4fdd64278
This commit is contained in:
Dimitrios Athanasiou 2016-11-29 18:28:31 +00:00 committed by Dimitris Athanasiou
parent 2898e3c421
commit ec5aa34d17
8 changed files with 93 additions and 119 deletions

View File

@@ -319,7 +319,7 @@ extends Action<GetInfluencersAction.Request, GetInfluencersAction.Response, GetI
@Override
protected void doExecute(Request request, ActionListener<Response> listener) {
InfluencersQueryBuilder.InfluencersQuery query = new InfluencersQueryBuilder().includeInterim(request.includeInterim)
.epochStart(request.start).epochEnd(request.end).from(request.pageParams.getFrom()).size(request.pageParams.getSize())
.start(request.start).end(request.end).from(request.pageParams.getFrom()).size(request.pageParams.getSize())
.anomalyScoreThreshold(request.anomalyScoreFilter).sortField(request.sort).sortDescending(request.decending).build();
QueryPage<Influencer> page = jobProvider.influencers(request.jobId, query);

View File

@@ -5,8 +5,8 @@
*/
package org.elasticsearch.xpack.prelert.job.persistence;
import org.elasticsearch.xpack.prelert.job.results.Bucket;
import org.elasticsearch.common.Strings;
import org.elasticsearch.xpack.prelert.job.results.Bucket;
import java.util.Objects;
@@ -23,10 +23,10 @@ import java.util.Objects;
* this value. Default = 0.0</li>
* <li>normalizedProbabilityThreshold- Return only buckets with a
* maxNormalizedProbability &gt;= this value. Default = 0.0</li>
* <li>epochStart- The start bucket time. A bucket with this timestamp will be
* <li>start- The start bucket time. A bucket with this timestamp will be
* included in the results. If 0 all buckets up to <code>endEpochMs</code> are
* returned. Default = -1</li>
* <li>epochEnd- The end bucket timestamp buckets up to but NOT including this
* <li>end- The end bucket timestamp buckets up to but NOT including this
* timestamp are returned. If 0 all buckets from <code>startEpochMs</code> are
* returned. Default = -1</li>
* <li>partitionValue Set the bucket's max normalised probability to this
@@ -92,7 +92,7 @@ public final class BucketsQueryBuilder {
* If startTime &lt;= 0 the parameter is not set
*/
public BucketsQueryBuilder epochStart(String startTime) {
bucketsQuery.epochStart = startTime;
bucketsQuery.start = startTime;
return this;
}
@@ -100,7 +100,7 @@ public final class BucketsQueryBuilder {
* If endTime &lt;= 0 the parameter is not set
*/
public BucketsQueryBuilder epochEnd(String endTime) {
bucketsQuery.epochEnd = endTime;
bucketsQuery.end = endTime;
return this;
}
@@ -120,8 +120,8 @@ public final class BucketsQueryBuilder {
private boolean includeInterim = false;
private double anomalyScoreFilter = 0.0d;
private double normalizedProbability = 0.0d;
private String epochStart;
private String epochEnd;
private String start;
private String end;
private String partitionValue = null;
private String sortField = Bucket.TIMESTAMP.getPreferredName();
private boolean sortDescending = false;
@@ -150,12 +150,12 @@ public final class BucketsQueryBuilder {
return normalizedProbability;
}
public String getEpochStart() {
return epochStart;
public String getStart() {
return start;
}
public String getEpochEnd() {
return epochEnd;
public String getEnd() {
return end;
}
/**
@@ -175,7 +175,7 @@ public final class BucketsQueryBuilder {
@Override
public int hashCode() {
return Objects.hash(from, size, expand, includeInterim, anomalyScoreFilter, normalizedProbability, epochStart, epochEnd,
return Objects.hash(from, size, expand, includeInterim, anomalyScoreFilter, normalizedProbability, start, end,
partitionValue, sortField, sortDescending);
}
@@ -197,8 +197,8 @@ public final class BucketsQueryBuilder {
Objects.equals(size, other.size) &&
Objects.equals(expand, other.expand) &&
Objects.equals(includeInterim, other.includeInterim) &&
Objects.equals(epochStart, other.epochStart) &&
Objects.equals(epochEnd, other.epochEnd) &&
Objects.equals(start, other.start) &&
Objects.equals(end, other.end) &&
Objects.equals(anomalyScoreFilter, other.anomalyScoreFilter) &&
Objects.equals(normalizedProbability, other.normalizedProbability) &&
Objects.equals(partitionValue, other.partitionValue) &&

View File

@@ -273,7 +273,7 @@ public class ElasticsearchJobProvider implements JobProvider {
public QueryPage<Bucket> buckets(String jobId, BucketsQuery query)
throws ResourceNotFoundException {
QueryBuilder fb = new ResultsFilterBuilder()
.timeRange(ElasticsearchMappings.ES_TIMESTAMP, query.getEpochStart(), query.getEpochEnd())
.timeRange(ElasticsearchMappings.ES_TIMESTAMP, query.getStart(), query.getEnd())
.score(Bucket.ANOMALY_SCORE.getPreferredName(), query.getAnomalyScoreFilter())
.score(Bucket.MAX_NORMALIZED_PROBABILITY.getPreferredName(), query.getNormalizedProbability())
.interim(Bucket.IS_INTERIM.getPreferredName(), query.isIncludeInterim())
@@ -292,7 +292,7 @@ public class ElasticsearchJobProvider implements JobProvider {
}
} else {
List<PerPartitionMaxProbabilities> scores =
partitionMaxNormalisedProbabilities(jobId, query.getEpochStart(), query.getEpochEnd(), query.getPartitionValue());
partitionMaxNormalisedProbabilities(jobId, query.getStart(), query.getEnd(), query.getPartitionValue());
mergePartitionScoresIntoBucket(scores, buckets.results(), query.getPartitionValue());
@@ -630,7 +630,7 @@ public class ElasticsearchJobProvider implements JobProvider {
public QueryPage<AnomalyRecord> records(String jobId, RecordsQueryBuilder.RecordsQuery query)
throws ResourceNotFoundException {
QueryBuilder fb = new ResultsFilterBuilder()
.timeRange(ElasticsearchMappings.ES_TIMESTAMP, query.getEpochStart(), query.getEpochEnd())
.timeRange(ElasticsearchMappings.ES_TIMESTAMP, query.getStart(), query.getEnd())
.score(AnomalyRecord.ANOMALY_SCORE.getPreferredName(), query.getAnomalyScoreThreshold())
.score(AnomalyRecord.NORMALIZED_PROBABILITY.getPreferredName(), query.getNormalizedProbabilityThreshold())
.interim(AnomalyRecord.IS_INTERIM.getPreferredName(), query.isIncludeInterim())
@@ -711,7 +711,7 @@ public class ElasticsearchJobProvider implements JobProvider {
@Override
public QueryPage<Influencer> influencers(String jobId, InfluencersQuery query) throws ResourceNotFoundException {
QueryBuilder fb = new ResultsFilterBuilder()
.timeRange(ElasticsearchMappings.ES_TIMESTAMP, query.getEpochStart(), query.getEpochEnd())
.timeRange(ElasticsearchMappings.ES_TIMESTAMP, query.getStart(), query.getEnd())
.score(Bucket.ANOMALY_SCORE.getPreferredName(), query.getAnomalyScoreFilter())
.interim(Bucket.IS_INTERIM.getPreferredName(), query.isIncludeInterim())
.build();

View File

@@ -10,23 +10,20 @@ import org.elasticsearch.xpack.prelert.job.results.Influencer;
import java.util.Objects;
/**
* One time query builder for buckets.
* One time query builder for influencers.
* <ul>
* <li>From- Skip the first N Buckets. This parameter is for paging if not
* <li>From- Skip the first N Influencers. This parameter is for paging if not
* required set to 0. Default = 0</li>
* <li>Size- Take only this number of Buckets. Default =
* <li>Size- Take only this number of Influencers. Default =
* {@value DEFAULT_SIZE}</li>
* <li>Expand- Include anomaly records. Default= false</li>
* <li>IncludeInterim- Include interim results. Default = false</li>
* <li>anomalyScoreThreshold- Return only buckets with an anomalyScore &gt;=
* <li>anomalyScoreThreshold- Return only influencers with an anomalyScore &gt;=
* this value. Default = 0.0</li>
* <li>normalizedProbabilityThreshold- Return only buckets with a
* maxNormalizedProbability &gt;= this value. Default = 0.0</li>
* <li>epochStart- The start bucket time. A bucket with this timestamp will be
* included in the results. If 0 all buckets up to <code>endEpochMs</code> are
* <li>start- The start influencer time. An influencer with this timestamp will be
* included in the results. If 0 all influencers up to <code>end</code> are
* returned. Default = -1</li>
* <li>epochEnd- The end bucket timestamp buckets up to but NOT including this
* timestamp are returned. If 0 all buckets from <code>startEpochMs</code> are
* <li>end- The end influencer timestamp. Influencers up to but NOT including this
* timestamp are returned. If 0 all influencers from <code>start</code> are
* returned. Default = -1</li>
* <li>partitionValue Set the bucket's max normalised probability to this
* partition field value's max normalised probability. Default = null</li>
@@ -70,16 +67,16 @@ public final class InfluencersQueryBuilder {
/**
* If startTime &lt;= 0 the parameter is not set
*/
public InfluencersQueryBuilder epochStart(String startTime) {
influencersQuery.epochStart = startTime;
public InfluencersQueryBuilder start(String startTime) {
influencersQuery.start = startTime;
return this;
}
/**
* If endTime &lt;= 0 the parameter is not set
*/
public InfluencersQueryBuilder epochEnd(String endTime) {
influencersQuery.epochEnd = endTime;
public InfluencersQueryBuilder end(String endTime) {
influencersQuery.end = endTime;
return this;
}
@@ -97,8 +94,8 @@ public final class InfluencersQueryBuilder {
private int size = DEFAULT_SIZE;
private boolean includeInterim = false;
private double anomalyScoreFilter = 0.0d;
private String epochStart;
private String epochEnd;
private String start;
private String end;
private String sortField = Influencer.ANOMALY_SCORE.getPreferredName();
private boolean sortDescending = false;
@@ -118,12 +115,12 @@ public final class InfluencersQueryBuilder {
return anomalyScoreFilter;
}
public String getEpochStart() {
return epochStart;
public String getStart() {
return start;
}
public String getEpochEnd() {
return epochEnd;
public String getEnd() {
return end;
}
public String getSortField() {
@@ -136,8 +133,7 @@ public final class InfluencersQueryBuilder {
@Override
public int hashCode() {
return Objects.hash(from, size, includeInterim, anomalyScoreFilter, epochStart, epochEnd,
sortField, sortDescending);
return Objects.hash(from, size, includeInterim, anomalyScoreFilter, start, end, sortField, sortDescending);
}
@@ -157,8 +153,8 @@ public final class InfluencersQueryBuilder {
return Objects.equals(from, other.from) &&
Objects.equals(size, other.size) &&
Objects.equals(includeInterim, other.includeInterim) &&
Objects.equals(epochStart, other.epochStart) &&
Objects.equals(epochEnd, other.epochEnd) &&
Objects.equals(start, other.start) &&
Objects.equals(end, other.end) &&
Objects.equals(anomalyScoreFilter, other.anomalyScoreFilter) &&
Objects.equals(sortField, other.sortField) &&
this.sortDescending == other.sortDescending;

View File

@@ -21,92 +21,80 @@ package org.elasticsearch.xpack.prelert.job.persistence;
* this value. Default = 0.0</li>
* <li>normalizedProbabilityThreshold. Return only buckets with a
* maxNormalizedProbability &gt;= this value. Default = 0.0</li>
* <li>epochStart- The start bucket time. A bucket with this timestamp will be
* <li>start- The start bucket time. A bucket with this timestamp will be
* included in the results. If 0 all buckets up to <code>endEpochMs</code> are
* returned. Default = -1</li>
* <li>epochEnd- The end bucket timestamp buckets up to but NOT including this
* <li>end- The end bucket timestamp buckets up to but NOT including this
* timestamp are returned. If 0 all buckets from <code>startEpochMs</code> are
* returned. Default = -1</li>
* </ul>
*/
public final class RecordsQueryBuilder
{
public final class RecordsQueryBuilder {
public static final int DEFAULT_SIZE = 100;
private RecordsQuery recordsQuery = new RecordsQuery();
public RecordsQueryBuilder from(int from)
{
public RecordsQueryBuilder from(int from) {
recordsQuery.from = from;
return this;
}
public RecordsQueryBuilder size(int size)
{
public RecordsQueryBuilder size(int size) {
recordsQuery.size = size;
return this;
}
public RecordsQueryBuilder epochStart(String startTime)
{
recordsQuery.epochStart = startTime;
public RecordsQueryBuilder epochStart(String startTime) {
recordsQuery.start = startTime;
return this;
}
public RecordsQueryBuilder epochEnd(String endTime)
{
recordsQuery.epochEnd = endTime;
public RecordsQueryBuilder epochEnd(String endTime) {
recordsQuery.end = endTime;
return this;
}
public RecordsQueryBuilder includeInterim(boolean include)
{
public RecordsQueryBuilder includeInterim(boolean include) {
recordsQuery.includeInterim = include;
return this;
}
public RecordsQueryBuilder sortField(String fieldname)
{
public RecordsQueryBuilder sortField(String fieldname) {
recordsQuery.sortField = fieldname;
return this;
}
public RecordsQueryBuilder sortDescending(boolean sortDescending)
{
public RecordsQueryBuilder sortDescending(boolean sortDescending) {
recordsQuery.sortDescending = sortDescending;
return this;
}
public RecordsQueryBuilder anomalyScoreThreshold(double anomalyScoreFilter)
{
public RecordsQueryBuilder anomalyScoreThreshold(double anomalyScoreFilter) {
recordsQuery.anomalyScoreFilter = anomalyScoreFilter;
return this;
}
public RecordsQueryBuilder normalizedProbability(double normalizedProbability)
{
public RecordsQueryBuilder normalizedProbability(double normalizedProbability) {
recordsQuery.normalizedProbability = normalizedProbability;
return this;
}
public RecordsQueryBuilder partitionFieldValue(String partitionFieldValue)
{
public RecordsQueryBuilder partitionFieldValue(String partitionFieldValue) {
recordsQuery.partitionFieldValue = partitionFieldValue;
return this;
}
public RecordsQuery build()
{
public RecordsQuery build() {
return recordsQuery;
}
public void clear()
{
public void clear() {
recordsQuery = new RecordsQuery();
}
public class RecordsQuery
{
public class RecordsQuery {
private int from = 0;
private int size = DEFAULT_SIZE;
private boolean includeInterim = false;
@@ -115,58 +103,48 @@ public final class RecordsQueryBuilder
private double anomalyScoreFilter = 0.0d;
private double normalizedProbability = 0.0d;
private String partitionFieldValue;
private String epochStart;
private String epochEnd;
private String start;
private String end;
public int getSize()
{
public int getSize() {
return size;
}
public boolean isIncludeInterim()
{
public boolean isIncludeInterim() {
return includeInterim;
}
public String getSortField()
{
public String getSortField() {
return sortField;
}
public boolean isSortDescending()
{
public boolean isSortDescending() {
return sortDescending;
}
public double getAnomalyScoreThreshold()
{
public double getAnomalyScoreThreshold() {
return anomalyScoreFilter;
}
public double getNormalizedProbabilityThreshold()
{
public double getNormalizedProbabilityThreshold() {
return normalizedProbability;
}
public String getPartitionFieldValue()
{
public String getPartitionFieldValue() {
return partitionFieldValue;
}
public int getFrom()
{
public int getFrom() {
return from;
}
public String getEpochStart()
{
return epochStart;
public String getStart() {
return start;
}
public String getEpochEnd()
{
return epochEnd;
public String getEnd() {
return end;
}
}
}

View File

@@ -18,8 +18,8 @@ public class BucketsQueryBuilderTests extends ESTestCase {
assertEquals(false, query.isExpand());
assertEquals(0.0, query.getAnomalyScoreFilter(), 0.0001);
assertEquals(0.0, query.getNormalizedProbability(), 0.0001);
assertNull(query.getEpochStart());
assertNull(query.getEpochEnd());
assertNull(query.getStart());
assertNull(query.getEnd());
assertEquals("timestamp", query.getSortField());
assertFalse(query.isSortDescending());
}
@@ -45,8 +45,8 @@ public class BucketsQueryBuilderTests extends ESTestCase {
assertEquals(true, query.isExpand());
assertEquals(50.0d, query.getAnomalyScoreFilter(), 0.00001);
assertEquals(70.0d, query.getNormalizedProbability(), 0.00001);
assertEquals("1000", query.getEpochStart());
assertEquals("2000", query.getEpochEnd());
assertEquals("1000", query.getStart());
assertEquals("2000", query.getEnd());
assertEquals("foo", query.getPartitionValue());
assertEquals("anomalyScore", query.getSortField());
assertTrue(query.isSortDescending());

View File

@@ -762,7 +762,7 @@ public class ElasticsearchJobProviderTests extends ESTestCase {
Client client = clientBuilder.build();
ElasticsearchJobProvider provider = createProvider(client);
InfluencersQuery query = new InfluencersQueryBuilder().from(from).size(size).epochStart("0").epochEnd("0").sortField("sort")
InfluencersQuery query = new InfluencersQueryBuilder().from(from).size(size).start("0").end("0").sortField("sort")
.sortDescending(true).anomalyScoreThreshold(0.0).includeInterim(true).build();
QueryPage<Influencer> page = provider.influencers(jobId, query);
assertEquals(2L, page.count());

View File

@@ -17,8 +17,8 @@ public class InfluencersQueryBuilderTests extends ESTestCase {
assertEquals(InfluencersQueryBuilder.DEFAULT_SIZE, query.getSize());
assertEquals(false, query.isIncludeInterim());
assertEquals(0.0, query.getAnomalyScoreFilter(), 0.0001);
assertNull(query.getEpochStart());
assertNull(query.getEpochEnd());
assertNull(query.getStart());
assertNull(query.getEnd());
assertEquals(Influencer.ANOMALY_SCORE.getPreferredName(), query.getSortField());
assertFalse(query.isSortDescending());
}
@@ -29,8 +29,8 @@ public class InfluencersQueryBuilderTests extends ESTestCase {
.size(40)
.includeInterim(true)
.anomalyScoreThreshold(50.0d)
.epochStart("1000")
.epochEnd("2000")
.start("1000")
.end("2000")
.sortField("anomalyScore")
.sortDescending(true)
.build();
@@ -39,8 +39,8 @@ public class InfluencersQueryBuilderTests extends ESTestCase {
assertEquals(40, query.getSize());
assertEquals(true, query.isIncludeInterim());
assertEquals(50.0d, query.getAnomalyScoreFilter(), 0.00001);
assertEquals("1000", query.getEpochStart());
assertEquals("2000", query.getEpochEnd());
assertEquals("1000", query.getStart());
assertEquals("2000", query.getEnd());
assertEquals("anomalyScore", query.getSortField());
assertTrue(query.isSortDescending());
}
@@ -51,16 +51,16 @@ public class InfluencersQueryBuilderTests extends ESTestCase {
.size(40)
.includeInterim(true)
.anomalyScoreThreshold(50.0d)
.epochStart("1000")
.epochEnd("2000");
.start("1000")
.end("2000");
InfluencersQueryBuilder query2 = new InfluencersQueryBuilder()
.from(20)
.size(40)
.includeInterim(true)
.anomalyScoreThreshold(50.0d)
.epochStart("1000")
.epochEnd("2000");
.start("1000")
.end("2000");
assertEquals(query.build(), query2.build());
assertEquals(query.build().hashCode(), query2.build().hashCode());
@@ -71,8 +71,8 @@ public class InfluencersQueryBuilderTests extends ESTestCase {
.size(40)
.includeInterim(true)
.anomalyScoreThreshold(50.0d)
.epochStart("1000")
.epochEnd("2000");
.start("1000")
.end("2000");
assertEquals(query.build(), query2.build());
query2.clear();
@@ -80,8 +80,8 @@ public class InfluencersQueryBuilderTests extends ESTestCase {
.size(40)
.includeInterim(true)
.anomalyScoreThreshold(50.1d)
.epochStart("1000")
.epochEnd("2000");
.start("1000")
.end("2000");
assertFalse(query.build().equals(query2.build()));
}
}