Upgrades to 6.0.0-alpha1-SNAPSHOT (elastic/elasticsearch#672)
* Upgrades to ES 6.0.0-alpha1-SNAPSHOT
* Kibana changes to run upgrade to 6.0.0-alpha1-SNAPSHOT
* Other version changes to 6.0.0-alpha1-SNAPSHOT

Original commit: elastic/x-pack-elasticsearch@574d8573ab
Parent: 2d8de6adb2
Commit: 68b8ce40fd
@@ -75,7 +75,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
         PARSER.declareLong(Builder::setQueryDelay, QUERY_DELAY);
         PARSER.declareLong(Builder::setFrequency, FREQUENCY);
         PARSER.declareObject(Builder::setQuery,
-                (p, c) -> new QueryParseContext(p, ParseFieldMatcher.STRICT).parseInnerQueryBuilder().get(), QUERY);
+                (p, c) -> new QueryParseContext(p, c.getParseFieldMatcher()).parseInnerQueryBuilder(), QUERY);
-        PARSER.declareObject(Builder::setAggregations, (p, c) -> AggregatorFactories.parseAggregators(
-                new QueryParseContext(p, ParseFieldMatcher.STRICT)), AGGREGATIONS);
+        PARSER.declareObject(Builder::setAggregations,(p, c) -> AggregatorFactories.parseAggregators(
+                new QueryParseContext(p, c.getParseFieldMatcher())), AGGREGATIONS);
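The hunk above tracks two parser-API moves in 6.0.0-alpha1: QueryParseContext.parseInnerQueryBuilder() now hands back the QueryBuilder directly instead of an Optional (so the trailing .get() disappears), and the ParseFieldMatcher is taken from the parse context instead of being hard-coded to STRICT. A minimal sketch of the call-site migration, assuming only the classes shown in the hunk; parser and context stand in for the lambda arguments p and c:

    // 5.x shape: parseInnerQueryBuilder() returned Optional<QueryBuilder>,
    // forcing callers to unwrap with .get()
    QueryBuilder before = new QueryParseContext(parser, ParseFieldMatcher.STRICT)
            .parseInnerQueryBuilder().get();

    // 6.0.0-alpha1 shape: the builder comes back directly and the matcher
    // is whatever the surrounding parse context carries
    QueryBuilder after = new QueryParseContext(parser, context.getParseFieldMatcher())
            .parseInnerQueryBuilder();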
@@ -30,7 +30,7 @@ public class GetBucketActionResponseTests extends AbstractStreamableTestCase<Get
         List<Bucket> hits = new ArrayList<>(listSize);
         for (int j = 0; j < listSize; j++) {
             String jobId = "foo";
-            Bucket bucket = new Bucket(jobId, new Date(randomLong()), randomPositiveLong());
+            Bucket bucket = new Bucket(jobId, new Date(randomLong()), randomNonNegativeLong());
             if (randomBoolean()) {
                 bucket.setAnomalyScore(randomDouble());
             }

@@ -50,7 +50,7 @@ public class GetBucketActionResponseTests extends AbstractStreamableTestCase<Get
                 bucket.setBucketInfluencers(bucketInfluencers);
             }
             if (randomBoolean()) {
-                bucket.setEventCount(randomPositiveLong());
+                bucket.setEventCount(randomNonNegativeLong());
             }
             if (randomBoolean()) {
                 bucket.setInitialAnomalyScore(randomDouble());

@@ -88,7 +88,7 @@ public class GetBucketActionResponseTests extends AbstractStreamableTestCase<Get
             int size = randomInt(10);
             List<AnomalyRecord> records = new ArrayList<>(size);
             for (int i = 0; i < size; i++) {
-                AnomalyRecord anomalyRecord = new AnomalyRecord(jobId, new Date(randomLong()), randomPositiveLong(), sequenceNum++);
+                AnomalyRecord anomalyRecord = new AnomalyRecord(jobId, new Date(randomLong()), randomNonNegativeLong(), sequenceNum++);
                 anomalyRecord.setAnomalyScore(randomDouble());
                 anomalyRecord.setActual(Collections.singletonList(randomDouble()));
                 anomalyRecord.setTypical(Collections.singletonList(randomDouble()));
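Most of this commit is a single mechanical rename: the ESTestCase helper randomPositiveLong() became randomNonNegativeLong() in core, apparently because the value it produces can be zero, so the old name overstated its contract. A self-contained sketch of what such a helper can look like; this illustrates the contract only and is not Elasticsearch's actual implementation:

    import java.util.Random;

    final class RandomLongs {
        private static final Random RANDOM = new Random();

        // Uniform over [0, Long.MAX_VALUE]: clearing the sign bit makes any
        // long non-negative, and 0 remains a possible result -- hence the name.
        static long randomNonNegativeLong() {
            return RANDOM.nextLong() & Long.MAX_VALUE;
        }
    }

Every randomPositiveLong() call site in the hunks below changes the same way; only the helper name differs.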
@@ -22,11 +22,11 @@ public class GetInfluencersActionRequestTests extends AbstractStreamableXContent
     protected Request createTestInstance() {
         Request request = new Request(randomAsciiOfLengthBetween(1, 20));
         if (randomBoolean()) {
-            String start = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomPositiveLong());
+            String start = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomNonNegativeLong());
             request.setStart(start);
         }
         if (randomBoolean()) {
-            String end = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomPositiveLong());
+            String end = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomNonNegativeLong());
             request.setEnd(end);
         }
         if (randomBoolean()) {
@@ -22,7 +22,7 @@ public class GetInfluencersActionResponseTests extends AbstractStreamableTestCas
         List<Influencer> hits = new ArrayList<>(listSize);
         for (int j = 0; j < listSize; j++) {
             Influencer influencer = new Influencer(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(1, 20),
-                    randomAsciiOfLengthBetween(1, 20), new Date(randomPositiveLong()), randomPositiveLong(), j + 1);
+                    randomAsciiOfLengthBetween(1, 20), new Date(randomNonNegativeLong()), randomNonNegativeLong(), j + 1);
             influencer.setAnomalyScore(randomDouble());
             influencer.setInitialAnomalyScore(randomDouble());
             influencer.setProbability(randomDouble());
@@ -35,13 +35,13 @@ public class GetJobsActionResponseTests extends AbstractStreamableTestCase<GetJo
         for (int j = 0; j < listSize; j++) {
             String jobId = randomAsciiOfLength(10);
             String description = randomBoolean() ? randomAsciiOfLength(10) : null;
-            Date createTime = new Date(randomPositiveLong());
-            Date finishedTime = randomBoolean() ? new Date(randomPositiveLong()) : null;
-            Date lastDataTime = randomBoolean() ? new Date(randomPositiveLong()) : null;
-            long timeout = randomPositiveLong();
+            Date createTime = new Date(randomNonNegativeLong());
+            Date finishedTime = randomBoolean() ? new Date(randomNonNegativeLong()) : null;
+            Date lastDataTime = randomBoolean() ? new Date(randomNonNegativeLong()) : null;
+            long timeout = randomNonNegativeLong();
             AnalysisConfig analysisConfig = new AnalysisConfig.Builder(
                     Collections.singletonList(new Detector.Builder("metric", "some_field").build())).build();
-            AnalysisLimits analysisLimits = new AnalysisLimits(randomPositiveLong(), randomPositiveLong());
+            AnalysisLimits analysisLimits = new AnalysisLimits(randomNonNegativeLong(), randomNonNegativeLong());
             DataDescription dataDescription = randomBoolean() ? new DataDescription.Builder().build() : null;
             int numTransformers = randomIntBetween(0, 32);
             List<TransformConfig> transformConfigList = new ArrayList<>(numTransformers);
@@ -22,11 +22,11 @@ public class GetRecordsActionRequestTests extends AbstractStreamableXContentTest
     protected Request createTestInstance() {
         Request request = new Request(randomAsciiOfLengthBetween(1, 20));
         if (randomBoolean()) {
-            String start = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomPositiveLong());
+            String start = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomNonNegativeLong());
             request.setStart(start);
         }
         if (randomBoolean()) {
-            String end = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomPositiveLong());
+            String end = randomBoolean() ? randomAsciiOfLengthBetween(1, 20) : String.valueOf(randomNonNegativeLong());
             request.setEnd(end);
         }
         if (randomBoolean()) {
@@ -35,8 +35,8 @@ public class GetSchedulersActionResponseTests extends AbstractStreamableTestCase
             SchedulerConfig.Builder schedulerConfig = new SchedulerConfig.Builder(schedulerId, jobId);
             schedulerConfig.setIndexes(randomSubsetOf(2, Arrays.asList("index-1", "index-2", "index-3")));
             schedulerConfig.setTypes(randomSubsetOf(2, Arrays.asList("type-1", "type-2", "type-3")));
-            schedulerConfig.setFrequency(randomPositiveLong());
-            schedulerConfig.setQueryDelay(randomPositiveLong());
+            schedulerConfig.setFrequency(randomNonNegativeLong());
+            schedulerConfig.setQueryDelay(randomNonNegativeLong());
             if (randomBoolean()) {
                 schedulerConfig.setQuery(QueryBuilders.termQuery(randomAsciiOfLength(10), randomAsciiOfLength(10)));
             }
@@ -14,9 +14,9 @@ public class StartSchedulerActionRequestTests extends AbstractStreamableXContent

     @Override
     protected Request createTestInstance() {
-        Request request = new Request(randomAsciiOfLength(10), randomPositiveLong());
+        Request request = new Request(randomAsciiOfLength(10), randomNonNegativeLong());
         if (randomBoolean()) {
-            request.setEndTime(randomPositiveLong());
+            request.setEndTime(randomNonNegativeLong());
         }
         return request;
     }
@@ -345,13 +345,13 @@ public class AutodetectResultProcessorIT extends ESSingleNodeTestCase {
     private ModelSizeStats createModelSizeStats() {
         ModelSizeStats.Builder builder = new ModelSizeStats.Builder(JOB_ID);
         builder.setId(randomAsciiOfLength(20));
-        builder.setTimestamp(new Date(randomPositiveLong()));
-        builder.setLogTime(new Date(randomPositiveLong()));
-        builder.setBucketAllocationFailuresCount(randomPositiveLong());
-        builder.setModelBytes(randomPositiveLong());
-        builder.setTotalByFieldCount(randomPositiveLong());
-        builder.setTotalOverFieldCount(randomPositiveLong());
-        builder.setTotalPartitionFieldCount(randomPositiveLong());
+        builder.setTimestamp(new Date(randomNonNegativeLong()));
+        builder.setLogTime(new Date(randomNonNegativeLong()));
+        builder.setBucketAllocationFailuresCount(randomNonNegativeLong());
+        builder.setModelBytes(randomNonNegativeLong());
+        builder.setTotalByFieldCount(randomNonNegativeLong());
+        builder.setTotalOverFieldCount(randomNonNegativeLong());
+        builder.setTotalPartitionFieldCount(randomNonNegativeLong());
         builder.setMemoryStatus(randomFrom(EnumSet.allOf(ModelSizeStats.MemoryStatus.class)));
         return builder.build();
     }

@@ -363,7 +363,7 @@ public class AutodetectResultProcessorIT extends ESSingleNodeTestCase {
     }

     private Quantiles createQuantiles() {
-        return new Quantiles(JOB_ID, new Date(randomPositiveLong()), randomAsciiOfLength(100));
+        return new Quantiles(JOB_ID, new Date(randomNonNegativeLong()), randomAsciiOfLength(100));
     }

     private FlushAcknowledgement createFlushAcknowledgement() {
@@ -291,7 +291,7 @@ public class PrelertJobIT extends ESRestTestCase {
                     Collections.emptyMap(), new StringEntity(RESULT_MAPPING));
         } catch (ResponseException e) {
             // it is ok: the index already exists
-            assertThat(e.getMessage(), containsString("index_already_exists_exception"));
+            assertThat(e.getMessage(), containsString("resource_already_exists_exception"));
             assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400));
         }

@@ -310,7 +310,7 @@ public class PrelertJobIT extends ESRestTestCase {
                     new StringEntity(RESULT_MAPPING));
         } catch (ResponseException e) {
             // it is ok: the index already exists
-            assertThat(e.getMessage(), containsString("index_already_exists_exception"));
+            assertThat(e.getMessage(), containsString("resource_already_exists_exception"));
             assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400));
         }
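The assertion swap reflects an exception rename in 6.0: IndexAlreadyExistsException was generalized to ResourceAlreadyExistsException, so the machine-readable type string in REST error bodies changed while the 400 status stayed the same. A hedged sketch of the tolerant create-index pattern the test uses; the endpoint name here is illustrative, not the test's actual index:

    try {
        client().performRequest("PUT", "/some-results-index",
                Collections.emptyMap(), new StringEntity(RESULT_MAPPING));
    } catch (ResponseException e) {
        // it is ok: the index already exists; 6.0 reports the renamed type
        assertThat(e.getMessage(), containsString("resource_already_exists_exception"));
        assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400));
    }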
@@ -37,7 +37,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon


         if (randomBoolean()) {
-            builder.setBatchSpan(randomPositiveLong());
+            builder.setBatchSpan(randomNonNegativeLong());
         }
         long bucketSpan = AnalysisConfig.Builder.DEFAULT_BUCKET_SPAN;
         if (randomBoolean()) {

@@ -52,7 +52,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
             builder.setInfluencers(Arrays.asList(generateRandomStringArray(10, 10, false)));
         }
         if (randomBoolean()) {
-            builder.setLatency(randomPositiveLong());
+            builder.setLatency(randomNonNegativeLong());
         }
         if (randomBoolean()) {
             int numBucketSpans = randomIntBetween(0, 10);

@@ -69,7 +69,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
             builder.setOverlappingBuckets(randomBoolean());
         }
         if (randomBoolean()) {
-            builder.setResultFinalizationWindow(randomPositiveLong());
+            builder.setResultFinalizationWindow(randomNonNegativeLong());
         }

         builder.setUsePerPartitionNormalization(false);
@@ -15,7 +15,7 @@ public class AnalysisLimitsTests extends AbstractSerializingTestCase<AnalysisLim

     @Override
     protected AnalysisLimits createTestInstance() {
-        return new AnalysisLimits(randomBoolean() ? randomLong() : null, randomBoolean() ? randomPositiveLong() : null);
+        return new AnalysisLimits(randomBoolean() ? randomLong() : null, randomBoolean() ? randomNonNegativeLong() : null);
     }

     @Override
@@ -472,20 +472,20 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         if (randomBoolean()) {
             builder.setDescription(randomAsciiOfLength(10));
         }
-        builder.setCreateTime(new Date(randomPositiveLong()));
+        builder.setCreateTime(new Date(randomNonNegativeLong()));
         if (randomBoolean()) {
-            builder.setFinishedTime(new Date(randomPositiveLong()));
+            builder.setFinishedTime(new Date(randomNonNegativeLong()));
         }
         if (randomBoolean()) {
-            builder.setLastDataTime(new Date(randomPositiveLong()));
+            builder.setLastDataTime(new Date(randomNonNegativeLong()));
         }
         if (randomBoolean()) {
-            builder.setTimeout(randomPositiveLong());
+            builder.setTimeout(randomNonNegativeLong());
         }
         AnalysisConfig.Builder analysisConfig = createAnalysisConfig();
         analysisConfig.setBucketSpan(100L);
         builder.setAnalysisConfig(analysisConfig);
-        builder.setAnalysisLimits(new AnalysisLimits(randomPositiveLong(), randomPositiveLong()));
+        builder.setAnalysisLimits(new AnalysisLimits(randomNonNegativeLong(), randomNonNegativeLong()));
         if (randomBoolean()) {
             DataDescription.Builder dataDescription = new DataDescription.Builder();
             dataDescription.setFormat(randomFrom(DataDescription.DataFormat.values()));

@@ -514,16 +514,16 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         }
         builder.setIgnoreDowntime(randomFrom(IgnoreDowntime.values()));
         if (randomBoolean()) {
-            builder.setRenormalizationWindowDays(randomPositiveLong());
+            builder.setRenormalizationWindowDays(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            builder.setBackgroundPersistInterval(randomPositiveLong());
+            builder.setBackgroundPersistInterval(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            builder.setModelSnapshotRetentionDays(randomPositiveLong());
+            builder.setModelSnapshotRetentionDays(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            builder.setResultsRetentionDays(randomPositiveLong());
+            builder.setResultsRetentionDays(randomNonNegativeLong());
         }
         if (randomBoolean()) {
             builder.setCustomSettings(Collections.singletonMap(randomAsciiOfLength(10), randomAsciiOfLength(10)));
@@ -57,19 +57,19 @@ public class ModelSizeStatsTests extends AbstractSerializingTestCase<ModelSizeSt
     protected ModelSizeStats createTestInstance() {
         ModelSizeStats.Builder stats = new ModelSizeStats.Builder("foo");
         if (randomBoolean()) {
-            stats.setBucketAllocationFailuresCount(randomPositiveLong());
+            stats.setBucketAllocationFailuresCount(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            stats.setModelBytes(randomPositiveLong());
+            stats.setModelBytes(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            stats.setTotalByFieldCount(randomPositiveLong());
+            stats.setTotalByFieldCount(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            stats.setTotalOverFieldCount(randomPositiveLong());
+            stats.setTotalOverFieldCount(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            stats.setTotalPartitionFieldCount(randomPositiveLong());
+            stats.setTotalPartitionFieldCount(randomNonNegativeLong());
         }
         if (randomBoolean()) {
             stats.setLogTime(new Date(randomLong()));
@@ -170,19 +170,19 @@ public class ModelSnapshotTests extends AbstractSerializingTestCase<ModelSnapsho
         modelSnapshot.setSnapshotDocCount(randomInt());
         ModelSizeStats.Builder stats = new ModelSizeStats.Builder(randomAsciiOfLengthBetween(1, 20));
         if (randomBoolean()) {
-            stats.setBucketAllocationFailuresCount(randomPositiveLong());
+            stats.setBucketAllocationFailuresCount(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            stats.setModelBytes(randomPositiveLong());
+            stats.setModelBytes(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            stats.setTotalByFieldCount(randomPositiveLong());
+            stats.setTotalByFieldCount(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            stats.setTotalOverFieldCount(randomPositiveLong());
+            stats.setTotalOverFieldCount(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            stats.setTotalPartitionFieldCount(randomPositiveLong());
+            stats.setTotalPartitionFieldCount(randomNonNegativeLong());
         }
         if (randomBoolean()) {
             stats.setLogTime(new Date(randomLong()));
@@ -14,12 +14,13 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.LocalTransportAddress;
+import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.prelert.job.JobStatus;
 import org.junit.Before;

+import java.net.InetAddress;
 import java.util.concurrent.ExecutorService;

 import static org.mockito.Mockito.doAnswer;

@@ -65,13 +66,13 @@ public class JobAllocatorTests extends ESTestCase {
         assertFalse("Job is allocate, so nothing to allocate", jobAllocator.shouldAllocate(cs));
     }

-    public void testAssignJobsToNodes() {
+    public void testAssignJobsToNodes() throws Exception {
         PrelertMetadata.Builder pmBuilder = new PrelertMetadata.Builder();
         pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
         ClusterState cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .masterNodeId("_node_id"))
                 .build();
         ClusterState result1 = jobAllocator.assignJobsToNodes(cs1);

@@ -85,8 +86,8 @@ public class JobAllocatorTests extends ESTestCase {
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(
                         DiscoveryNodes.builder()
-                                .add(new DiscoveryNode("_node_id1", new LocalTransportAddress("_id1"), Version.CURRENT))
-                                .add(new DiscoveryNode("_node_id2", new LocalTransportAddress("_id2"), Version.CURRENT))
+                                .add(new DiscoveryNode("_node_id1", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
+                                .add(new DiscoveryNode("_node_id2", new TransportAddress(InetAddress.getLoopbackAddress(), 9201), Version.CURRENT))
                                 .masterNodeId("_node_id1")
                 )
                 .build();

@@ -105,7 +106,7 @@ public class JobAllocatorTests extends ESTestCase {
         ClusterState cs4 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .masterNodeId("_node_id"))
                 .build();
         ClusterState result3 = jobAllocator.assignJobsToNodes(cs4);

@@ -113,7 +114,7 @@ public class JobAllocatorTests extends ESTestCase {
         assertNull("my_job_id must be unallocated, because job has been removed", pm.getAllocations().get("my_job_id"));
     }

-    public void testClusterChanged_onlyAllocateIfMasterAndHaveUnAllocatedJobs() {
+    public void testClusterChanged_onlyAllocateIfMasterAndHaveUnAllocatedJobs() throws Exception {
         ExecutorService executorService = mock(ExecutorService.class);
         doAnswer(invocation -> {
             ((Runnable) invocation.getArguments()[0]).run();

@@ -125,7 +126,7 @@ public class JobAllocatorTests extends ESTestCase {
         ClusterState cs = ClusterState.builder(new ClusterName("_name"))
                 .metaData(MetaData.builder().putCustom(PrelertMetadata.TYPE, new PrelertMetadata.Builder().build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_id")
                 )
                 .build();

@@ -137,7 +138,7 @@ public class JobAllocatorTests extends ESTestCase {
         cs = ClusterState.builder(new ClusterName("_name"))
                 .metaData(MetaData.builder().putCustom(PrelertMetadata.TYPE, new PrelertMetadata.Builder().build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .masterNodeId("_id")
                         .localNodeId("_id")
                 )

@@ -152,7 +153,7 @@ public class JobAllocatorTests extends ESTestCase {
         pmBuilder.assignToNode("my_job_id", "_node_id");
         cs = ClusterState.builder(new ClusterName("_name"))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .masterNodeId("_id")
                         .localNodeId("_id")
                 )

@@ -167,7 +168,7 @@ public class JobAllocatorTests extends ESTestCase {
         pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
         cs = ClusterState.builder(new ClusterName("_name"))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .masterNodeId("_id")
                         .localNodeId("_id")
                 )
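JobAllocatorTests and the two service test classes below pick up the same 6.0 migration: LocalTransportAddress was removed, so every DiscoveryNode is now built over a real TransportAddress bound to the loopback interface, which also pulls in the java.net.InetAddress import; the affected test methods gain a throws Exception clause, presumably to absorb checked exceptions in the reworked setup. A minimal helper sketch using only the constructors visible in the hunks; the helper class and method name are ours, not the commit's:

    import java.net.InetAddress;

    import org.elasticsearch.Version;
    import org.elasticsearch.cluster.node.DiscoveryNode;
    import org.elasticsearch.common.transport.TransportAddress;

    final class TestNodes {
        // e.g. loopbackNode("_node_id", 9200) reproduces the pattern above
        static DiscoveryNode loopbackNode(String id, int port) {
            TransportAddress address = new TransportAddress(InetAddress.getLoopbackAddress(), port);
            return new DiscoveryNode(id, address, Version.CURRENT);
        }
    }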
@@ -16,13 +16,15 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.LocalTransportAddress;
+import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.prelert.action.UpdateJobStatusAction;
 import org.elasticsearch.xpack.prelert.job.JobStatus;
 import org.elasticsearch.xpack.prelert.job.data.DataProcessor;
 import org.junit.Before;

+import java.net.InetAddress;
+
 import static org.elasticsearch.xpack.prelert.job.JobTests.buildJobBuilder;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.eq;

@@ -58,13 +60,13 @@ public class JobLifeCycleServiceTests extends ESTestCase {
         verify(dataProcessor).closeJob("my_job_id");
     }

-    public void testClusterChanged_startJob() {
+    public void testClusterChanged_startJob() throws Exception {
         PrelertMetadata.Builder pmBuilder = new PrelertMetadata.Builder();
         pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
         ClusterState cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_node_id"))
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));

@@ -76,7 +78,7 @@ public class JobLifeCycleServiceTests extends ESTestCase {
         cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_node_id"))
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));

@@ -89,7 +91,7 @@ public class JobLifeCycleServiceTests extends ESTestCase {
         cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_node_id"))
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));

@@ -101,7 +103,7 @@ public class JobLifeCycleServiceTests extends ESTestCase {
         verify(dataProcessor, times(1)).openJob("my_job_id", false);
     }

-    public void testClusterChanged_stopJob() {
+    public void testClusterChanged_stopJob() throws Exception {
         jobLifeCycleService.localAssignedJobs.add("my_job_id");

         PrelertMetadata.Builder pmBuilder = new PrelertMetadata.Builder();

@@ -109,7 +111,7 @@ public class JobLifeCycleServiceTests extends ESTestCase {
         ClusterState cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_node_id"))
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));

@@ -124,7 +126,7 @@ public class JobLifeCycleServiceTests extends ESTestCase {
         cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_node_id"))
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));

@@ -132,7 +134,7 @@ public class JobLifeCycleServiceTests extends ESTestCase {
         verify(dataProcessor, times(1)).closeJob("my_job_id");
     }

-    public void testClusterChanged_allocationDeletingJob() {
+    public void testClusterChanged_allocationDeletingJob() throws Exception {
         jobLifeCycleService.localAssignedJobs.add("my_job_id");

         PrelertMetadata.Builder pmBuilder = new PrelertMetadata.Builder();

@@ -141,7 +143,7 @@ public class JobLifeCycleServiceTests extends ESTestCase {
         ClusterState cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_node_id"))
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));

@@ -152,7 +154,7 @@ public class JobLifeCycleServiceTests extends ESTestCase {
         ClusterState cs2 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_node_id"))
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs2, cs1));
@@ -15,12 +15,13 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.LocalTransportAddress;
+import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.prelert.job.persistence.AnomalyDetectorsIndex;
 import org.elasticsearch.xpack.prelert.job.persistence.JobProvider;

+import java.net.InetAddress;
 import java.util.concurrent.ExecutorService;

 import static org.elasticsearch.mock.orig.Mockito.doAnswer;

@@ -33,7 +34,7 @@ import static org.mockito.Mockito.when;

 public class PrelertInitializationServiceTests extends ESTestCase {

-    public void testInitialize() {
+    public void testInitialize() throws Exception {
         ThreadPool threadPool = mock(ThreadPool.class);
         ExecutorService executorService = mock(ExecutorService.class);
         doAnswer(invocation -> {

@@ -49,7 +50,7 @@ public class PrelertInitializationServiceTests extends ESTestCase {

         ClusterState cs = ClusterState.builder(new ClusterName("_name"))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_node_id")
                         .masterNodeId("_node_id"))
                 .metaData(MetaData.builder())

@@ -61,7 +62,7 @@ public class PrelertInitializationServiceTests extends ESTestCase {
         verify(jobProvider, times(1)).createJobStateIndex(any());
     }

-    public void testInitialize_noMasterNode() {
+    public void testInitialize_noMasterNode() throws Exception {
         ThreadPool threadPool = mock(ThreadPool.class);
         ExecutorService executorService = mock(ExecutorService.class);
         doAnswer(invocation -> {

@@ -77,7 +78,7 @@ public class PrelertInitializationServiceTests extends ESTestCase {

         ClusterState cs = ClusterState.builder(new ClusterName("_name"))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT)))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT)))
                 .metaData(MetaData.builder())
                 .build();
         initializationService.clusterChanged(new ClusterChangedEvent("_source", cs, cs));

@@ -86,7 +87,7 @@ public class PrelertInitializationServiceTests extends ESTestCase {
         verify(jobProvider, times(0)).createUsageMeteringIndex(any());
     }

-    public void testInitialize_alreadyInitialized() {
+    public void testInitialize_alreadyInitialized() throws Exception {
         ThreadPool threadPool = mock(ThreadPool.class);
         ExecutorService executorService = mock(ExecutorService.class);
         doAnswer(invocation -> {

@@ -102,7 +103,7 @@ public class PrelertInitializationServiceTests extends ESTestCase {

         ClusterState cs = ClusterState.builder(new ClusterName("_name"))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_node_id")
                         .masterNodeId("_node_id"))
                 .metaData(MetaData.builder()

@@ -125,7 +126,7 @@ public class PrelertInitializationServiceTests extends ESTestCase {
         verify(jobProvider, times(0)).createJobStateIndex(any());
     }

-    public void testInitialize_onlyOnce() {
+    public void testInitialize_onlyOnce() throws Exception {
         ThreadPool threadPool = mock(ThreadPool.class);
         ExecutorService executorService = mock(ExecutorService.class);
         doAnswer(invocation -> {

@@ -141,7 +142,7 @@ public class PrelertInitializationServiceTests extends ESTestCase {

         ClusterState cs = ClusterState.builder(new ClusterName("_name"))
                 .nodes(DiscoveryNodes.builder()
-                        .add(new DiscoveryNode("_node_id", new LocalTransportAddress("_id"), Version.CURRENT))
+                        .add(new DiscoveryNode("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9200), Version.CURRENT))
                         .localNodeId("_node_id")
                         .masterNodeId("_node_id"))
                 .metaData(MetaData.builder())
@@ -20,7 +20,7 @@ public class QueryPageTests extends AbstractWireSerializingTestCase<QueryPage<In
         ArrayList<Influencer> hits = new ArrayList<>();
         for (int i = 0; i < hitCount; i++) {
             hits.add(new Influencer(randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(1, 20),
-                    randomAsciiOfLengthBetween(1, 20), new Date(), randomPositiveLong(), i + 1));
+                    randomAsciiOfLengthBetween(1, 20), new Date(), randomNonNegativeLong(), i + 1));
         }
         return new QueryPage<>(hits, hitCount, new ParseField("test"));
     }
@@ -23,7 +23,7 @@ public class AnomalyRecordTests extends AbstractSerializingTestCase<AnomalyRecor
     }

     public AnomalyRecord createTestInstance(String jobId, int sequenceNum) {
-        AnomalyRecord anomalyRecord = new AnomalyRecord(jobId, new Date(randomPositiveLong()), randomPositiveLong(), sequenceNum);
+        AnomalyRecord anomalyRecord = new AnomalyRecord(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong(), sequenceNum);
         anomalyRecord.setActual(Collections.singletonList(randomDouble()));
         anomalyRecord.setTypical(Collections.singletonList(randomDouble()));
         anomalyRecord.setAnomalyScore(randomDouble());
@@ -38,7 +38,7 @@ public class AutodetectResultTests extends AbstractSerializingTestCase<Autodetec
         FlushAcknowledgement flushAcknowledgement;
         String jobId = "foo";
         if (randomBoolean()) {
-            bucket = new Bucket(jobId, new Date(randomLong()), randomPositiveLong());
+            bucket = new Bucket(jobId, new Date(randomLong()), randomNonNegativeLong());
         } else {
             bucket = null;
         }

@@ -46,7 +46,7 @@ public class AutodetectResultTests extends AbstractSerializingTestCase<Autodetec
             int size = randomInt(10);
             records = new ArrayList<>(size);
             for (int i = 0; i < size; i++) {
-                AnomalyRecord record = new AnomalyRecord(jobId, new Date(randomLong()), randomPositiveLong(), i + 1);
+                AnomalyRecord record = new AnomalyRecord(jobId, new Date(randomLong()), randomNonNegativeLong(), i + 1);
                 record.setProbability(randomDoubleBetween(0.0, 1.0, true));
                 records.add(record);
             }

@@ -57,7 +57,7 @@ public class AutodetectResultTests extends AbstractSerializingTestCase<Autodetec
             influencers = new ArrayList<>(size);
             for (int i = 0; i < size; i++) {
                 Influencer influencer = new Influencer(jobId, randomAsciiOfLength(10), randomAsciiOfLength(10),
-                        new Date(randomLong()), randomPositiveLong(), i + 1);
+                        new Date(randomLong()), randomNonNegativeLong(), i + 1);
                 influencer.setProbability(randomDoubleBetween(0.0, 1.0, true));
                 influencers.add(influencer);
             }
@@ -16,8 +16,8 @@ public class BucketInfluencerTests extends AbstractSerializingTestCase<BucketInf

     @Override
     protected BucketInfluencer createTestInstance() {
-        BucketInfluencer bucketInfluencer = new BucketInfluencer(randomAsciiOfLengthBetween(1, 20), new Date(randomPositiveLong()),
-                randomPositiveLong(), randomIntBetween(1, 1000));
+        BucketInfluencer bucketInfluencer = new BucketInfluencer(randomAsciiOfLengthBetween(1, 20), new Date(randomNonNegativeLong()),
+                randomNonNegativeLong(), randomIntBetween(1, 1000));
         if (randomBoolean()) {
             bucketInfluencer.setAnomalyScore(randomDouble());
         }
@@ -26,7 +26,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public Bucket createTestInstance(String jobId) {
-        Bucket bucket = new Bucket(jobId, new Date(randomPositiveLong()), randomPositiveLong());
+        Bucket bucket = new Bucket(jobId, new Date(randomNonNegativeLong()), randomNonNegativeLong());
         if (randomBoolean()) {
             bucket.setAnomalyScore(randomDouble());
         }

@@ -45,7 +45,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
             bucket.setBucketInfluencers(bucketInfluencers);
         }
         if (randomBoolean()) {
-            bucket.setEventCount(randomPositiveLong());
+            bucket.setEventCount(randomNonNegativeLong());
         }
         if (randomBoolean()) {
             bucket.setInitialAnomalyScore(randomDouble());

@@ -103,7 +103,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
     }

     public void testEquals_GivenDifferentClass() {
-        Bucket bucket = new Bucket("foo", new Date(randomLong()), randomPositiveLong());
+        Bucket bucket = new Bucket("foo", new Date(randomLong()), randomNonNegativeLong());
         assertFalse(bucket.equals("a string"));
     }
@@ -16,7 +16,7 @@ public class InfluencerTests extends AbstractSerializingTestCase<Influencer> {

     public Influencer createTestInstance(String jobId) {
         Influencer influencer = new Influencer(jobId, randomAsciiOfLengthBetween(1, 20), randomAsciiOfLengthBetween(1, 20),
-                new Date(randomPositiveLong()), randomPositiveLong(), randomIntBetween(1, 1000));
+                new Date(randomNonNegativeLong()), randomNonNegativeLong(), randomIntBetween(1, 1000));
         influencer.setInterim(randomBoolean());
         influencer.setAnomalyScore(randomDouble());
         influencer.setInitialAnomalyScore(randomDouble());
@@ -26,7 +26,7 @@ public class PerPartitionMaxProbabilitiesTests extends AbstractSerializingTestCa
         }

         return new PerPartitionMaxProbabilities(randomAsciiOfLength(20), new DateTime(randomDateTimeZone()).toDate(),
-                randomPositiveLong(), pps);
+                randomNonNegativeLong(), pps);
     }

     @Override
@@ -57,10 +57,10 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
             builder.setScrollSize(randomIntBetween(0, Integer.MAX_VALUE));
         }
         if (randomBoolean()) {
-            builder.setFrequency(randomPositiveLong());
+            builder.setFrequency(randomNonNegativeLong());
         }
         if (randomBoolean()) {
-            builder.setQueryDelay(randomPositiveLong());
+            builder.setQueryDelay(randomNonNegativeLong());
         }
         return builder.build();
     }
@@ -1,3 +1,3 @@
 org.gradle.daemon=false

-elasticsearchVersion=5.3.0-SNAPSHOT
+elasticsearchVersion=6.0.0-alpha1-SNAPSHOT