Disallow non alphanumeric chars from start or end in IDs (elastic/elasticsearch#552)
Closes elastic/elasticsearch#550

Original commit: elastic/x-pack-elasticsearch@ca0ca9f4da
parent bc8d966690
commit 8841a97659

@@ -705,7 +705,7 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
             throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_ID_TOO_LONG, MAX_JOB_ID_LENGTH));
         }
         if (!PrelertStrings.isValidId(id)) {
-            throw new IllegalArgumentException(Messages.getMessage(Messages.JOB_CONFIG_INVALID_JOBID_CHARS));
+            throw new IllegalArgumentException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName()));
         }
         return new Job(
                 id, description, createTime, finishedTime, lastDataTime, timeout, analysisConfig, analysisLimits,

@@ -583,7 +583,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
         ExceptionsHelper.requireNonNull(id, ID.getPreferredName());
         ExceptionsHelper.requireNonNull(jobId, Job.ID.getPreferredName());
         if (!PrelertStrings.isValidId(id)) {
-            throw new IllegalArgumentException(ID.getPreferredName() + " [" + id + "] contains invalid characters");
+            throw new IllegalArgumentException(Messages.getMessage(Messages.INVALID_ID, ID.getPreferredName()));
         }
         if (indexes == null || indexes.isEmpty() || indexes.contains(null) || indexes.contains("")) {
             throw invalidOptionValue(INDEXES.getPreferredName(), indexes);

@@ -27,6 +27,8 @@ public final class Messages {
     public static final String DATASTORE_ERROR_DELETING_MISSING_INDEX = "datastore.error.deleting.missing.index";
     public static final String DATASTORE_ERROR_EXECUTING_SCRIPT = "datastore.error.executing.script";
 
+    public static final String INVALID_ID = "invalid.id";
+
     public static final String LICENSE_LIMIT_DETECTORS = "license.limit.detectors";
     public static final String LICENSE_LIMIT_JOBS = "license.limit.jobs";
     public static final String LICENSE_LIMIT_DETECTORS_REACTIVATE = "license.limit.detectors.reactivate";

@@ -108,7 +110,6 @@ public final class Messages {
     public static final String JOB_CONFIG_ID_TOO_LONG = "job.config.id.too.long";
     public static final String JOB_CONFIG_ID_ALREADY_TAKEN = "job.config.id.already.taken";
     public static final String JOB_CONFIG_INVALID_FIELDNAME_CHARS = "job.config.invalid.fieldname.chars";
-    public static final String JOB_CONFIG_INVALID_JOBID_CHARS = "job.config.invalid.jobid.chars";
     public static final String JOB_CONFIG_INVALID_TIMEFORMAT = "job.config.invalid.timeformat";
     public static final String JOB_CONFIG_FUNCTION_INCOMPATIBLE_PRESUMMARIZED = "job.config.function.incompatible.presummarized";
     public static final String JOB_CONFIG_MISSING_ANALYSISCONFIG = "job.config.missing.analysisconfig";

@@ -16,9 +16,12 @@ public final class PrelertStrings {
     private static final Pattern NEEDS_QUOTING = Pattern.compile("\\W");
 
     /**
-     * Valid user entered id characters. Note that '.' is allowed but not documented.
+     * Valid user id pattern.
+     * Matches a string that contains lower case characters, digits, hyphens, underscores or dots.
+     * The string may start and end only in lower case characters or digits.
+     * Note that '.' is allowed but not documented.
      */
-    private static final Pattern VALID_ID_CHAR_PATTERN = Pattern.compile("[a-z0-9_\\-\\.]+");
+    private static final Pattern VALID_ID_CHAR_PATTERN = Pattern.compile("[a-z0-9](?:[a-z0-9_\\-\\.]*[a-z0-9])?");
 
     private PrelertStrings() {
     }

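Illustration (not part of the diff): a minimal standalone Java sketch of what the pattern change above does. It compiles the old and new regexes exactly as they appear in the hunk and prints whether a few sample IDs match each; the class name and the sample IDs are made up for this example.

import java.util.regex.Pattern;

public class IdPatternDemo {

    // Old pattern from PrelertStrings: any mix of lowercase letters, digits, '_', '-' and '.'
    private static final Pattern OLD_PATTERN = Pattern.compile("[a-z0-9_\\-\\.]+");

    // New pattern: same character set, but the first and last characters must be a lowercase letter or digit
    private static final Pattern NEW_PATTERN = Pattern.compile("[a-z0-9](?:[a-z0-9_\\-\\.]*[a-z0-9])?");

    public static void main(String[] args) {
        String[] ids = {"my_job_id", "a-b.c_d", "_foo", "foo_", "-job", "A"};
        for (String id : ids) {
            System.out.println(id + "  old=" + OLD_PATTERN.matcher(id).matches()
                    + "  new=" + NEW_PATTERN.matcher(id).matches());
        }
        // "_foo", "foo_" and "-job" matched the old pattern but are rejected by the new one;
        // "A" is rejected by both, since uppercase characters were never allowed.
    }
}
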
@@ -10,6 +10,8 @@ datastore.error.deleting = Error deleting index ''{0}''
 datastore.error.deleting.missing.index = Cannot delete job - no index with id ''{0}'' in the database
 datastore.error.executing.script = Error executing script ''{0}''
 
+invalid.id = Invalid {0}; must be lowercase alphanumeric, may contain hyphens or underscores, may not start with underscore
+
 license.limit.detectors = Cannot create new job - your license limits you to {0,number,integer} detector(s), but you have configured {1,number,integer}.
 license.limit.detectors.reactivate = Cannot reactivate job with id ''{0}'' - your license limits you to {1,number,integer} concurrently running detectors. You must close a job before you can reactivate another.
 license.limit.jobs = Cannot create new job - your license limits you to {0,number,integer} concurrently running job(s). You must close a job before you can create a new one.

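Illustration (not part of the diff): the new invalid.id entry uses java.text.MessageFormat placeholder syntax, and Messages.getMessage presumably fills {0} with the offending field's name (for example Job.ID.getPreferredName()). A small sketch under that assumption, with the literal "job_id" standing in for the field name:

import java.text.MessageFormat;

public class InvalidIdMessageDemo {
    public static void main(String[] args) {
        // Template copied verbatim from the properties hunk above.
        String template = "Invalid {0}; must be lowercase alphanumeric, may contain hyphens or underscores, may not start with underscore";
        // "job_id" is an assumed value for illustration only.
        System.out.println(MessageFormat.format(template, "job_id"));
        // Prints: Invalid job_id; must be lowercase alphanumeric, may contain hyphens or underscores, may not start with underscore
    }
}
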
@@ -79,7 +81,6 @@ job.config.function.incompatible.presummarized = The ''{0}'' function cannot be
 job.config.id.already.taken = The job cannot be created with the Id ''{0}''. The Id is already used.
 job.config.id.too.long = The job id cannot contain more than {0,number,integer} characters.
 job.config.invalid.fieldname.chars = Invalid field name ''{0}''. Field names including over, by and partition fields cannot contain any of these characters: {1}
-job.config.invalid.jobid.chars = Invalid job id; must be lowercase alphanumeric and may contain hyphens or underscores
 job.config.invalid.timeformat = Invalid Time format string ''{0}''
 job.config.missing.analysisconfig = An analysis_config must be set
 job.config.model.debug.config.invalid.bounds.percentile = Invalid model_debug_config: bounds_percentile must be in the range [0, 100]

@@ -84,12 +84,12 @@ public class ScheduledJobsIT extends ESIntegTestCase {
         startSchedulerRequest.setEndTime(now);
         client().execute(StartSchedulerAction.INSTANCE, startSchedulerRequest).get();
         assertBusy(() -> {
-            DataCounts dataCounts = getDataCounts("_job_id");
+            DataCounts dataCounts = getDataCounts(job.getId());
             assertThat(dataCounts.getInputRecordCount(), equalTo(numDocs));
 
             PrelertMetadata prelertMetadata = client().admin().cluster().prepareState().all().get()
                     .getState().metaData().custom(PrelertMetadata.TYPE);
-            assertThat(prelertMetadata.getSchedulerStatusByJobId("_job_id").get(), equalTo(SchedulerStatus.STOPPED));
+            assertThat(prelertMetadata.getSchedulerStatusByJobId(job.getId()).get(), equalTo(SchedulerStatus.STOPPED));
         });
     }
 

@@ -125,7 +125,7 @@ public class ScheduledJobsIT extends ESIntegTestCase {
         });
         t.start();
         assertBusy(() -> {
-            DataCounts dataCounts = getDataCounts("_job_id");
+            DataCounts dataCounts = getDataCounts(job.getId());
             assertThat(dataCounts.getInputRecordCount(), equalTo(numDocs1));
         });
 

@@ -133,7 +133,7 @@ public class ScheduledJobsIT extends ESIntegTestCase {
         now = System.currentTimeMillis();
         indexDocs(numDocs2, now + 5000, now + 6000);
         assertBusy(() -> {
-            DataCounts dataCounts = getDataCounts("_job_id");
+            DataCounts dataCounts = getDataCounts(job.getId());
             assertThat(dataCounts.getInputRecordCount(), equalTo(numDocs1 + numDocs2));
         }, 30, TimeUnit.SECONDS);
 

@@ -143,7 +143,7 @@ public class ScheduledJobsIT extends ESIntegTestCase {
         assertBusy(() -> {
             PrelertMetadata prelertMetadata = client().admin().cluster().prepareState().all().get()
                     .getState().metaData().custom(PrelertMetadata.TYPE);
-            assertThat(prelertMetadata.getSchedulerStatusByJobId("_job_id").get(), equalTo(SchedulerStatus.STOPPED));
+            assertThat(prelertMetadata.getSchedulerStatusByJobId(job.getId()).get(), equalTo(SchedulerStatus.STOPPED));
         });
         assertThat(errorHolder.get(), nullValue());
     }

@@ -174,7 +174,7 @@ public class ScheduledJobsIT extends ESIntegTestCase {
         AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build()));
 
         Job.Builder builder = new Job.Builder();
-        builder.setId("_job_id");
+        builder.setId("my_job_id");
 
         builder.setAnalysisConfig(analysisConfig);
         builder.setDataDescription(dataDescription);

@@ -194,7 +194,7 @@ public class ScheduledJobsIT extends ESIntegTestCase {
         GetResponse getResponse = client().prepareGet(JobResultsPersister.getJobIndexName(jobId),
                 DataCounts.TYPE.getPreferredName(), jobId + "-data-counts").get();
         if (getResponse.isExists() == false) {
-            return new DataCounts("_job_id");
+            return new DataCounts(jobId);
         }
 
         try (XContentParser parser = XContentHelper.createParser(getResponse.getSourceAsBytesRef())) {

@@ -26,10 +26,10 @@ import static org.hamcrest.Matchers.equalTo;
 public class ScheduledJobIT extends ESRestTestCase {
 
     public void testStartJobScheduler_GivenLookbackOnly() throws Exception {
-        String jobId = "_id2";
+        String jobId = "job-1";
         createAirlineDataIndex();
         createJob(jobId);
-        String schedulerId = "_sched1";
+        String schedulerId = "sched-1";
         createScheduler(schedulerId, jobId);
         openJob(client(), jobId);
 

@@ -49,10 +49,10 @@ public class ScheduledJobIT extends ESRestTestCase {
     }
 
     public void testStartJobScheduler_GivenRealtime() throws Exception {
-        String jobId = "_id3";
+        String jobId = "job-2";
         createAirlineDataIndex();
         createJob(jobId);
-        String schedulerId = "_sched1";
+        String schedulerId = "sched-2";
         createScheduler(schedulerId, jobId);
         openJob(client(), jobId);
 

@@ -228,14 +228,14 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
 
     public void testCheckValidId_GivenAllValidChars() {
         Job.Builder builder = buildJobBuilder("foo");
-        builder.setId("abcdefghijklmnopqrstuvwxyz-0123456789_.");
+        builder.setId("abcdefghijklmnopqrstuvwxyz-._0123456789");
         builder.build();
     }
 
     public void testCheckValidId_ProhibitedChars() {
         String invalidChars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%^&*()+?\"'~±/\\[]{},<>=";
         Job.Builder builder = buildJobBuilder("foo");
-        String errorMessage = Messages.getMessage(Messages.JOB_CONFIG_INVALID_JOBID_CHARS);
+        String errorMessage = Messages.getMessage(Messages.INVALID_ID, Job.ID.getPreferredName());
         for (char c : invalidChars.toCharArray()) {
             builder.setId(Character.toString(c));
             IllegalArgumentException e = expectThrows(IllegalArgumentException.class, builder::build);

@@ -243,6 +243,20 @@ public class JobTests extends AbstractSerializingTestCase<Job> {
         }
     }
 
+    public void testCheckValidId_startsWithUnderscore() {
+        Job.Builder builder = buildJobBuilder("_foo");
+        String errorMessage = Messages.getMessage(Messages.INVALID_ID, Job.ID.getPreferredName());
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, builder::build);
+        assertEquals(errorMessage, e.getMessage());
+    }
+
+    public void testCheckValidId_endsWithUnderscore() {
+        Job.Builder builder = buildJobBuilder("foo_");
+        String errorMessage = Messages.getMessage(Messages.INVALID_ID, Job.ID.getPreferredName());
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, builder::build);
+        assertEquals(errorMessage, e.getMessage());
+    }
+
     public void testCheckValidId_ControlChars() {
         Job.Builder builder = buildJobBuilder("foo");
         builder.setId("two\nlines");

@@ -280,8 +280,8 @@ public class AutodetectProcessManagerTests extends ESTestCase {
         ExecutorService executorService = mock(ExecutorService.class);
         doThrow(new EsRejectedExecutionException("")).when(executorService).execute(any());
         when(threadPool.executor(anyString())).thenReturn(executorService);
-        when(jobManager.getJobOrThrowIfUnknown("_id")).thenReturn(createJobDetails("_id"));
-        when(jobProvider.dataCounts("_id")).thenReturn(new DataCounts("_id"));
+        when(jobManager.getJobOrThrowIfUnknown("my_id")).thenReturn(createJobDetails("my_id"));
+        when(jobProvider.dataCounts("my_id")).thenReturn(new DataCounts("my_id"));
 
         Set<Setting<?>> settingSet = new HashSet<>();
         settingSet.addAll(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);

@@ -294,7 +294,7 @@ public class AutodetectProcessManagerTests extends ESTestCase {
                 jobResultsPersister, jobRenormalizedResultsPersister, jobDataCountsPersister, parser, autodetectProcessFactory,
                 normalizerFactory, clusterSettings);
 
-        expectThrows(EsRejectedExecutionException.class, () -> manager.create("_id", false));
+        expectThrows(EsRejectedExecutionException.class, () -> manager.create("my_id", false));
         verify(autodetectProcess, times(1)).close();
     }
 

@@ -50,14 +50,14 @@ public class JobAllocatorTests extends ESTestCase {
         assertFalse("No jobs, so nothing to allocate", jobAllocator.shouldAllocate(cs));
 
         PrelertMetadata.Builder pmBuilder = new PrelertMetadata.Builder(cs.metaData().custom(PrelertMetadata.TYPE));
-        pmBuilder.putJob((buildJobBuilder("_job_id").build()), false);
+        pmBuilder.putJob((buildJobBuilder("my_job_id").build()), false);
         cs = ClusterState.builder(cs).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .build();
         assertTrue("A unassigned job, so we should allocate", jobAllocator.shouldAllocate(cs));
 
         pmBuilder = new PrelertMetadata.Builder(cs.metaData().custom(PrelertMetadata.TYPE));
-        pmBuilder.assignToNode("_job_id", "_node_id");
+        pmBuilder.assignToNode("my_job_id", "_node_id");
         cs = ClusterState.builder(cs).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .build();

@@ -66,7 +66,7 @@ public class JobAllocatorTests extends ESTestCase {
 
     public void testAssignJobsToNodes() {
         PrelertMetadata.Builder pmBuilder = new PrelertMetadata.Builder();
-        pmBuilder.putJob(buildJobBuilder("_job_id").build(), false);
+        pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
         ClusterState cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()

@@ -75,7 +75,7 @@ public class JobAllocatorTests extends ESTestCase {
                 .build();
         ClusterState result1 = jobAllocator.assignJobsToNodes(cs1);
         PrelertMetadata pm = result1.metaData().custom(PrelertMetadata.TYPE);
-        assertEquals("_job_id must be allocated to _node_id", pm.getAllocations().get("_job_id").getNodeId(), "_node_id");
+        assertEquals("my_job_id must be allocated to _node_id", pm.getAllocations().get("my_job_id").getNodeId(), "_node_id");
 
         ClusterState result2 = jobAllocator.assignJobsToNodes(result1);
         assertSame("job has been allocated, same instance must be returned", result1, result2);

@@ -99,7 +99,7 @@ public class JobAllocatorTests extends ESTestCase {
         expectThrows(IllegalStateException.class, () -> jobAllocator.assignJobsToNodes(cs3));
 
         pmBuilder = new PrelertMetadata.Builder(result1.getMetaData().custom(PrelertMetadata.TYPE));
-        pmBuilder.removeJob("_job_id");
+        pmBuilder.removeJob("my_job_id");
         ClusterState cs4 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()

@@ -108,7 +108,7 @@ public class JobAllocatorTests extends ESTestCase {
                 .build();
         ClusterState result3 = jobAllocator.assignJobsToNodes(cs4);
         pm = result3.metaData().custom(PrelertMetadata.TYPE);
-        assertNull("_job_id must be unallocated, because job has been removed", pm.getAllocations().get("_job_id"));
+        assertNull("my_job_id must be unallocated, because job has been removed", pm.getAllocations().get("my_job_id"));
     }
 
     public void testClusterChanged_onlyAllocateIfMasterAndHaveUnAllocatedJobs() {

@@ -146,8 +146,8 @@ public class JobAllocatorTests extends ESTestCase {
 
         // add an allocated job
         PrelertMetadata.Builder pmBuilder = new PrelertMetadata.Builder();
-        pmBuilder.putJob(buildJobBuilder("_id").build(), false);
-        pmBuilder.assignToNode("_id", "_node_id");
+        pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
+        pmBuilder.assignToNode("my_job_id", "_node_id");
         cs = ClusterState.builder(new ClusterName("_name"))
                 .nodes(DiscoveryNodes.builder()
                         .add(new DiscoveryNode("_id", new LocalTransportAddress("_id"), Version.CURRENT))

@@ -162,7 +162,7 @@ public class JobAllocatorTests extends ESTestCase {
 
         // make job not allocated
         pmBuilder = new PrelertMetadata.Builder();
-        pmBuilder.putJob(buildJobBuilder("_job_id").build(), false);
+        pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
         cs = ClusterState.builder(new ClusterName("_name"))
                 .nodes(DiscoveryNodes.builder()
                         .add(new DiscoveryNode("_id", new LocalTransportAddress("_id"), Version.CURRENT))

@@ -46,19 +46,19 @@ public class JobLifeCycleServiceTests extends ESTestCase {
 
     public void testStartStop() {
         Allocation.Builder allocation = new Allocation.Builder();
-        allocation.setJobId("_job_id");
+        allocation.setJobId("my_job_id");
         jobLifeCycleService.startJob(allocation.build());
-        assertTrue(jobLifeCycleService.localAssignedJobs.contains("_job_id"));
-        verify(dataProcessor).openJob("_job_id", false);
+        assertTrue(jobLifeCycleService.localAssignedJobs.contains("my_job_id"));
+        verify(dataProcessor).openJob("my_job_id", false);
 
-        jobLifeCycleService.stopJob("_job_id");
+        jobLifeCycleService.stopJob("my_job_id");
         assertTrue(jobLifeCycleService.localAssignedJobs.isEmpty());
-        verify(dataProcessor).closeJob("_job_id");
+        verify(dataProcessor).closeJob("my_job_id");
     }
 
     public void testClusterChanged_startJob() {
         PrelertMetadata.Builder pmBuilder = new PrelertMetadata.Builder();
-        pmBuilder.putJob(buildJobBuilder("_job_id").build(), false);
+        pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
         ClusterState cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()

@@ -66,12 +66,11 @@ public class JobLifeCycleServiceTests extends ESTestCase {
                 .localNodeId("_node_id"))
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));
-        assertFalse("not allocated to a node",
-                jobLifeCycleService.localAssignedJobs.contains("_job_id"));
+        assertFalse("not allocated to a node", jobLifeCycleService.localAssignedJobs.contains("my_job_id"));
 
         pmBuilder = new PrelertMetadata.Builder();
-        pmBuilder.putJob(buildJobBuilder("_job_id").build(), false);
-        pmBuilder.updateStatus("_job_id", JobStatus.OPENING, null);
+        pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
+        pmBuilder.updateStatus("my_job_id", JobStatus.OPENING, null);
         cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()

@@ -79,13 +78,12 @@ public class JobLifeCycleServiceTests extends ESTestCase {
                 .localNodeId("_node_id"))
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));
-        assertFalse("Status not started",
-                jobLifeCycleService.localAssignedJobs.contains("_job_id"));
+        assertFalse("Status not started", jobLifeCycleService.localAssignedJobs.contains("my_job_id"));
 
         pmBuilder = new PrelertMetadata.Builder();
-        pmBuilder.putJob(buildJobBuilder("_job_id").build(), false);
-        pmBuilder.updateStatus("_job_id", JobStatus.OPENING, null);
-        pmBuilder.assignToNode("_job_id", "_node_id");
+        pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
+        pmBuilder.updateStatus("my_job_id", JobStatus.OPENING, null);
+        pmBuilder.assignToNode("my_job_id", "_node_id");
         cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()

@@ -93,19 +91,19 @@ public class JobLifeCycleServiceTests extends ESTestCase {
                 .localNodeId("_node_id"))
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));
-        assertTrue("Expect allocation, because job allocation says _job_id should be allocated locally",
-                jobLifeCycleService.localAssignedJobs.contains("_job_id"));
-        verify(dataProcessor, times(1)).openJob("_job_id", false);
+        assertTrue("Expect allocation, because job allocation says my_job_id should be allocated locally",
+                jobLifeCycleService.localAssignedJobs.contains("my_job_id"));
+        verify(dataProcessor, times(1)).openJob("my_job_id", false);
 
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));
-        verify(dataProcessor, times(1)).openJob("_job_id", false);
+        verify(dataProcessor, times(1)).openJob("my_job_id", false);
     }
 
     public void testClusterChanged_stopJob() {
-        jobLifeCycleService.localAssignedJobs.add("_job_id");
+        jobLifeCycleService.localAssignedJobs.add("my_job_id");
 
         PrelertMetadata.Builder pmBuilder = new PrelertMetadata.Builder();
-        pmBuilder.putJob(buildJobBuilder("_job_id").build(), false);
+        pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
         ClusterState cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()

@@ -116,11 +114,11 @@ public class JobLifeCycleServiceTests extends ESTestCase {
         assertEquals("Status is not closing, so nothing happened", jobLifeCycleService.localAssignedJobs.size(), 1);
 
         pmBuilder = new PrelertMetadata.Builder();
-        pmBuilder.putJob(buildJobBuilder("_job_id").build(), false);
-        pmBuilder.updateStatus("_job_id", JobStatus.OPENING, null);
-        pmBuilder.updateStatus("_job_id", JobStatus.OPENED, null);
-        pmBuilder.updateStatus("_job_id", JobStatus.CLOSING, null);
-        pmBuilder.assignToNode("_job_id", "_node_id");
+        pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
+        pmBuilder.updateStatus("my_job_id", JobStatus.OPENING, null);
+        pmBuilder.updateStatus("my_job_id", JobStatus.OPENED, null);
+        pmBuilder.updateStatus("my_job_id", JobStatus.CLOSING, null);
+        pmBuilder.assignToNode("my_job_id", "_node_id");
         cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()

@@ -129,15 +127,15 @@ public class JobLifeCycleServiceTests extends ESTestCase {
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));
         assertEquals(jobLifeCycleService.localAssignedJobs.size(), 0);
-        verify(dataProcessor, times(1)).closeJob("_job_id");
+        verify(dataProcessor, times(1)).closeJob("my_job_id");
     }
 
     public void testClusterChanged_allocationRemovedStopJob() {
-        jobLifeCycleService.localAssignedJobs.add("_job_id");
+        jobLifeCycleService.localAssignedJobs.add("my_job_id");
 
         PrelertMetadata.Builder pmBuilder = new PrelertMetadata.Builder();
-        pmBuilder.putJob(buildJobBuilder("_job_id").build(), false);
-        pmBuilder.removeJob("_job_id");
+        pmBuilder.putJob(buildJobBuilder("my_job_id").build(), false);
+        pmBuilder.removeJob("my_job_id");
         ClusterState cs1 = ClusterState.builder(new ClusterName("_cluster_name")).metaData(MetaData.builder()
                 .putCustom(PrelertMetadata.TYPE, pmBuilder.build()))
                 .nodes(DiscoveryNodes.builder()

@@ -146,28 +144,28 @@ public class JobLifeCycleServiceTests extends ESTestCase {
                 .build();
         jobLifeCycleService.clusterChanged(new ClusterChangedEvent("_source", cs1, cs1));
         assertEquals(jobLifeCycleService.localAssignedJobs.size(), 0);
-        verify(dataProcessor, times(1)).closeJob("_job_id");
+        verify(dataProcessor, times(1)).closeJob("my_job_id");
     }
 
     public void testStart_openJobFails() {
-        doThrow(new RuntimeException("error")).when(dataProcessor).openJob("_job_id", false);
+        doThrow(new RuntimeException("error")).when(dataProcessor).openJob("my_job_id", false);
         Allocation.Builder allocation = new Allocation.Builder();
-        allocation.setJobId("_job_id");
+        allocation.setJobId("my_job_id");
         jobLifeCycleService.startJob(allocation.build());
-        assertTrue(jobLifeCycleService.localAssignedJobs.contains("_job_id"));
-        verify(dataProcessor).openJob("_job_id", false);
-        UpdateJobStatusAction.Request expectedRequest = new UpdateJobStatusAction.Request("_job_id", JobStatus.FAILED);
+        assertTrue(jobLifeCycleService.localAssignedJobs.contains("my_job_id"));
+        verify(dataProcessor).openJob("my_job_id", false);
+        UpdateJobStatusAction.Request expectedRequest = new UpdateJobStatusAction.Request("my_job_id", JobStatus.FAILED);
         expectedRequest.setReason("failed to open, error");
         verify(client).execute(eq(UpdateJobStatusAction.INSTANCE), eq(expectedRequest), any());
     }
 
     public void testStart_closeJobFails() {
-        jobLifeCycleService.localAssignedJobs.add("_job_id");
-        doThrow(new RuntimeException("error")).when(dataProcessor).closeJob("_job_id");
-        jobLifeCycleService.stopJob("_job_id");
+        jobLifeCycleService.localAssignedJobs.add("my_job_id");
+        doThrow(new RuntimeException("error")).when(dataProcessor).closeJob("my_job_id");
+        jobLifeCycleService.stopJob("my_job_id");
         assertEquals(jobLifeCycleService.localAssignedJobs.size(), 0);
-        verify(dataProcessor).closeJob("_job_id");
-        UpdateJobStatusAction.Request expectedRequest = new UpdateJobStatusAction.Request("_job_id", JobStatus.FAILED);
+        verify(dataProcessor).closeJob("my_job_id");
+        UpdateJobStatusAction.Request expectedRequest = new UpdateJobStatusAction.Request("my_job_id", JobStatus.FAILED);
         expectedRequest.setReason("failed to close, error");
         verify(client).execute(eq(UpdateJobStatusAction.INSTANCE), eq(expectedRequest), any());
     }

@@ -252,16 +252,16 @@ public class PrelertMetadataTests extends AbstractSerializingTestCase<PrelertMet
 
     public void testUpdateAllocation_setFinishedTime() {
         PrelertMetadata.Builder builder = new PrelertMetadata.Builder();
-        builder.putJob(buildJobBuilder("_job_id").build(), false);
-        builder.updateStatus("_job_id", JobStatus.OPENING, null);
+        builder.putJob(buildJobBuilder("my_job_id").build(), false);
+        builder.updateStatus("my_job_id", JobStatus.OPENING, null);
 
-        builder.updateStatus("_job_id", JobStatus.OPENED, null);
+        builder.updateStatus("my_job_id", JobStatus.OPENED, null);
         PrelertMetadata prelertMetadata = builder.build();
-        assertThat(prelertMetadata.getJobs().get("_job_id").getFinishedTime(), nullValue());
+        assertThat(prelertMetadata.getJobs().get("my_job_id").getFinishedTime(), nullValue());
 
-        builder.updateStatus("_job_id", JobStatus.CLOSED, null);
+        builder.updateStatus("my_job_id", JobStatus.CLOSED, null);
         prelertMetadata = builder.build();
-        assertThat(prelertMetadata.getJobs().get("_job_id").getFinishedTime(), notNullValue());
+        assertThat(prelertMetadata.getJobs().get("my_job_id").getFinishedTime(), notNullValue());
     }
 
     public void testUpdateStatus_failBecauseJobDoesNotExist() {

@@ -8,6 +8,8 @@ package org.elasticsearch.xpack.prelert.utils;
 
 import org.elasticsearch.test.ESTestCase;
 
+import static org.hamcrest.Matchers.is;
+
 public class PrelertStringsTests extends ESTestCase {
     public void testDoubleQuoteIfNotAlphaNumeric() {
         assertEquals("foo2", PrelertStrings.doubleQuoteIfNotAlphaNumeric("foo2"));

@@ -15,4 +17,17 @@ public class PrelertStringsTests extends ESTestCase {
         assertEquals("\" \"", PrelertStrings.doubleQuoteIfNotAlphaNumeric(" "));
         assertEquals("\"ba\\\"r\\\"\"", PrelertStrings.doubleQuoteIfNotAlphaNumeric("ba\"r\""));
     }
+
+    public void testIsValidId() {
+        assertThat(PrelertStrings.isValidId("1_-.a"), is(true));
+        assertThat(PrelertStrings.isValidId("b.-_3"), is(true));
+        assertThat(PrelertStrings.isValidId("a-b.c_d"), is(true));
+
+        assertThat(PrelertStrings.isValidId("a1_-."), is(false));
+        assertThat(PrelertStrings.isValidId("-.a1_"), is(false));
+        assertThat(PrelertStrings.isValidId(".a1_-"), is(false));
+        assertThat(PrelertStrings.isValidId("_-.a1"), is(false));
+        assertThat(PrelertStrings.isValidId("A"), is(false));
+        assertThat(PrelertStrings.isValidId("!afafd"), is(false));
+    }
 }