Merge branch 'master' into index-lifecycle

Tal Levy 2018-08-15 06:11:25 -07:00
commit 92ecd1d271
119 changed files with 2954 additions and 1434 deletions


@@ -59,9 +59,6 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.matrix.stats.MatrixStats;
@@ -270,33 +267,6 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
assertEquals(2, type2.getDocCount());
assertEquals(0, type2.getAggregations().asList().size());
}
public void testSearchWithSignificantTermsAgg() throws IOException {
SearchRequest searchRequest = new SearchRequest();
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.query(new MatchQueryBuilder("num","50"));
searchSourceBuilder.aggregation(new SignificantTermsAggregationBuilder("agg1", ValueType.STRING)
.field("type.keyword")
.minDocCount(1)
.significanceHeuristic(new PercentageScore()));
searchSourceBuilder.size(0);
searchRequest.source(searchSourceBuilder);
SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
assertSearchHeader(searchResponse);
assertNull(searchResponse.getSuggest());
assertEquals(Collections.emptyMap(), searchResponse.getProfileResults());
assertEquals(0, searchResponse.getHits().getHits().length);
assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f);
SignificantTerms significantTermsAgg = searchResponse.getAggregations().get("agg1");
assertEquals("agg1", significantTermsAgg.getName());
assertEquals(1, significantTermsAgg.getBuckets().size());
SignificantTerms.Bucket type1 = significantTermsAgg.getBucketByKey("type1");
assertEquals(1, type1.getDocCount());
assertEquals(1, type1.getSubsetDf());
assertEquals(1, type1.getSubsetSize());
assertEquals(3, type1.getSupersetDf());
assertEquals(1d/3d, type1.getSignificanceScore(), 0d);
}
public void testSearchWithRangeAgg() throws IOException {
{


@@ -0,0 +1,121 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.documentation;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.protocol.xpack.ml.PutJobRequest;
import org.elasticsearch.protocol.xpack.ml.PutJobResponse;
import org.elasticsearch.protocol.xpack.ml.job.config.AnalysisConfig;
import org.elasticsearch.protocol.xpack.ml.job.config.DataDescription;
import org.elasticsearch.protocol.xpack.ml.job.config.Detector;
import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.greaterThan;
public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
public void testCreateJob() throws Exception {
RestHighLevelClient client = highLevelClient();
//tag::x-pack-ml-put-job-detector
Detector.Builder detectorBuilder = new Detector.Builder()
.setFunction("sum") // <1>
.setFieldName("total") // <2>
.setDetectorDescription("Sum of total"); // <3>
//end::x-pack-ml-put-job-detector
//tag::x-pack-ml-put-job-analysis-config
List<Detector> detectors = Collections.singletonList(detectorBuilder.build()); // <1>
AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(detectors) // <2>
.setBucketSpan(TimeValue.timeValueMinutes(10)); // <3>
//end::x-pack-ml-put-job-analysis-config
//tag::x-pack-ml-put-job-data-description
DataDescription.Builder dataDescriptionBuilder = new DataDescription.Builder()
.setTimeField("timestamp"); // <1>
//end::x-pack-ml-put-job-data-description
{
String id = "job_1";
//tag::x-pack-ml-put-job-config
Job.Builder jobBuilder = new Job.Builder(id) // <1>
.setAnalysisConfig(analysisConfigBuilder) // <2>
.setDataDescription(dataDescriptionBuilder) // <3>
.setDescription("Total sum of requests"); // <4>
//end::x-pack-ml-put-job-config
//tag::x-pack-ml-put-job-request
PutJobRequest request = new PutJobRequest(jobBuilder.build()); // <1>
//end::x-pack-ml-put-job-request
//tag::x-pack-ml-put-job-execute
PutJobResponse response = client.machineLearning().putJob(request, RequestOptions.DEFAULT);
//end::x-pack-ml-put-job-execute
//tag::x-pack-ml-put-job-response
Date createTime = response.getResponse().getCreateTime(); // <1>
//end::x-pack-ml-put-job-response
assertThat(createTime.getTime(), greaterThan(0L));
}
{
String id = "job_2";
Job.Builder jobBuilder = new Job.Builder(id)
.setAnalysisConfig(analysisConfigBuilder)
.setDataDescription(dataDescriptionBuilder)
.setDescription("Total sum of requests");
PutJobRequest request = new PutJobRequest(jobBuilder.build());
// tag::x-pack-ml-put-job-execute-listener
ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
@Override
public void onResponse(PutJobResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::x-pack-ml-put-job-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::x-pack-ml-put-job-execute-async
client.machineLearning().putJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::x-pack-ml-put-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
}


@@ -157,7 +157,7 @@ subprojects {
environment('JAVA_HOME', getJavaHome(it, 8))
} else if ("6.2".equals(bwcBranch)) {
environment('JAVA_HOME', getJavaHome(it, 9))
-} else if (["6.3", "6.x"].contains(bwcBranch)) {
+} else if (["6.3", "6.4", "6.x"].contains(bwcBranch)) {
environment('JAVA_HOME', getJavaHome(it, 10))
} else {
environment('JAVA_HOME', project.compilerJavaHome)


@@ -0,0 +1,161 @@
[[java-rest-high-x-pack-ml-put-job]]
=== Put Job API
The Put Job API can be used to create a new {ml} job
in the cluster. The API accepts a `PutJobRequest` object
as a request and returns a `PutJobResponse`.
[[java-rest-high-x-pack-ml-put-job-request]]
==== Put Job Request
A `PutJobRequest` requires the following argument:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-request]
--------------------------------------------------
<1> The configuration of the {ml} job to create as a `Job`
[[java-rest-high-x-pack-ml-put-job-config]]
==== Job Configuration
The `Job` object contains all the details about the {ml} job
configuration.
A `Job` requires the following arguments:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-config]
--------------------------------------------------
<1> The job ID
<2> An analysis configuration
<3> A data description
<4> Optionally, a human-readable description
[[java-rest-high-x-pack-ml-put-job-analysis-config]]
==== Analysis Configuration
The analysis configuration of the {ml} job is defined in the `AnalysisConfig`.
`AnalysisConfig` reflects all the configuration
settings that can be defined using the REST API.
Using the REST API, we could define this analysis configuration:
[source,js]
--------------------------------------------------
"analysis_config" : {
"bucket_span" : "10m",
"detectors" : [
{
"detector_description" : "Sum of total",
"function" : "sum",
"field_name" : "total"
}
]
}
--------------------------------------------------
// NOTCONSOLE
When using the `AnalysisConfig` object with the high level REST client, the list
of detectors must be built first.
An example of building a `Detector` instance is as follows:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-detector]
--------------------------------------------------
<1> The function to use
<2> The field to apply the function to
<3> Optionally, a human-readable description
Then the same configuration would be:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-analysis-config]
--------------------------------------------------
<1> Create a list of detectors
<2> Pass the list of detectors to the analysis config builder constructor
<3> The bucket span
[[java-rest-high-x-pack-ml-put-job-data-description]]
==== Data Description
After defining the analysis config, the next thing to define is the
data description, using a `DataDescription` instance. `DataDescription`
reflects all the configuration settings that can be defined using the
REST API.
Using the REST API, we could define this data description:
[source,js]
--------------------------------------------------
"data_description" : {
"time_field" : "timestamp"
}
--------------------------------------------------
// NOTCONSOLE
Using the `DataDescription` object and the high level REST client, the same
configuration would be:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-data-description]
--------------------------------------------------
<1> The time field
[[java-rest-high-x-pack-ml-put-job-execution]]
==== Execution
The Put Job API can be executed through a `MachineLearningClient`
instance. Such an instance can be retrieved from a `RestHighLevelClient`
using the `machineLearning()` method:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute]
--------------------------------------------------
[[java-rest-high-x-pack-ml-put-job-response]]
==== Response
The returned `PutJobResponse` contains the full representation of
the new {ml} job if it has been successfully created. This will
contain the creation time and other fields initialized using
default values:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-response]
--------------------------------------------------
<1> The creation time is a field that was not passed in the `Job` object in the request
[[java-rest-high-x-pack-ml-put-job-async]]
==== Asynchronous Execution
This request can be executed asynchronously:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute-async]
--------------------------------------------------
<1> The `PutJobRequest` to execute and the `ActionListener` to use when
the execution completes
The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.
A typical listener for `PutJobResponse` looks like:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-put-job-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument


@@ -200,6 +200,14 @@ include::licensing/put-license.asciidoc[]
include::licensing/get-license.asciidoc[]
include::licensing/delete-license.asciidoc[]
== Machine Learning APIs
The Java High Level REST Client supports the following Machine Learning APIs:
* <<java-rest-high-x-pack-ml-put-job>>
include::ml/put_job.asciidoc[]
== Migration APIs
The Java High Level REST Client supports the following Migration APIs:

libs/dissect/build.gradle

@@ -0,0 +1,50 @@
import org.elasticsearch.gradle.precommit.PrecommitTasks
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
archivesBaseName = 'elasticsearch-dissect'
dependencies {
if (isEclipse == false || project.path == ":libs:dissect-tests") {
testCompile("org.elasticsearch.test:framework:${version}") {
exclude group: 'org.elasticsearch', module: 'dissect'
}
}
testCompile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
testCompile("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}")
testCompile("com.fasterxml.jackson.core:jackson-databind:${versions.jackson}")
}
forbiddenApisMain {
signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')]
}
if (isEclipse) {
// in eclipse the project is under a fake root, we need to change around the source sets
sourceSets {
if (project.path == ":libs:dissect") {
main.java.srcDirs = ['java']
main.resources.srcDirs = ['resources']
} else {
test.java.srcDirs = ['java']
test.resources.srcDirs = ['resources']
}
}
}


@@ -0,0 +1,3 @@
// this is just a shell gradle file for eclipse to have separate projects for dissect src and tests
apply from: '../../build.gradle'


@@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.dissect;
/**
* Parent class for all dissect related exceptions. Consumers may catch this exception or more specific child exceptions.
*/
public abstract class DissectException extends RuntimeException {
DissectException(String message) {
super(message);
}
/**
* Error while parsing a dissect pattern
*/
static class PatternParse extends DissectException {
PatternParse(String pattern, String reason) {
super("Unable to parse pattern: " + pattern + " Reason: " + reason);
}
}
/**
* Error while parsing a dissect key
*/
static class KeyParse extends DissectException {
KeyParse(String key, String reason) {
super("Unable to parse key: " + key + " Reason: " + reason);
}
}
/**
* Unable to find a match between pattern and source string
*/
static class FindMatch extends DissectException {
FindMatch(String pattern, String source) {
super("Unable to find match for dissect pattern: " + pattern + " against source: " + source);
}
}
}
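The subclasses above are package-private, so code outside org.elasticsearch.dissect can only catch the parent DissectException. A minimal sketch of that, assuming the public DissectParser API introduced later in this commit (the class name DissectErrorHandlingSketch is illustrative, and the input deliberately omits the comma delimiter so that parsing fails):

import java.util.Map;

import org.elasticsearch.dissect.DissectException;
import org.elasticsearch.dissect.DissectParser;

class DissectErrorHandlingSketch {
    static Map<String, String> parseOrNull(String input) {
        try {
            // "%{a},%{b}" expects a comma delimiter between the two values.
            return new DissectParser("%{a},%{b}", null).parse(input);
        } catch (DissectException e) {
            // Covers PatternParse, KeyParse and FindMatch alike.
            return null;
        }
    }
}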


@@ -0,0 +1,191 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.dissect;
import java.util.EnumSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* <p>A Key of a dissect pattern. This class models the name and modifiers and provides some validation.</p>
* <p>For dissect pattern of {@code %{a} %{+a} %{b}} the dissect keys are:
* <ul>
* <li>{@code a}</li>
* <li>{@code +a}</li>
* <li>{@code b}</li>
* </ul>
* This class represents a single key.
* <p>A single key is composed of a name and its modifiers. For the key {@code +a}, {@code a} is the name and {@code +} is the modifier.
* @see DissectParser
*/
public final class DissectKey {
private static final Pattern LEFT_MODIFIER_PATTERN = Pattern.compile("([+*&?])(.*?)(->)?$", Pattern.DOTALL);
private static final Pattern RIGHT_PADDING_PATTERN = Pattern.compile("^(.*?)(->)?$", Pattern.DOTALL);
private static final Pattern APPEND_WITH_ORDER_PATTERN = Pattern.compile("[+](.*?)(/)([0-9]+)(->)?$", Pattern.DOTALL);
private final Modifier modifier;
private boolean skip;
private boolean skipRightPadding;
private int appendPosition;
private String name;
/**
* Constructor - parses the String key into its name and modifier(s)
*
* @param key The key without the leading <code>%{</code> or trailing <code>}</code>, for example {@code a->}
*/
DissectKey(String key) {
skip = key == null || key.isEmpty();
modifier = Modifier.findModifier(key);
switch (modifier) {
case NONE:
Matcher matcher = RIGHT_PADDING_PATTERN.matcher(key);
while (matcher.find()) {
name = matcher.group(1);
skipRightPadding = matcher.group(2) != null;
}
skip = name.isEmpty();
break;
case NAMED_SKIP:
matcher = LEFT_MODIFIER_PATTERN.matcher(key);
while (matcher.find()) {
name = matcher.group(2);
skipRightPadding = matcher.group(3) != null;
}
skip = true;
break;
case APPEND:
matcher = LEFT_MODIFIER_PATTERN.matcher(key);
while (matcher.find()) {
name = matcher.group(2);
skipRightPadding = matcher.group(3) != null;
}
break;
case FIELD_NAME:
matcher = LEFT_MODIFIER_PATTERN.matcher(key);
while (matcher.find()) {
name = matcher.group(2);
skipRightPadding = matcher.group(3) != null;
}
break;
case FIELD_VALUE:
matcher = LEFT_MODIFIER_PATTERN.matcher(key);
while (matcher.find()) {
name = matcher.group(2);
skipRightPadding = matcher.group(3) != null;
}
break;
case APPEND_WITH_ORDER:
matcher = APPEND_WITH_ORDER_PATTERN.matcher(key);
while (matcher.find()) {
name = matcher.group(1);
appendPosition = Short.valueOf(matcher.group(3));
skipRightPadding = matcher.group(4) != null;
}
break;
}
if (name == null || (name.isEmpty() && !skip)) {
throw new DissectException.KeyParse(key, "The key name could not be determined");
}
}
/**
* Copy constructor to explicitly override the modifier.
* @param key The key to copy (except for the modifier)
* @param modifier the modifier to use for this copy
*/
DissectKey(DissectKey key, DissectKey.Modifier modifier){
this.modifier = modifier;
this.skipRightPadding = key.skipRightPadding;
this.skip = key.skip;
this.name = key.name;
this.appendPosition = key.appendPosition;
}
Modifier getModifier() {
return modifier;
}
boolean skip() {
return skip;
}
boolean skipRightPadding() {
return skipRightPadding;
}
int getAppendPosition() {
return appendPosition;
}
String getName() {
return name;
}
//generated
@Override
public String toString() {
return "DissectKey{" +
"modifier=" + modifier +
", skip=" + skip +
", appendPosition=" + appendPosition +
", name='" + name + '\'' +
'}';
}
public enum Modifier {
NONE(""), APPEND_WITH_ORDER("/"), APPEND("+"), FIELD_NAME("*"), FIELD_VALUE("&"), NAMED_SKIP("?");
private static final Pattern MODIFIER_PATTERN = Pattern.compile("[/+*&?]");
private final String modifier;
@Override
public String toString() {
return modifier;
}
Modifier(final String modifier) {
this.modifier = modifier;
}
//package private for testing
static Modifier fromString(String modifier) {
return EnumSet.allOf(Modifier.class).stream().filter(km -> km.modifier.equals(modifier))
.findFirst().orElseThrow(() -> new IllegalArgumentException("Found invalid modifier.")); //throw should never happen
}
private static Modifier findModifier(String key) {
Modifier modifier = Modifier.NONE;
if (key != null && !key.isEmpty()) {
Matcher matcher = MODIFIER_PATTERN.matcher(key);
int matches = 0;
while (matcher.find()) {
Modifier priorModifier = modifier;
modifier = Modifier.fromString(matcher.group());
if (++matches > 1 && !(APPEND.equals(priorModifier) && APPEND_WITH_ORDER.equals(modifier))) {
throw new DissectException.KeyParse(key, "multiple modifiers are not allowed.");
}
}
}
return modifier;
}
}
}
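A short sketch of how a raw key string decomposes into name, modifier, append position and right-padding flag. The constructor and getters are package-private, so this assumes code living in the org.elasticsearch.dissect package itself, as the tests later in this commit do:

package org.elasticsearch.dissect;

class DissectKeySketch {
    static void describe() {
        // Key text as it appears between %{ and } in a pattern.
        DissectKey key = new DissectKey("+keyname/2->");
        assert key.getModifier() == DissectKey.Modifier.APPEND_WITH_ORDER;
        assert "keyname".equals(key.getName());
        assert key.getAppendPosition() == 2;   // from the /2 ordinal
        assert key.skipRightPadding();         // from the trailing ->
    }
}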


@@ -0,0 +1,198 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.dissect;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Represents the matches of a {@link DissectParser#parse(String)}. Handles the appending and referencing based on the key instruction.
*/
final class DissectMatch {
private final String appendSeparator;
private final Map<String, String> results;
private final Map<String, String> simpleResults;
private final Map<String, ReferenceResult> referenceResults;
private final Map<String, AppendResult> appendResults;
private int implicitAppendOrder = -1000;
private final int maxMatches;
private final int maxResults;
private final int appendCount;
private final int referenceCount;
private final int simpleCount;
private int matches = 0;
DissectMatch(String appendSeparator, int maxMatches, int maxResults, int appendCount, int referenceCount) {
if (maxMatches <= 0 || maxResults <= 0) {
throw new IllegalArgumentException("Expected results are zero, can not construct DissectMatch");//should never happen
}
this.maxMatches = maxMatches;
this.maxResults = maxResults;
this.appendCount = appendCount;
this.referenceCount = referenceCount;
this.appendSeparator = appendSeparator;
results = new HashMap<>(maxResults);
this.simpleCount = maxMatches - referenceCount - appendCount;
simpleResults = simpleCount <= 0 ? null : new HashMap<>(simpleCount);
referenceResults = referenceCount <= 0 ? null : new HashMap<>(referenceCount);
appendResults = appendCount <= 0 ? null : new HashMap<>(appendCount);
}
/**
* Add the key/value that was found as result of the parsing
* @param key the {@link DissectKey}
* @param value the discovered value for the key
*/
void add(DissectKey key, String value) {
matches++;
if (key.skip()) {
return;
}
switch (key.getModifier()) {
case NONE:
simpleResults.put(key.getName(), value);
break;
case APPEND:
appendResults.computeIfAbsent(key.getName(), k -> new AppendResult(appendSeparator)).addValue(value, implicitAppendOrder++);
break;
case APPEND_WITH_ORDER:
appendResults.computeIfAbsent(key.getName(),
k -> new AppendResult(appendSeparator)).addValue(value, key.getAppendPosition());
break;
case FIELD_NAME:
referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setKey(value);
break;
case FIELD_VALUE:
referenceResults.computeIfAbsent(key.getName(), k -> new ReferenceResult()).setValue(value);
break;
}
}
boolean fullyMatched() {
return matches == maxMatches;
}
/**
* Checks if results are valid.
* @param results the results to check
* @return true if all dissect keys have been matched and the results are of the expected size.
*/
boolean isValid(Map<String, String> results) {
return fullyMatched() && results.size() == maxResults;
}
/**
* Gets all the current matches. Pass the results of this to isValid to determine if a fully successful match has occurred.
*
* @return the map of the results.
*/
Map<String, String> getResults() {
results.clear();
if (simpleCount > 0) {
results.putAll(simpleResults);
}
if (referenceCount > 0) {
referenceResults.forEach((k, v) -> results.put(v.getKey(), v.getValue()));
}
if (appendCount > 0) {
appendResults.forEach((k, v) -> results.put(k, v.getAppendResult()));
}
return results;
}
/**
* a result that will need to be part of an append operation.
*/
private final class AppendResult {
private final List<AppendValue> values = new ArrayList<>();
private final String appendSeparator;
private AppendResult(String appendSeparator) {
this.appendSeparator = appendSeparator;
}
private void addValue(String value, int order) {
values.add(new AppendValue(value, order));
}
private String getAppendResult() {
Collections.sort(values);
return values.stream().map(AppendValue::getValue).collect(Collectors.joining(appendSeparator));
}
}
/**
* An appendable value that can be sorted based on the provided order
*/
private final class AppendValue implements Comparable<AppendValue> {
private final String value;
private final int order;
private AppendValue(String value, int order) {
this.value = value;
this.order = order;
}
private String getValue() {
return value;
}
private int getOrder() {
return order;
}
@Override
public int compareTo(AppendValue o) {
return Integer.compare(this.order, o.getOrder());
}
}
/**
* A result that needs to be converted to a key/value reference
*/
private final class ReferenceResult {
private String key;
private String value;
private String getKey() {
return key;
}
private String getValue() {
return value;
}
private void setValue(String value) {
this.value = value;
}
private void setKey(String key) {
this.key = key;
}
}
}
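The append bookkeeping above is easiest to see through the public DissectParser API introduced in this commit; a minimal sketch, with "-" passed as the appendSeparator and the class name DissectAppendSketch purely illustrative:

import java.util.Map;

import org.elasticsearch.dissect.DissectParser;

class DissectAppendSketch {
    public static void main(String[] args) {
        // The bare %{name} is promoted to an append key because +name keys exist;
        // values are joined with the separator supplied to the constructor.
        DissectParser parser = new DissectParser("%{name} %{+name} %{+name}", "-");
        Map<String, String> results = parser.parse("john jacob smith");
        System.out.println(results); // {name=john-jacob-smith}
    }
}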


@@ -0,0 +1,310 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.dissect;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* <p>Splits (dissects) a string into its parts based on a pattern.</p><p>A dissect pattern is composed of a set of keys and delimiters.
* For example the dissect pattern: <pre>%{a} %{b},%{c}</pre> has 3 keys (a,b,c) and two delimiters (space and comma). This pattern will
* match a string of the form: <pre>foo bar,baz</pre> and will result in a key/value pairing of <pre>a=foo, b=bar, and c=baz.</pre>
* <p>Matches are all or nothing. For example, the same pattern will NOT match <pre>foo bar baz</pre> since not all of the delimiters
* matched (the comma did not match).
* <p>Dissect patterns can optionally have modifiers. These modifiers instruct the parser to change its behavior. For example, the
* dissect pattern of <pre>%{a},%{b}:%{c}</pre> would not match <pre>foo,bar,baz</pre> since the colon never matches.
* <p>Modifiers appear to the left or the right of the key name. The supported modifiers are:
* <ul>
* <li>{@code ->} Instructs the parser to ignore repeating delimiters to the right of the key. Example: <pre>
* pattern: {@code %{a->} %{b} %{c}}
* string: {@code foo bar baz}
* result: {@code a=foo, b=bar, c=baz}
* </pre></li>
* <li>{@code +} Instructs the parser to append this key's value to the value of the prior key with the same name.
* Example: <pre>
* pattern: {@code %{a} %{+a} %{+a}}
* string: {@code foo bar baz}
* result: {@code a=foobarbaz}
* </pre></li>
* <li>{@code /} Instructs the parser to append this key's value to the value of a key based on the order specified after the
* {@code /}. Requires the {@code +} modifier to also be present in the key. Example: <pre>
* pattern: {@code %{a} %{+a/2} %{+a/1}}
* string: {@code foo bar baz}
* result: {@code a=foobazbar}
* </pre>
* </li>
* <li>{@code *} Instructs the parser to ignore the name of this key and instead use the matched value as the key name.
* Requires another key with the same name and the {@code &} modifier to be the value. Example: <pre>
* pattern: {@code %{*a} %{b} %{&a}}
* string: {@code foo bar baz}
* result: {@code foo=baz, b=bar}
* </pre></li>
* <li>{@code &} Instructs the parser to ignore this key and place the matched value under the key of the same name that carries the {@code *} modifier.
* Requires another key with the same name and the {@code *} modifier.
* Example: <pre>
* pattern: {@code %{*a} %{b} %{&a}}
* string: {@code foo bar baz}
* result: {@code foo=baz, b=bar}
* </pre></li>
* <li>{@code ?} Instructs the parser to ignore this key. The key name exists only for the purpose of human readability. Example
* <pre>
* pattern: {@code %{a} %{?skipme} %{c}}
* string: {@code foo bar baz}
* result: {@code a=foo, c=baz}
* </pre>
* </ul>
* <p>Patterns with empty key names are also supported. They behave just like the {@code ?} modifier, except that a name is not required.
* The matched value will simply be ignored. Example:
* <pre>
* pattern: {@code %{a} %{} %{c}}
* string: {@code foo bar baz}
* result: {@code a=foo, c=baz}
* </pre>
* <p>
* Inspired by the Logstash Dissect Filter by Guy Boertje
*/
public final class DissectParser {
private static final Pattern LEADING_DELIMITER_PATTERN = Pattern.compile("^(.*?)%");
private static final Pattern KEY_DELIMITER_FIELD_PATTERN = Pattern.compile("%\\{([^}]*?)}([^%]*)", Pattern.DOTALL);
private static final EnumSet<DissectKey.Modifier> ASSOCIATE_MODIFIERS = EnumSet.of(
DissectKey.Modifier.FIELD_NAME,
DissectKey.Modifier.FIELD_VALUE);
private static final EnumSet<DissectKey.Modifier> APPEND_MODIFIERS = EnumSet.of(
DissectKey.Modifier.APPEND,
DissectKey.Modifier.APPEND_WITH_ORDER);
private static final Function<DissectPair, String> KEY_NAME = val -> val.getKey().getName();
private final List<DissectPair> matchPairs;
private final String pattern;
private String leadingDelimiter = "";
private final int maxMatches;
private final int maxResults;
private final int appendCount;
private final int referenceCount;
private final String appendSeparator;
public DissectParser(String pattern, String appendSeparator) {
this.pattern = pattern;
this.appendSeparator = appendSeparator == null ? "" : appendSeparator;
Matcher matcher = LEADING_DELIMITER_PATTERN.matcher(pattern);
while (matcher.find()) {
leadingDelimiter = matcher.group(1);
}
List<DissectPair> matchPairs = new ArrayList<>();
matcher = KEY_DELIMITER_FIELD_PATTERN.matcher(pattern.substring(leadingDelimiter.length()));
while (matcher.find()) {
DissectKey key = new DissectKey(matcher.group(1));
String delimiter = matcher.group(2);
matchPairs.add(new DissectPair(key, delimiter));
}
this.maxMatches = matchPairs.size();
this.maxResults = Long.valueOf(matchPairs.stream()
.filter(dissectPair -> !dissectPair.getKey().skip()).map(KEY_NAME).distinct().count()).intValue();
if (this.maxMatches == 0 || maxResults == 0) {
throw new DissectException.PatternParse(pattern, "Unable to find any keys or delimiters.");
}
//append validation - look through all of the keys to see if there are any keys that need to participate in an append operation
// but don't have the '+' defined
Set<String> appendKeyNames = matchPairs.stream()
.filter(dissectPair -> APPEND_MODIFIERS.contains(dissectPair.getKey().getModifier()))
.map(KEY_NAME).distinct().collect(Collectors.toSet());
if (appendKeyNames.size() > 0) {
List<DissectPair> modifiedMatchPairs = new ArrayList<>(matchPairs.size());
for (DissectPair p : matchPairs) {
if (p.getKey().getModifier().equals(DissectKey.Modifier.NONE) && appendKeyNames.contains(p.getKey().getName())) {
modifiedMatchPairs.add(new DissectPair(new DissectKey(p.getKey(), DissectKey.Modifier.APPEND), p.getDelimiter()));
} else {
modifiedMatchPairs.add(p);
}
}
matchPairs = modifiedMatchPairs;
}
appendCount = appendKeyNames.size();
//reference validation - ensure that '*' and '&' come in pairs
Map<String, List<DissectPair>> referenceGroupings = matchPairs.stream()
.filter(dissectPair -> ASSOCIATE_MODIFIERS.contains(dissectPair.getKey().getModifier()))
.collect(Collectors.groupingBy(KEY_NAME));
for (Map.Entry<String, List<DissectPair>> entry : referenceGroupings.entrySet()) {
if (entry.getValue().size() != 2) {
throw new DissectException.PatternParse(pattern, "Found invalid key/reference associations: '"
+ entry.getValue().stream().map(KEY_NAME).collect(Collectors.joining(",")) +
"' Please ensure each '*<key>' is matched with a matching '&<key>");
}
}
referenceCount = referenceGroupings.size() * 2;
this.matchPairs = Collections.unmodifiableList(matchPairs);
}
/**
* <p>Entry point to dissect a string into its parts.</p>
*
* @param inputString The string to dissect
* @return the key/value Map of the results
* @throws DissectException if unable to dissect a pair into its parts.
*/
public Map<String, String> parse(String inputString) {
/**
*
* This implements a naive string matching algorithm. The string is walked left to right, comparing each byte against
* another string's bytes looking for matches. If the bytes match, then a second cursor looks ahead to see if all the bytes
* of the other string match. If they all match, record it and advance the primary cursor to the match point. If it can not match
* all of the bytes, then progress the main cursor. Repeat until the end of the input string. Since the string being searched for
* (the delimiter) is generally small and rare, the naive approach is efficient.
*
* In this case the string that is walked is the input string, and the string being searched for is the current delimiter.
* For example, for a dissect pattern of {@code %{a},%{b}:%{c}} the delimiters (comma then colon) are searched for in the
* input string. At class construction the list of keys+delimiters is found (dissectPairs), which allows the use of that ordered
* list to know which delimiter to use for the search. The parser advances to the next delimiter once the current one is matched.
*
* There are two special cases that require additional parsing beyond the standard naive algorithm. Consecutive delimiters should
* result in empty matches unless the {@code ->} modifier is provided. For example, given the dissect pattern of
* {@code %{a},%{b},%{c},%{d}} and input string of {@code foo,,,} the match should be successful with empty values for b, c and d.
* However, if the key modifier {@code ->} is present, it will simply skip over any delimiters just to the right of the key
* without assigning any values. For example {@code %{a->},%{b}} will match the input string of {@code foo,,,,,,bar} with a=foo and
* b=bar.
*
*/
DissectMatch dissectMatch = new DissectMatch(appendSeparator, maxMatches, maxResults, appendCount, referenceCount);
Iterator<DissectPair> it = matchPairs.iterator();
//ensure leading delimiter matches
if (inputString != null && inputString.length() > leadingDelimiter.length()
&& leadingDelimiter.equals(inputString.substring(0, leadingDelimiter.length()))) {
byte[] input = inputString.getBytes(StandardCharsets.UTF_8);
//grab the first key/delimiter pair
DissectPair dissectPair = it.next();
DissectKey key = dissectPair.getKey();
byte[] delimiter = dissectPair.getDelimiter().getBytes(StandardCharsets.UTF_8);
//start dissection after the first delimiter
int i = leadingDelimiter.length();
int valueStart = i;
int lookAheadMatches;
//start walking the input string byte by byte, look ahead for matches where needed
//if a match is found jump forward to the end of the match
for (; i < input.length; i++) {
lookAheadMatches = 0;
//potential match between delimiter and input string
if (delimiter.length > 0 && input[i] == delimiter[0]) {
//look ahead to see if the entire delimiter matches the input string
for (int j = 0; j < delimiter.length; j++) {
if (i + j < input.length && input[i + j] == delimiter[j]) {
lookAheadMatches++;
}
}
//found a full delimiter match
if (lookAheadMatches == delimiter.length) {
//record the key/value tuple
byte[] value = Arrays.copyOfRange(input, valueStart, i);
dissectMatch.add(key, new String(value, StandardCharsets.UTF_8));
//jump to the end of the match
i += lookAheadMatches;
//look for consecutive delimiters (e.g. a,,,,d,e)
while (i < input.length) {
lookAheadMatches = 0;
for (int j = 0; j < delimiter.length; j++) {
if (i + j < input.length && input[i + j] == delimiter[j]) {
lookAheadMatches++;
}
}
//found consecutive delimiters
if (lookAheadMatches == delimiter.length) {
//jump to the end of the match
i += lookAheadMatches;
if (!key.skipRightPadding()) {
//progress the keys/delimiter if possible
if (!it.hasNext()) {
break; //the while loop
}
dissectPair = it.next();
key = dissectPair.getKey();
//add the key with an empty value for the empty delimiter
dissectMatch.add(key, "");
}
} else {
break; //the while loop
}
}
//progress the keys/delimiter if possible
if (!it.hasNext()) {
break; //the for loop
}
dissectPair = it.next();
key = dissectPair.getKey();
delimiter = dissectPair.getDelimiter().getBytes(StandardCharsets.UTF_8);
//i is always one byte after the last found delimiter, aka the start of the next value
valueStart = i;
}
}
}
//the last key, grab the rest of the input (unless consecutive delimiters already grabbed the last key)
//and there is no trailing delimiter
if (!dissectMatch.fullyMatched() && delimiter.length == 0 ) {
byte[] value = Arrays.copyOfRange(input, valueStart, input.length);
String valueString = new String(value, StandardCharsets.UTF_8);
dissectMatch.add(key, valueString);
}
}
Map<String, String> results = dissectMatch.getResults();
if (!dissectMatch.isValid(results)) {
throw new DissectException.FindMatch(pattern, inputString);
}
return results;
}
/**
* A tuple class to hold the dissect key and delimiter
*/
private class DissectPair {
private final DissectKey key;
private final String delimiter;
private DissectPair(DissectKey key, String delimiter) {
this.key = key;
this.delimiter = delimiter;
}
private DissectKey getKey() {
return key;
}
private String getDelimiter() {
return delimiter;
}
}
}
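A brief end-to-end sketch of the reference (* and &) and right-padding (->) modifiers documented above, using only the public constructor and parse method; the class name DissectParserUsageSketch is illustrative:

import java.util.Map;

import org.elasticsearch.dissect.DissectParser;

class DissectParserUsageSketch {
    public static void main(String[] args) {
        // *field captures the output key name from the input, &field supplies its value.
        DissectParser reference = new DissectParser("%{*field} %{b} %{&field}", null);
        Map<String, String> referenced = reference.parse("foo bar baz");
        System.out.println(referenced); // contains foo=baz and b=bar

        // The -> modifier swallows the repeated spaces to the right of the first key.
        DissectParser padded = new DissectParser("%{a->} %{b}", null);
        Map<String, String> paddedResult = padded.parse("foo     bar");
        System.out.println(paddedResult); // contains a=foo and b=bar
    }
}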


@@ -0,0 +1,7 @@
// this is just a shell gradle file for eclipse to have separate projects for dissect src and tests
apply from: '../../build.gradle'
dependencies {
testCompile project(':libs:dissect')
}


@@ -0,0 +1,178 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.dissect;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.CoreMatchers;
import java.util.EnumSet;
import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class DissectKeyTests extends ESTestCase {
public void testNoModifier() {
String keyName = randomAlphaOfLengthBetween(1, 10);
DissectKey dissectKey = new DissectKey(keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testAppendModifier() {
String keyName = randomAlphaOfLengthBetween(1, 10);
DissectKey dissectKey = new DissectKey("+" + keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.APPEND));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testAppendWithOrderModifier() {
String keyName = randomAlphaOfLengthBetween(1, 10);
int length = randomIntBetween(1, 100);
DissectKey dissectKey = new DissectKey("+" + keyName + "/" + length);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.APPEND_WITH_ORDER));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(length));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testAppendWithOrderModifierNoName() {
int length = randomIntBetween(1, 100);
DissectException e = expectThrows(DissectException.class, () -> new DissectKey("+/" + length));
assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key"));
}
public void testOrderModifierWithoutAppend() {
String keyName = randomAlphaOfLengthBetween(1, 10);
int length = randomIntBetween(1, 100);
DissectException e = expectThrows(DissectException.class, () -> new DissectKey(keyName + "/" + length));
assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key"));
}
public void testFieldNameModifier() {
String keyName = randomAlphaOfLengthBetween(1, 10);
DissectKey dissectKey = new DissectKey("*" + keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.FIELD_NAME));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testFieldValueModifiers() {
String keyName = randomAlphaOfLengthBetween(1, 10);
DissectKey dissectKey = new DissectKey("&" + keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.FIELD_VALUE));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testRightPaddingModifiers() {
String keyName = randomAlphaOfLengthBetween(1, 10);
DissectKey dissectKey = new DissectKey(keyName + "->");
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE));
assertThat(dissectKey.skip(), is(false));
assertThat(dissectKey.skipRightPadding(), is(true));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
dissectKey = new DissectKey("*" + keyName + "->");
assertThat(dissectKey.skipRightPadding(), is(true));
dissectKey = new DissectKey("&" + keyName + "->");
assertThat(dissectKey.skipRightPadding(), is(true));
dissectKey = new DissectKey("+" + keyName + "->");
assertThat(dissectKey.skipRightPadding(), is(true));
dissectKey = new DissectKey("?" + keyName + "->");
assertThat(dissectKey.skipRightPadding(), is(true));
dissectKey = new DissectKey("+" + keyName + "/2->");
assertThat(dissectKey.skipRightPadding(), is(true));
}
public void testMultipleLeftModifiers() {
String keyName = randomAlphaOfLengthBetween(1, 10);
List<String> validModifiers = EnumSet.allOf(DissectKey.Modifier.class).stream()
.filter(m -> !m.equals(DissectKey.Modifier.NONE))
.map(DissectKey.Modifier::toString)
.collect(Collectors.toList());
String modifier1 = randomFrom(validModifiers);
String modifier2 = randomFrom(validModifiers);
DissectException e = expectThrows(DissectException.class, () -> new DissectKey(modifier1 + modifier2 + keyName));
assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key"));
}
public void testSkipKey() {
String keyName = "";
DissectKey dissectKey = new DissectKey(keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE));
assertThat(dissectKey.skip(), is(true));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testNamedSkipKey() {
String keyName = "myname";
DissectKey dissectKey = new DissectKey("?" +keyName);
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NAMED_SKIP));
assertThat(dissectKey.skip(), is(true));
assertThat(dissectKey.skipRightPadding(), is(false));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testSkipKeyWithPadding() {
String keyName = "";
DissectKey dissectKey = new DissectKey(keyName + "->");
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NONE));
assertThat(dissectKey.skip(), is(true));
assertThat(dissectKey.skipRightPadding(), is(true));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testNamedEmptySkipKeyWithPadding() {
String keyName = "";
DissectKey dissectKey = new DissectKey("?" +keyName + "->");
assertThat(dissectKey.getModifier(), equalTo(DissectKey.Modifier.NAMED_SKIP));
assertThat(dissectKey.skip(), is(true));
assertThat(dissectKey.skipRightPadding(), is(true));
assertThat(dissectKey.getAppendPosition(), equalTo(0));
assertThat(dissectKey.getName(), equalTo(keyName));
}
public void testInvalidModifiers() {
//should never happen due to regex
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DissectKey.Modifier.fromString("x"));
assertThat(e.getMessage(), CoreMatchers.containsString("invalid modifier"));
}
}


@@ -0,0 +1,93 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.dissect;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.test.ESTestCase;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.stream.IntStream;
import static org.hamcrest.Matchers.equalTo;
public class DissectMatchTests extends ESTestCase {
public void testIllegalArgs() {
expectThrows(IllegalArgumentException.class, () -> new DissectMatch("", 0, 1, 0, 0));
expectThrows(IllegalArgumentException.class, () -> new DissectMatch("", 1, 0, 0, 0));
}
public void testValidAndFullyMatched() {
int expectedMatches = randomIntBetween(1, 26);
DissectMatch dissectMatch = new DissectMatch("", expectedMatches, expectedMatches, 0, 0);
IntStream.range(97, 97 + expectedMatches) //allow for a-z values
.forEach(i -> dissectMatch.add(new DissectKey(new String(new byte[]{(byte) i}, StandardCharsets.UTF_8)), ""));
assertThat(dissectMatch.fullyMatched(), equalTo(true));
assertThat(dissectMatch.isValid(dissectMatch.getResults()), equalTo(true));
}
public void testNotValidAndFullyMatched() {
int expectedMatches = randomIntBetween(1, 26);
DissectMatch dissectMatch = new DissectMatch("", expectedMatches, expectedMatches, 0, 0);
IntStream.range(97, 97 + expectedMatches - 1) //allow for a-z values
.forEach(i -> dissectMatch.add(new DissectKey(new String(new byte[]{(byte) i}, StandardCharsets.UTF_8)), ""));
assertThat(dissectMatch.fullyMatched(), equalTo(false));
assertThat(dissectMatch.isValid(dissectMatch.getResults()), equalTo(false));
}
public void testGetResultsIdempotent(){
int expectedMatches = randomIntBetween(1, 26);
DissectMatch dissectMatch = new DissectMatch("", expectedMatches, expectedMatches, 0, 0);
IntStream.range(97, 97 + expectedMatches) //allow for a-z values
.forEach(i -> dissectMatch.add(new DissectKey(new String(new byte[]{(byte) i}, StandardCharsets.UTF_8)), ""));
assertThat(dissectMatch.getResults(), equalTo(dissectMatch.getResults()));
}
public void testAppend(){
DissectMatch dissectMatch = new DissectMatch("-", 3, 1, 3, 0);
dissectMatch.add(new DissectKey("+a"), "x");
dissectMatch.add(new DissectKey("+a"), "y");
dissectMatch.add(new DissectKey("+a"), "z");
Map<String, String> results = dissectMatch.getResults();
assertThat(dissectMatch.isValid(results), equalTo(true));
assertThat(results, equalTo(MapBuilder.newMapBuilder().put("a", "x-y-z").map()));
}
public void testAppendWithOrder(){
DissectMatch dissectMatch = new DissectMatch("-", 3, 1, 3, 0);
dissectMatch.add(new DissectKey("+a/3"), "x");
dissectMatch.add(new DissectKey("+a"), "y");
dissectMatch.add(new DissectKey("+a/1"), "z");
Map<String, String> results = dissectMatch.getResults();
assertThat(dissectMatch.isValid(results), equalTo(true));
assertThat(results, equalTo(MapBuilder.newMapBuilder().put("a", "y-z-x").map()));
}
public void testReference(){
DissectMatch dissectMatch = new DissectMatch("-", 2, 1, 0, 1);
dissectMatch.add(new DissectKey("&a"), "x");
dissectMatch.add(new DissectKey("*a"), "y");
Map<String, String> results = dissectMatch.getResults();
assertThat(dissectMatch.isValid(results), equalTo(true));
assertThat(results, equalTo(MapBuilder.newMapBuilder().put("y", "x").map()));
}
}


@@ -0,0 +1,386 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.dissect;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matchers;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiAlphanumOfLengthBetween;
public class DissectParserTests extends ESTestCase {
public void testJavaDocExamples() {
assertMatch("%{a} %{b},%{c}", "foo bar,baz", Arrays.asList("a", "b", "c"), Arrays.asList("foo", "bar", "baz"));
assertMiss("%{a},%{b}:%{c}", "foo,bar,baz");
assertMatch("%{a->} %{b} %{c}", "foo bar baz", Arrays.asList("a", "b", "c"), Arrays.asList("foo", "bar", "baz"));
assertMatch("%{a} %{+a} %{+a}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foobarbaz"));
assertMatch("%{a} %{+a/2} %{+a/1}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foobazbar"));
assertMatch("%{*a} %{b} %{&a}", "foo bar baz", Arrays.asList("foo", "b"), Arrays.asList("baz", "bar"));
assertMatch("%{a} %{} %{c}", "foo bar baz", Arrays.asList("a", "c"), Arrays.asList("foo", "baz"));
assertMatch("%{a} %{?skipme} %{c}", "foo bar baz", Arrays.asList("a", "c"), Arrays.asList("foo", "baz"));
assertMatch("%{a},%{b},%{c},%{d}", "foo,,,", Arrays.asList("a", "b", "c", "d"), Arrays.asList("foo", "", "", ""));
assertMatch("%{a->},%{b}", "foo,,,,,,bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
}
/**
* Borrowed from Logstash's test cases:
* https://github.com/logstash-plugins/logstash-filter-dissect/blob/master/src/test/java/org/logstash/dissect/DissectorTest.java
* Append Note - Logstash appends with the delimiter as the separator between values; this implementation uses a user-defined separator
*/
public void testLogstashSpecs() {
assertMatch("%{a} %{b->} %{c}", "foo bar baz", Arrays.asList("a", "b", "c"), Arrays.asList("foo", "bar", "baz"));
assertMiss("%{a}%{b} %{c}", null);
assertMiss("%{a} %{b}%{c} %{d}", "foo bar baz");
assertMiss("%{a} %{b} %{c}%{d}", "foo bar baz quux");
assertMatch("%{a} %{b->} %{c}", "foo bar baz", Arrays.asList("a", "b", "c"), Arrays.asList("foo", "bar", "baz"));
assertMatch("%{a} %{} %{c}", "foo bar baz", Arrays.asList("a", "c"), Arrays.asList("foo", "baz"));
assertMatch("%{a} %{b} %{+b} %{z}", "foo bar baz quux", Arrays.asList("a", "b", "z"), Arrays.asList("foo", "bar baz", "quux"), " ");
assertMatch("%{a}------->%{b}", "foo------->bar baz quux", Arrays.asList("a", "b"), Arrays.asList("foo", "bar baz quux"));
assertMatch("%{a}------->%{}", "foo------->bar baz quux", Arrays.asList("a"), Arrays.asList("foo"));
assertMatch("%{a} » %{b}»%{c}€%{d}", "foo » bar»baz€quux",
Arrays.asList("a", "b", "c", "d"), Arrays.asList("foo", "bar", "baz", "quux"));
assertMatch("%{a} %{b} %{+a}", "foo bar baz quux", Arrays.asList("a", "b"), Arrays.asList("foo baz quux", "bar"), " ");
//Logstash supports implicit ordering anchored by the key without the '+'
//This implementation only honors implicit ordering for appending right to left; otherwise explicit order (/N) is required.
//The results of this test differ from Logstash.
assertMatch("%{+a} %{a} %{+a} %{b}", "December 31 1999 quux",
Arrays.asList("a", "b"), Arrays.asList("December 31 1999", "quux"), " ");
//Same test as above, but with same result as Logstash using explicit ordering in the pattern
assertMatch("%{+a/1} %{a} %{+a/2} %{b}", "December 31 1999 quux",
Arrays.asList("a", "b"), Arrays.asList("31 December 1999", "quux"), " ");
assertMatch("%{+a/2} %{+a/4} %{+a/1} %{+a/3}", "bar quux foo baz", Arrays.asList("a"), Arrays.asList("foo bar baz quux"), " ");
assertMatch("%{+a} %{b}", "foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
assertMatch("%{+a} %{b} %{+a} %{c}", "foo bar baz quux",
Arrays.asList("a", "b", "c"), Arrays.asList("foo baz", "bar", "quux"), " ");
assertMatch("%{} %{syslog_timestamp} %{hostname} %{rt}: %{reason} %{+reason} %{src_ip}/%{src_port}->%{dst_ip}/%{dst_port} " +
"%{polrt} %{+polrt} %{+polrt} %{from_zone} %{to_zone} %{rest}",
"42 2016-05-25T14:47:23Z host.name.com RT_FLOW - RT_FLOW_SESSION_DENY: session denied 2.2.2.20/60000->1.1.1.10/8090 None " +
"6(0) DEFAULT-DENY ZONE-UNTRUST ZONE-DMZ UNKNOWN UNKNOWN N/A(N/A) ge-0/0/0.0",
Arrays.asList("syslog_timestamp", "hostname", "rt", "reason", "src_ip", "src_port", "dst_ip", "dst_port", "polrt"
, "from_zone", "to_zone", "rest"),
Arrays.asList("2016-05-25T14:47:23Z", "host.name.com", "RT_FLOW - RT_FLOW_SESSION_DENY", "session denied", "2.2.2.20", "60000"
, "1.1.1.10", "8090", "None 6(0) DEFAULT-DENY", "ZONE-UNTRUST", "ZONE-DMZ", "UNKNOWN UNKNOWN N/A(N/A) ge-0/0/0.0"), " ");
assertBadKey("%{+/2}");
assertBadKey("%{&+a_field}");
assertMatch("%{a->} %{b->}---%{c}", "foo bar------------baz",
Arrays.asList("a", "b", "c"), Arrays.asList("foo", "bar", "baz"));
assertMatch("%{->}-%{a}", "-----666", Arrays.asList("a"), Arrays.asList("666"));
assertMatch("%{?skipme->}-%{a}", "-----666", Arrays.asList("a"), Arrays.asList("666"));
assertMatch("%{a},%{b},%{c},%{d},%{e},%{f}", "111,,333,,555,666",
Arrays.asList("a", "b", "c", "d", "e", "f"), Arrays.asList("111", "", "333", "", "555", "666"));
assertMatch("%{a}.࿏.%{b}", "⟳༒.࿏.༒⟲", Arrays.asList("a", "b"), Arrays.asList("⟳༒", "༒⟲"));
assertMatch("%{a}", "", Arrays.asList("a"), Arrays.asList(""));
assertMatch("%{a}{\n}%{b}", "aaa{\n}bbb", Arrays.asList("a", "b"), Arrays.asList("aaa", "bbb"));
assertMiss("MACHINE[%{a}] %{b}", "1234567890 MACHINE[foo] bar");
assertMiss("%{a} %{b} %{c}", "foo:bar:baz");
assertMatch("/var/%{key1}/log/%{key2}.log", "/var/foo/log/bar.log", Arrays.asList("key1", "key2"), Arrays.asList("foo", "bar"));
assertMatch("%{a->} %{b}-.-%{c}-%{d}-..-%{e}-%{f}-%{g}-%{h}", "foo bar-.-baz-1111-..-22-333-4444-55555",
Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h"),
Arrays.asList("foo", "bar", "baz", "1111", "22", "333", "4444", "55555"));
}
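/**
* Illustrative sketch, not part of the original suite: a minimal standalone use of the user-defined
* append separator described in the Append Note above. It assumes only the DissectParser API already
* exercised elsewhere in this class, namely new DissectParser(pattern, appendSeparator).parse(input)
* returning a Map of keys to values; the method name and sample values are hypothetical.
*/
public void testAppendSeparatorSketch() {
// "%{name} %{+name}" appends the second capture onto the first using the supplied "-" separator
Map<String, String> results = new DissectParser("%{name} %{+name}", "-").parse("john jacob");
assertThat(results, Matchers.equalTo(Collections.singletonMap("name", "john-jacob")));
}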
public void testBasicMatch() {
String valueFirstInput = "";
String keyFirstPattern = "";
String delimiterFirstInput = "";
String delimiterFirstPattern = "";
//parallel arrays
List<String> expectedKeys = Arrays.asList(generateRandomStringArray(100, 10, false, false));
List<String> expectedValues = new ArrayList<>(expectedKeys.size());
for (String key : expectedKeys) {
String value = randomAsciiAlphanumOfLengthBetween(1, 100);
String delimiter = Integer.toString(randomInt()); //int to ensure values and delimiters don't overlap, else validation can fail
keyFirstPattern += "%{" + key + "}" + delimiter;
valueFirstInput += value + delimiter;
delimiterFirstPattern += delimiter + "%{" + key + "}";
delimiterFirstInput += delimiter + value;
expectedValues.add(value);
}
assertMatch(keyFirstPattern, valueFirstInput, expectedKeys, expectedValues);
assertMatch(delimiterFirstPattern, delimiterFirstInput, expectedKeys, expectedValues);
}
public void testBasicMatchUnicode() {
String valueFirstInput = "";
String keyFirstPattern = "";
String delimiterFirstInput = "";
String delimiterFirstPattern = "";
//parallel arrays
List<String> expectedKeys = new ArrayList<>();
List<String> expectedValues = new ArrayList<>();
for (int i = 0; i < randomIntBetween(1, 100); i++) {
String key = randomAsciiAlphanumOfLengthBetween(1, 100);
String value = randomRealisticUnicodeOfCodepointLengthBetween(1, 100);
String delimiter = Integer.toString(randomInt()); //int to ensure values and delimiters don't overlap, else validation can fail
keyFirstPattern += "%{" + key + "}" + delimiter;
valueFirstInput += value + delimiter;
delimiterFirstPattern += delimiter + "%{" + key + "}";
delimiterFirstInput += delimiter + value;
expectedKeys.add(key);
expectedValues.add(value);
}
assertMatch(keyFirstPattern, valueFirstInput, expectedKeys, expectedValues);
assertMatch(delimiterFirstPattern, delimiterFirstInput, expectedKeys, expectedValues);
}
public void testMatchUnicode() {
assertMatch("%{a} %{b}", "foo 子", Arrays.asList("a", "b"), Arrays.asList("foo", ""));
assertMatch("%{a}࿏%{b} %{c}", "⟳༒࿏༒⟲ 子", Arrays.asList("a", "b", "c"), Arrays.asList("⟳༒", "༒⟲", ""));
assertMatch("%{a}࿏%{+a} %{+a}", "⟳༒࿏༒⟲ 子", Arrays.asList("a"), Arrays.asList("⟳༒༒⟲子"));
assertMatch("%{a}࿏%{+a/2} %{+a/1}", "⟳༒࿏༒⟲ 子", Arrays.asList("a"), Arrays.asList("⟳༒子༒⟲"));
assertMatch("%{a->}࿏%{b}", "⟳༒࿏࿏࿏࿏࿏༒⟲", Arrays.asList("a", "b"), Arrays.asList("⟳༒", "༒⟲"));
assertMatch("%{*a}࿏%{&a}", "⟳༒࿏༒⟲", Arrays.asList("⟳༒"), Arrays.asList("༒⟲"));
assertMatch("%{}࿏%{a}", "⟳༒࿏༒⟲", Arrays.asList("a"), Arrays.asList("༒⟲"));
}
public void testMatchRemainder() {
assertMatch("%{a}", "foo bar the rest", Arrays.asList("a"), Arrays.asList("foo bar the rest"));
assertMatch("%{a} %{b}", "foo bar the rest", Arrays.asList("a", "b"), Arrays.asList("foo", "bar the rest"));
assertMatch("%{} %{b}", "foo bar the rest", Arrays.asList("b"), Arrays.asList("bar the rest"));
assertMatch("%{a} %{b->}", "foo bar the rest", Arrays.asList("a", "b"), Arrays.asList("foo", "bar the rest"));
assertMatch("%{*a} %{&a}", "foo bar the rest", Arrays.asList("foo"), Arrays.asList("bar the rest"));
assertMatch("%{a} %{+a}", "foo bar the rest", Arrays.asList("a"), Arrays.asList("foo bar the rest"), " ");
}
public void testAppend() {
assertMatch("%{a} %{+a} %{+a}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foobarbaz"));
assertMatch("%{a} %{+a} %{b} %{+b}", "foo bar baz lol", Arrays.asList("a", "b"), Arrays.asList("foobar", "bazlol"));
assertMatch("%{a} %{+a/2} %{+a/1}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foobazbar"));
assertMatch("%{a} %{+a/2} %{+a/1}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foo baz bar"), " ");
}
public void testAssociate() {
assertMatch("%{*a} %{&a}", "foo bar", Arrays.asList("foo"), Arrays.asList("bar"));
assertMatch("%{&a} %{*a}", "foo bar", Arrays.asList("bar"), Arrays.asList("foo"));
assertMatch("%{*a} %{&a} %{*b} %{&b}", "foo bar baz lol", Arrays.asList("foo", "baz"), Arrays.asList("bar", "lol"));
assertMatch("%{*a} %{&a} %{c} %{*b} %{&b}", "foo bar x baz lol",
Arrays.asList("foo", "baz", "c"), Arrays.asList("bar", "lol", "x"));
assertBadPattern("%{*a} %{a}");
assertBadPattern("%{a} %{&a}");
assertMiss("%{*a} %{&a} {a} %{*b} %{&b}", "foo bar x baz lol");
}
public void testAppendAndAssociate() {
assertMatch("%{a} %{+a} %{*b} %{&b}", "foo bar baz lol", Arrays.asList("a", "baz"), Arrays.asList("foobar", "lol"));
assertMatch("%{a->} %{+a/2} %{+a/1} %{*b} %{&b}", "foo bar baz lol x",
Arrays.asList("a", "lol"), Arrays.asList("foobazbar", "x"));
}
public void testEmptyKey() {
assertMatch("%{} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar"));
assertMatch("%{a} %{}", "foo bar", Arrays.asList("a"), Arrays.asList("foo"));
assertMatch("%{->} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar"));
assertMatch("%{->} %{b}", " bar", Arrays.asList("b"), Arrays.asList("bar"));
assertMatch("%{a} %{->}", "foo bar ", Arrays.asList("a"), Arrays.asList("foo"));
}
public void testNamedSkipKey() {
assertMatch("%{?foo} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar"));
assertMatch("%{?} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar"));
assertMatch("%{a} %{?bar}", "foo bar", Arrays.asList("a"), Arrays.asList("foo"));
assertMatch("%{?foo->} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar"));
assertMatch("%{?->} %{b}", "foo bar", Arrays.asList("b"), Arrays.asList("bar"));
assertMatch("%{?foo->} %{b}", " bar", Arrays.asList("b"), Arrays.asList("bar"));
assertMatch("%{a} %{->?bar}", "foo bar ", Arrays.asList("a"), Arrays.asList("foo"));
assertMatch("%{a} %{?skipme} %{?skipme}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foo"));
assertMatch("%{a} %{?} %{?}", "foo bar baz", Arrays.asList("a"), Arrays.asList("foo"));
}
public void testConsecutiveDelimiters() {
//leading
assertMatch("%{->},%{a}", ",,,,,foo", Arrays.asList("a"), Arrays.asList("foo"));
assertMatch("%{a->},%{b}", ",,,,,foo", Arrays.asList("a", "b"), Arrays.asList("", "foo"));
//trailing
assertMatch("%{a->},", "foo,,,,,", Arrays.asList("a"), Arrays.asList("foo"));
assertMatch("%{a} %{b},", "foo bar,,,,,", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
assertMatch("%{a} %{b->},", "foo bar,,,,,", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
//middle
assertMatch("%{a->},%{b}", "foo,,,,,bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
assertMatch("%{a->} %{b}", "foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
assertMatch("%{a->}x%{b}", "fooxxxxxbar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
assertMatch("%{a->} xyz%{b}", "foo xyz xyz xyz xyz xyzbar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
//skipped with empty values
assertMatch("%{a},%{b},%{c},%{d}", "foo,,,", Arrays.asList("a", "b", "c", "d"), Arrays.asList("foo", "", "", ""));
assertMatch("%{a},%{b},%{c},%{d}", "foo,,bar,baz", Arrays.asList("a", "b", "c", "d"), Arrays.asList("foo", "", "bar", "baz"));
assertMatch("%{a},%{b},%{c},%{d}", "foo,,,baz", Arrays.asList("a", "b", "c", "d"), Arrays.asList("foo", "", "", "baz"));
assertMatch("%{a},%{b},%{c},%{d}", ",bar,,baz", Arrays.asList("a", "b", "c", "d"), Arrays.asList("", "bar", "", "baz"));
assertMatch("%{->},%{a->},%{b}", ",,,bar,,baz", Arrays.asList("a", "b"), Arrays.asList("bar", "baz"));
}
public void testAppendWithConsecutiveDelimiters() {
assertMatch("%{+a/1},%{+a/3}-%{+a/2} %{b}", "foo,bar----baz lol", Arrays.asList("a", "b"), Arrays.asList("foobar", ""));
assertMatch("%{+a/1},%{+a/3->}-%{+a/2} %{b}", "foo,bar----baz lol", Arrays.asList("a", "b"), Arrays.asList("foobazbar", "lol"));
}
public void testSkipRightPadding() {
assertMatch("%{a->} %{b}", "foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
assertMatch("%{a->} %{b}", "foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
assertMatch("%{->} %{a}", "foo bar", Arrays.asList("a"), Arrays.asList("bar"));
assertMatch("%{a->} %{+a->} %{*b->} %{&b->} %{c}", "foo bar baz lol x",
Arrays.asList("a", "baz", "c"), Arrays.asList("foobar", "lol", "x"));
}
public void testTrimmedEnd() {
assertMatch("%{a} %{b}", "foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
assertMatch("%{a} %{b->} ", "foo bar ", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
//only whitespace is trimmed in the absence of trailing characters
assertMatch("%{a} %{b->}", "foo bar,,,,,,", Arrays.asList("a", "b"), Arrays.asList("foo", "bar,,,,,,"));
//consecutive delimiters + right padding can be used to skip over the trailing delimiters
assertMatch("%{a} %{b->},", "foo bar,,,,,,", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
}
public void testLeadingDelimiter() {
assertMatch(",,,%{a} %{b}", ",,,foo bar", Arrays.asList("a", "b"), Arrays.asList("foo", "bar"));
assertMatch(",%{a} %{b}", ",,foo bar", Arrays.asList("a", "b"), Arrays.asList(",foo", "bar"));
}
/**
* Runtime errors
*/
public void testMiss() {
assertMiss("%{a}%{b}", "foo");
assertMiss("%{a},%{b}", "foo bar");
assertMiss("%{a}, %{b}", "foo,bar");
assertMiss("x%{a},%{b}", "foo,bar");
assertMiss("x%{},%{b}", "foo,bar");
assertMiss("leading_delimiter_long%{a}", "foo");
assertMiss("%{a}trailing_delimiter_long", "foo");
assertMiss("leading_delimiter_long%{a}trailing_delimiter_long", "foo");
assertMiss("%{a}x", "foo");
assertMiss("%{a},%{b}x", "foo,bar");
}
/**
* Construction errors
*/
public void testBadPatternOrKey() {
assertBadPattern("");
assertBadPattern("{}");
assertBadPattern("%{*a} %{&b}");
assertBadKey("%{*}");
assertBadKey("%{++}");
}
public void testSyslog() {
assertMatch("%{timestamp} %{+timestamp} %{+timestamp} %{logsource} %{program}[%{pid}]: %{message}",
"Mar 16 00:01:25 evita postfix/smtpd[1713]: connect from camomile.cloud9.net[168.100.1.3]",
Arrays.asList("timestamp", "logsource", "program", "pid", "message"),
Arrays.asList("Mar 16 00:01:25", "evita", "postfix/smtpd", "1713", "connect from camomile.cloud9.net[168.100.1.3]"), " ");
}
public void testApacheLog() {
assertMatch("%{clientip} %{ident} %{auth} [%{timestamp}] \"%{verb} %{request} HTTP/%{httpversion}\" %{response} %{bytes}" +
" \"%{referrer}\" \"%{agent}\" %{->}",
"31.184.238.164 - - [24/Jul/2014:05:35:37 +0530] \"GET /logs/access.log HTTP/1.0\" 200 69849 " +
"\"http://8rursodiol.enjin.com\" \"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) " +
"Chrome/30.0.1599.12785 YaBrowser/13.12.1599.12785 Safari/537.36\" \"www.dlwindianrailways.com\"",
Arrays.asList("clientip", "ident", "auth", "timestamp", "verb", "request", "httpversion", "response", "bytes",
"referrer", "agent"),
Arrays.asList("31.184.238.164", "-", "-", "24/Jul/2014:05:35:37 +0530", "GET", "/logs/access.log", "1.0", "200", "69849",
"http://8rursodiol.enjin.com", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36" +
" (KHTML, like Gecko) Chrome/30.0.1599.12785 YaBrowser/13.12.1599.12785 Safari/537.36"));
}
/**
* Shared specification between Beats, Logstash, and Ingest node
*/
public void testJsonSpecification() throws Exception {
ObjectMapper mapper = new ObjectMapper();
JsonNode rootNode = mapper.readTree(this.getClass().getResourceAsStream("/specification/tests.json"));
Iterator<JsonNode> tests = rootNode.elements();
while (tests.hasNext()) {
JsonNode test = tests.next();
boolean skip = test.path("skip").asBoolean();
if (!skip) {
String name = test.path("name").asText();
logger.debug("Running Json specification: " + name);
String pattern = test.path("tok").asText();
String input = test.path("msg").asText();
String append = test.path("append").asText();
boolean fail = test.path("fail").asBoolean();
Iterator<Map.Entry<String, JsonNode>> expected = test.path("expected").fields();
List<String> expectedKeys = new ArrayList<>();
List<String> expectedValues = new ArrayList<>();
expected.forEachRemaining(entry -> {
expectedKeys.add(entry.getKey());
expectedValues.add(entry.getValue().asText());
});
if (fail) {
assertFail(pattern, input);
} else {
assertMatch(pattern, input, expectedKeys, expectedValues, append);
}
}
}
}
private DissectException assertFail(String pattern, String input) {
return expectThrows(DissectException.class, () -> new DissectParser(pattern, null).parse(input));
}
private void assertMiss(String pattern, String input) {
DissectException e = assertFail(pattern, input);
assertThat(e.getMessage(), CoreMatchers.containsString("Unable to find match for dissect pattern"));
assertThat(e.getMessage(), CoreMatchers.containsString(pattern));
assertThat(e.getMessage(), input == null ? CoreMatchers.containsString("null") : CoreMatchers.containsString(input));
}
private void assertBadPattern(String pattern) {
DissectException e = assertFail(pattern, null);
assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse pattern"));
assertThat(e.getMessage(), CoreMatchers.containsString(pattern));
}
private void assertBadKey(String pattern, String key) {
DissectException e = assertFail(pattern, null);
assertThat(e.getMessage(), CoreMatchers.containsString("Unable to parse key"));
assertThat(e.getMessage(), CoreMatchers.containsString(key));
}
private void assertBadKey(String pattern) {
assertBadKey(pattern, pattern.replace("%{", "").replace("}", ""));
}
private void assertMatch(String pattern, String input, List<String> expectedKeys, List<String> expectedValues) {
assertMatch(pattern, input, expectedKeys, expectedValues, null);
}
private void assertMatch(String pattern, String input, List<String> expectedKeys, List<String> expectedValues, String appendSeparator) {
Map<String, String> results = new DissectParser(pattern, appendSeparator).parse(input);
List<String> foundKeys = new ArrayList<>(results.keySet());
List<String> foundValues = new ArrayList<>(results.values());
Collections.sort(foundKeys);
Collections.sort(foundValues);
Collections.sort(expectedKeys);
Collections.sort(expectedValues);
assertThat(foundKeys, Matchers.equalTo(expectedKeys));
assertThat(foundValues, Matchers.equalTo(expectedValues));
}
}

View File

@ -0,0 +1,363 @@
[
{
"name": "When all the defined fields are captured by we have remaining data",
"tok": "level=%{level} ts=%{timestamp} caller=%{caller} msg=\"%{message}\"",
"msg": "level=info ts=2018-06-27T17:19:13.036579993Z caller=main.go:222 msg=\"Starting OK\" version=\"(version=2.3.1, branch=HEAD, revision=188ca45bd85ce843071e768d855722a9d9dabe03)\"}",
"expected": {
"caller": "main.go:222",
"level": "info",
"message": "Starting OK",
"timestamp": "2018-06-27T17:19:13.036579993Z"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "Complex stack trace",
"tok": "%{day}-%{month}-%{year} %{hour} %{severity} [%{thread_id}] %{origin} %{message}",
"msg": "18-Apr-2018 06:53:20.411 INFO [http-nio-8080-exec-1] org.apache.coyote.http11.Http11Processor.service Error parsing HTTP request header\n Note: further occurrences of HTTP header parsing errors will be logged at DEBUG level.\n java.lang.IllegalArgumentException: Invalid character found in method name. HTTP method names must be tokens\n at org.apache.coyote.http11.Http11InputBuffer.parseRequestLine(Http11InputBuffer.java:426)\n at org.apache.coyote.http11.Http11Processor.service(Http11Processor.java:687)\n at org.apache.coyote.AbstractProcessorLight.process(AbstractProcessorLight.java:66)\n at org.apache.coyote.AbstractProtocol$ConnectionHandler.process(AbstractProtocol.java:790)\n at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1459)\n at org.apache.tomcat.util.net.SocketProcessorBase.run(SocketProcessorBase.java:49)\n at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)\n at java.lang.Thread.run(Thread.java:748)",
"expected": {
"day": "18",
"hour": "06:53:20.411",
"message": "Error parsing HTTP request header\n Note: further occurrences of HTTP header parsing errors will be logged at DEBUG level.\n java.lang.IllegalArgumentException: Invalid character found in method name. HTTP method names must be tokens\n at org.apache.coyote.http11.Http11InputBuffer.parseRequestLine(Http11InputBuffer.java:426)\n at org.apache.coyote.http11.Http11Processor.service(Http11Processor.java:687)\n at org.apache.coyote.AbstractProcessorLight.process(AbstractProcessorLight.java:66)\n at org.apache.coyote.AbstractProtocol$ConnectionHandler.process(AbstractProtocol.java:790)\n at org.apache.tomcat.util.net.NioEndpoint$SocketProcessor.doRun(NioEndpoint.java:1459)\n at org.apache.tomcat.util.net.SocketProcessorBase.run(SocketProcessorBase.java:49)\n at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n at org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)\n at java.lang.Thread.run(Thread.java:748)",
"month": "Apr",
"origin": "org.apache.coyote.http11.Http11Processor.service",
"severity": "INFO",
"thread_id": "http-nio-8080-exec-1",
"year": "2018"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "success when delimiter found at the beginning and end of the string",
"tok": "/var/log/%{key}.log",
"msg": "/var/log/foobar.log",
"expected": {
"key": "foobar"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "fails when delimiter is not found at the beginning of the string",
"tok": "/var/log/%{key}.log",
"msg": "foobar",
"expected": null,
"skip": false,
"fail": true,
"append": ""
},
{
"name": "fails when delimiter is not found after the key",
"tok": "/var/log/%{key}.log",
"msg": "/var/log/foobar",
"expected": null,
"skip": false,
"fail": true,
"append": ""
},
{
"name": "simple dissect",
"tok": "%{key}",
"msg": "foobar",
"expected": {
"key": "foobar"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "dissect two replacement",
"tok": "%{key1} %{key2}",
"msg": "foo bar",
"expected": {
"key1": "foo",
"key2": "bar"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "fail on partial match",
"tok": "%{key1} %{key2} %{key3}",
"msg": "foo bar",
"expected": null,
"skip": false,
"fail": true,
"append": ""
},
{
"name": "one level dissect not end of string",
"tok": "/var/%{key}/log",
"msg": "/var/foobar/log",
"expected": {
"key": "foobar"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "one level dissect",
"tok": "/var/%{key}",
"msg": "/var/foobar/log",
"expected": {
"key": "foobar/log"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "multiple keys dissect end of string",
"tok": "/var/%{key}/log/%{key1}",
"msg": "/var/foobar/log/apache",
"expected": {
"key": "foobar",
"key1": "apache"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "multiple keys not end of string",
"tok": "/var/%{key}/log/%{key1}.log",
"msg": "/var/foobar/log/apache.log",
"expected": {
"key": "foobar",
"key1": "apache"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "append with order",
"tok": "%{+key/3} %{+key/1} %{+key/2}",
"msg": "1 2 3",
"expected": {
"key": "231"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "append with order and separator",
"tok": "%{+key/3} %{+key/1} %{+key/2}",
"msg": "1 2 3",
"expected": {
"key": "2::3::1"
},
"skip": false,
"fail": false,
"append": "::"
},
{
"name": "append with order and right padding",
"tok": "%{+key/3} %{+key/1-\u003e} %{+key/2}",
"msg": "1 2 3",
"expected": {
"key": "231"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "simple append",
"tok": "%{key}-%{+key}-%{+key}",
"msg": "1-2-3",
"expected": {
"key": "123"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "simple append with separator",
"tok": "%{key}-%{+key}-%{+key}",
"msg": "1-2-3",
"expected": {
"key": "1,2,3"
},
"skip": false,
"fail": false,
"append": ","
},
{
"name": "reference field",
"tok": "%{*key} %{\u0026key}",
"msg": "hello world",
"expected": {
"hello": "world"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "reference field alt order",
"tok": "%{\u0026key} %{*key}",
"msg": "hello world",
"expected": {
"world": "hello"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "nameless skip field",
"tok": "%{} %{key}",
"msg": "hello world",
"expected": {
"key": "world"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "named skip field",
"tok": "%{?skipme} %{key}",
"msg": "hello world",
"expected": {
"key": "world"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "reference without pairing",
"tok": "%{key} %{\u0026key}",
"msg": "hello world",
"expected": null,
"skip": false,
"fail": true,
"append": ""
},
{
"name": "missing fields (consecutive delimiters)",
"tok": "%{name},%{addr1},%{addr2},%{addr3},%{city},%{zip}",
"msg": "Jane Doe,4321 Fifth Avenue,,,New York,87432",
"expected": {
"addr1": "4321 Fifth Avenue",
"addr2": "",
"addr3": "",
"city": "New York",
"name": "Jane Doe",
"zip": "87432"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "missing fields with right padding (consecutive delimiters)",
"tok": "%{name},%{addr1-\u003e},%{city},%{zip}",
"msg": "Jane Doe,4321 Fifth Avenue,,,New York,87432",
"expected": {
"addr1": "4321 Fifth Avenue",
"city": "New York",
"name": "Jane Doe",
"zip": "87432"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "ignore right padding",
"tok": "%{id} %{function-\u003e} %{server}",
"msg": "00000043 ViewReceive machine-321",
"expected": {
"function": "ViewReceive",
"id": "00000043",
"server": "machine-321"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "padding on the last key need a delimiter",
"tok": "%{id} %{function} %{server-\u003e} ",
"msg": "00000043 ViewReceive machine-321 ",
"expected": {
"function": "ViewReceive",
"id": "00000043",
"server": "machine-321"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "ignore left padding",
"tok": "%{id-\u003e} %{function} %{server}",
"msg": "00000043 ViewReceive machine-321",
"expected": {
"function": "ViewReceive",
"id": "00000043",
"server": "machine-321"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "when the delimiters contains `{` and `}`",
"tok": "{%{a}}{%{b}} %{rest}",
"msg": "{c}{d} anything",
"expected": {
"a": "c",
"b": "d",
"rest": "anything"
},
"skip": false,
"fail": false,
"append": ""
},
{
"name": "no keys defined",
"tok": "anything",
"msg": "anything",
"expected": null,
"skip": false,
"fail": true,
"append": ""
},
{
"name": "invalid key",
"tok": "%{some?thing}",
"msg": "anything",
"expected": null,
"skip": false,
"fail": true,
"append": ""
},
{
"name": "matches non-ascii",
"tok": "%{a}࿏%{b} %{c}",
"msg": "⟳༒࿏༒⟲ 子",
"expected": {
"a": "⟳༒",
"b": "༒⟲",
"c": "子"
},
"skip": false,
"fail": false,
"append": ""
}
]

View File

@ -53,7 +53,7 @@ public final class WhitelistLoader {
* a Painless type name with the exception that any dollar symbols used as part of inner classes will * a Painless type name with the exception that any dollar symbols used as part of inner classes will
* be replaced with dot symbols. </li> * be replaced with dot symbols. </li>
* <li> short Java type name - The text after the final dot symbol of any specified Java class. A * <li> short Java type name - The text after the final dot symbol of any specified Java class. A
* short type Java name may be excluded by using the 'only_fqn' token during Painless class parsing * short type Java name may be excluded by using the 'no_import' token during Painless class parsing
* as described later. </li> * as described later. </li>
* </ul> * </ul>
* *
@ -65,7 +65,7 @@ public final class WhitelistLoader {
* <li> Primitive types may be specified starting with 'class' and followed by the Java type name, * <li> Primitive types may be specified starting with 'class' and followed by the Java type name,
* an opening bracket, a newline, a closing bracket, and a final newline. </li> * an opening bracket, a newline, a closing bracket, and a final newline. </li>
* <li> Complex types may be specified starting with 'class' and followed by the fully-qualified Java * <li> Complex types may be specified starting with 'class' and followed by the fully-qualified Java
* class name, optionally followed by an 'only_fqn' token, an opening bracket, a newline, * class name, optionally followed by a 'no_import' token, an opening bracket, a newline,
* constructor/method/field specifications, a closing bracket, and a final newline. Within a complex * constructor/method/field specifications, a closing bracket, and a final newline. Within a complex
* type the following may be parsed: * type the following may be parsed:
* <ul> * <ul>
@ -109,7 +109,7 @@ public final class WhitelistLoader {
* *
* # complex types * # complex types
* *
* class my.package.Example only_fqn { * class my.package.Example no_import {
* # constructors * # constructors
* () * ()
* (int) * (int)
@ -145,7 +145,7 @@ public final class WhitelistLoader {
String whitelistClassOrigin = null; String whitelistClassOrigin = null;
String javaClassName = null; String javaClassName = null;
boolean onlyFQNJavaClassName = false; boolean noImport = false;
List<WhitelistConstructor> whitelistConstructors = null; List<WhitelistConstructor> whitelistConstructors = null;
List<WhitelistMethod> whitelistMethods = null; List<WhitelistMethod> whitelistMethods = null;
List<WhitelistField> whitelistFields = null; List<WhitelistField> whitelistFields = null;
@ -160,7 +160,7 @@ public final class WhitelistLoader {
} }
// Handle a new class by resetting all the variables necessary to construct a new WhitelistClass for the whitelist. // Handle a new class by resetting all the variables necessary to construct a new WhitelistClass for the whitelist.
// Expects the following format: 'class' ID 'only_fqn'? '{' '\n' // Expects the following format: 'class' ID 'no_import'? '{' '\n'
if (line.startsWith("class ")) { if (line.startsWith("class ")) {
// Ensure the final token of the line is '{'. // Ensure the final token of the line is '{'.
if (line.endsWith("{") == false) { if (line.endsWith("{") == false) {
@ -172,8 +172,8 @@ public final class WhitelistLoader {
String[] tokens = line.substring(5, line.length() - 1).trim().split("\\s+"); String[] tokens = line.substring(5, line.length() - 1).trim().split("\\s+");
// Ensure the correct number of tokens. // Ensure the correct number of tokens.
if (tokens.length == 2 && "only_fqn".equals(tokens[1])) { if (tokens.length == 2 && "no_import".equals(tokens[1])) {
onlyFQNJavaClassName = true; noImport = true;
} else if (tokens.length != 1) { } else if (tokens.length != 1) {
throw new IllegalArgumentException("invalid class definition: failed to parse class name [" + line + "]"); throw new IllegalArgumentException("invalid class definition: failed to parse class name [" + line + "]");
} }
@ -194,13 +194,13 @@ public final class WhitelistLoader {
throw new IllegalArgumentException("invalid class definition: extraneous closing bracket"); throw new IllegalArgumentException("invalid class definition: extraneous closing bracket");
} }
whitelistClasses.add(new WhitelistClass(whitelistClassOrigin, javaClassName, onlyFQNJavaClassName, whitelistClasses.add(new WhitelistClass(whitelistClassOrigin, javaClassName, noImport,
whitelistConstructors, whitelistMethods, whitelistFields)); whitelistConstructors, whitelistMethods, whitelistFields));
// Set all the variables to null to ensure a new class definition is found before other parsable values. // Set all the variables to null to ensure a new class definition is found before other parsable values.
whitelistClassOrigin = null; whitelistClassOrigin = null;
javaClassName = null; javaClassName = null;
onlyFQNJavaClassName = false; noImport = false;
whitelistConstructors = null; whitelistConstructors = null;
whitelistMethods = null; whitelistMethods = null;
whitelistFields = null; whitelistFields = null;

View File

@ -293,7 +293,7 @@ public final class PainlessLookupBuilder {
if (canonicalClassName.equals(importedCanonicalClassName)) { if (canonicalClassName.equals(importedCanonicalClassName)) {
if (importClassName == true) { if (importClassName == true) {
throw new IllegalArgumentException("must use only_fqn parameter on class [" + canonicalClassName + "] with no package"); throw new IllegalArgumentException("must use no_import parameter on class [" + canonicalClassName + "] with no package");
} }
} else { } else {
Class<?> importedPainlessClass = canonicalClassNamesToClasses.get(importedCanonicalClassName); Class<?> importedPainlessClass = canonicalClassNamesToClasses.get(importedCanonicalClassName);
@ -301,7 +301,8 @@ public final class PainlessLookupBuilder {
if (importedPainlessClass == null) { if (importedPainlessClass == null) {
if (importClassName) { if (importClassName) {
if (existingPainlessClassBuilder != null) { if (existingPainlessClassBuilder != null) {
throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]"); throw new IllegalArgumentException(
"inconsistent no_import parameters found for class [" + canonicalClassName + "]");
} }
canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz); canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz);
@ -310,7 +311,7 @@ public final class PainlessLookupBuilder {
throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " + throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " +
"classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]"); "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]");
} else if (importClassName == false) { } else if (importClassName == false) {
throw new IllegalArgumentException("inconsistent only_fqn parameters found for class [" + canonicalClassName + "]"); throw new IllegalArgumentException("inconsistent no_import parameters found for class [" + canonicalClassName + "]");
} }
} }
} }

View File

@ -24,31 +24,31 @@
#### Primitive types #### Primitive types
class void only_fqn { class void no_import {
} }
class boolean only_fqn { class boolean no_import {
} }
class byte only_fqn { class byte no_import {
} }
class short only_fqn { class short no_import {
} }
class char only_fqn { class char no_import {
} }
class int only_fqn { class int no_import {
} }
class long only_fqn { class long no_import {
} }
class float only_fqn { class float no_import {
} }
class double only_fqn { class double no_import {
} }
#### Painless debugging API #### Painless debugging API
@ -134,7 +134,7 @@ class org.elasticsearch.index.mapper.IpFieldMapper$IpFieldType$IpScriptDocValues
# for testing. # for testing.
# currently FeatureTest exposes overloaded constructor, field load store, and overloaded static methods # currently FeatureTest exposes overloaded constructor, field load store, and overloaded static methods
class org.elasticsearch.painless.FeatureTest only_fqn { class org.elasticsearch.painless.FeatureTest no_import {
int z int z
() ()
(int,int) (int,int)

View File

@ -132,7 +132,7 @@ if (!s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3T
final String minioVersion = 'RELEASE.2018-06-22T23-48-46Z' final String minioVersion = 'RELEASE.2018-06-22T23-48-46Z'
final String minioBinDir = "${buildDir}/minio/bin" final String minioBinDir = "${buildDir}/minio/bin"
final String minioDataDir = "${buildDir}/minio/data" final String minioDataDir = "${buildDir}/minio/data"
final String minioAddress = "127.0.0.1:60920" final String minioAddress = "127.0.0.1"
final String minioDistribution final String minioDistribution
final String minioCheckSum final String minioCheckSum
@ -187,15 +187,30 @@ if (useFixture && minioDistribution) {
dependsOn installMinio dependsOn installMinio
ext.minioPid = 0L ext.minioPid = 0L
ext.minioPort = 0
doLast { doLast {
// get free port
for (int port = 60920; port < 60940; port++) {
try {
javax.net.ServerSocketFactory.getDefault().createServerSocket(port, 1, InetAddress.getByName(minioAddress)).close()
minioPort = port
break
} catch (BindException e) {
logger.info("Port " + port + " for Minio process is already taken", e)
}
}
if (minioPort == 0) {
throw new GradleException("Could not find a free port for Minio")
}
new File("${minioDataDir}/${s3PermanentBucket}").mkdirs() new File("${minioDataDir}/${s3PermanentBucket}").mkdirs()
// we skip these tests on Windows so we do not need to worry about compatibility here // we skip these tests on Windows so we do not need to worry about compatibility here
final ProcessBuilder minio = new ProcessBuilder( final ProcessBuilder minio = new ProcessBuilder(
"${minioBinDir}/${minioFileName}", "${minioBinDir}/${minioFileName}",
"server", "server",
"--address", "--address",
minioAddress, minioAddress + ":" + minioPort,
minioDataDir) minioDataDir)
minio.environment().put('MINIO_ACCESS_KEY', s3PermanentAccessKey) minio.environment().put('MINIO_ACCESS_KEY', s3PermanentAccessKey)
minio.environment().put('MINIO_SECRET_KEY', s3PermanentSecretKey) minio.environment().put('MINIO_SECRET_KEY', s3PermanentSecretKey)
@ -227,6 +242,7 @@ if (useFixture && minioDistribution) {
final int index = line.lastIndexOf(":") final int index = line.lastIndexOf(":")
assert index >= 0 assert index >= 0
httpPort = Integer.parseInt(line.substring(index + 1)) httpPort = Integer.parseInt(line.substring(index + 1))
assert httpPort == minioPort : "Port mismatch, expected ${minioPort} but was ${httpPort}"
final File script = new File(project.buildDir, "minio/minio.killer.sh") final File script = new File(project.buildDir, "minio/minio.killer.sh")
script.setText( script.setText(
@ -269,10 +285,15 @@ if (useFixture && minioDistribution) {
project.afterEvaluate { project.afterEvaluate {
ClusterConfiguration cluster = project.extensions.getByName('integTestMinioCluster') as ClusterConfiguration ClusterConfiguration cluster = project.extensions.getByName('integTestMinioCluster') as ClusterConfiguration
cluster.dependsOn(project.bundlePlugin) cluster.dependsOn(project.bundlePlugin)
cluster.dependsOn(startMinio) // otherwise we don't know the Minio port
cluster.keystoreSetting 's3.client.integration_test_permanent.access_key', s3PermanentAccessKey cluster.keystoreSetting 's3.client.integration_test_permanent.access_key', s3PermanentAccessKey
cluster.keystoreSetting 's3.client.integration_test_permanent.secret_key', s3PermanentSecretKey cluster.keystoreSetting 's3.client.integration_test_permanent.secret_key', s3PermanentSecretKey
cluster.setting 's3.client.integration_test_permanent.endpoint', "http://${minioAddress}" Closure<String> minioAddressAndPort = {
assert startMinio.minioPort > 0
return 'http://' + minioAddress + ':' + startMinio.minioPort
}
cluster.setting 's3.client.integration_test_permanent.endpoint', "${ -> minioAddressAndPort.call()}"
Task restIntegTestTask = project.tasks.getByName('integTestMinio') Task restIntegTestTask = project.tasks.getByName('integTestMinio')
restIntegTestTask.clusterConfig.plugin(project.path) restIntegTestTask.clusterConfig.plugin(project.path)

View File

@ -1,34 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest;
/**
* Enables filtering the index templates that will be applied for an index, per create index request.
*/
public interface IndexTemplateFilter {
/**
* @return {@code true} if the given template should be applied on the newly created index,
* {@code false} otherwise.
*/
boolean apply(CreateIndexClusterStateUpdateRequest request, IndexTemplateMetaData template);
}

View File

@ -2391,6 +2391,7 @@ public class IndexShardTests extends IndexShardTestCase {
closeShards(sourceShard, targetShard); closeShards(sourceShard, targetShard);
} }
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32766")
public void testDocStats() throws IOException { public void testDocStats() throws IOException {
IndexShard indexShard = null; IndexShard indexShard = null;
try { try {

View File

@ -28,19 +28,10 @@ subprojects {
apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-base-publish'
apply plugin: 'nebula.maven-scm' apply plugin: 'nebula.maven-scm'
// the main files are actually test files, so use the appropriate forbidden api sigs
forbiddenApisMain {
signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'),
PrecommitTasks.getResource('/forbidden/es-signatures.txt'),
PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')]
}
// TODO: should we have licenses for our test deps? // TODO: should we have licenses for our test deps?
dependencyLicenses.enabled = false dependencyLicenses.enabled = false
dependenciesInfo.enabled = false dependenciesInfo.enabled = false
// TODO: why is the test framework pulled in... // TODO: why is the test framework pulled in...
forbiddenApisMain.enabled = false
jarHell.enabled = false jarHell.enabled = false
} }

View File

@ -0,0 +1,5 @@
subprojects {
// fixtures are mostly external and by default we don't want to check forbidden apis
forbiddenApisMain.enabled = false
}

View File

@ -41,8 +41,9 @@ compileTestJava.options.compilerArgs << '-Xlint:-rawtypes'
// the main files are actually test files, so use the appropriate forbidden api sigs // the main files are actually test files, so use the appropriate forbidden api sigs
forbiddenApisMain { forbiddenApisMain {
signaturesURLs = [PrecommitTasks.getResource('/forbidden/all-signatures.txt'), signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'),
PrecommitTasks.getResource('/forbidden/test-signatures.txt')] PrecommitTasks.getResource('/forbidden/es-all-signatures.txt'),
PrecommitTasks.getResource('/forbidden/es-test-signatures.txt')]
} }
// TODO: should we have licenses for our test deps? // TODO: should we have licenses for our test deps?

View File

@ -67,7 +67,7 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase {
Matcher m = UNDERSCORE_THEN_ANYTHING.matcher(s); Matcher m = UNDERSCORE_THEN_ANYTHING.matcher(s);
StringBuffer sb = new StringBuffer(); StringBuffer sb = new StringBuffer();
while (m.find()) { while (m.find()) {
m.appendReplacement(sb, m.group(1).toUpperCase()); m.appendReplacement(sb, m.group(1).toUpperCase(Locale.ROOT));
} }
m.appendTail(sb); m.appendTail(sb);
sb.setCharAt(0, Character.toUpperCase(sb.charAt(0))); sb.setCharAt(0, Character.toUpperCase(sb.charAt(0)));

View File

@ -21,7 +21,6 @@ package org.elasticsearch.test;
import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.RandomizedTest;
import com.carrotsearch.randomizedtesting.SeedUtils; import com.carrotsearch.randomizedtesting.SeedUtils;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.elasticsearch.Version; import org.elasticsearch.Version;
@ -42,6 +41,7 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
@ -194,8 +194,8 @@ public abstract class AbstractBuilderTestCase extends ESTestCase {
@AfterClass @AfterClass
public static void afterClass() throws Exception { public static void afterClass() throws Exception {
org.apache.lucene.util.IOUtils.close(serviceHolder); IOUtils.close(serviceHolder);
org.apache.lucene.util.IOUtils.close(serviceHolderWithNoType); IOUtils.close(serviceHolderWithNoType);
serviceHolder = null; serviceHolder = null;
serviceHolderWithNoType = null; serviceHolderWithNoType = null;
} }

View File

@ -54,6 +54,7 @@ import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.CheckedBiFunction;
import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.CheckedRunnable;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.PathUtilsForTesting; import org.elasticsearch.common.io.PathUtilsForTesting;
@ -198,13 +199,9 @@ public abstract class ESTestCase extends LuceneTestCase {
} }
static { static {
System.setProperty("log4j.shutdownHookEnabled", "false"); setTestSysProps();
System.setProperty("log4j2.disable.jmx", "true");
LogConfigurator.loadLog4jPlugins(); LogConfigurator.loadLog4jPlugins();
// Enable Netty leak detection and monitor logger for logged leak errors
System.setProperty("io.netty.leakDetection.level", "paranoid");
String leakLoggerName = "io.netty.util.ResourceLeakDetector"; String leakLoggerName = "io.netty.util.ResourceLeakDetector";
Logger leakLogger = LogManager.getLogger(leakLoggerName); Logger leakLogger = LogManager.getLogger(leakLoggerName);
Appender leakAppender = new AbstractAppender(leakLoggerName, null, Appender leakAppender = new AbstractAppender(leakLoggerName, null,
@ -243,6 +240,14 @@ public abstract class ESTestCase extends LuceneTestCase {
Collections.sort(javaZoneIds); Collections.sort(javaZoneIds);
JAVA_ZONE_IDS = Collections.unmodifiableList(javaZoneIds); JAVA_ZONE_IDS = Collections.unmodifiableList(javaZoneIds);
} }
@SuppressForbidden(reason = "force log4j and netty sysprops")
private static void setTestSysProps() {
System.setProperty("log4j.shutdownHookEnabled", "false");
System.setProperty("log4j2.disable.jmx", "true");
// Enable Netty leak detection and monitor logger for logged leak errors
System.setProperty("io.netty.leakDetection.level", "paranoid");
}
protected final Logger logger = Loggers.getLogger(getClass()); protected final Logger logger = Loggers.getLogger(getClass());
protected final DeprecationLogger deprecationLogger = new DeprecationLogger(logger); protected final DeprecationLogger deprecationLogger = new DeprecationLogger(logger);

View File

@ -20,6 +20,8 @@
package org.elasticsearch.test.fixture; package org.elasticsearch.test.fixture;
import com.sun.net.httpserver.HttpServer; import com.sun.net.httpserver.HttpServer;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;
@ -32,7 +34,6 @@ import java.net.SocketAddress;
import java.net.URI; import java.net.URI;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption; import java.nio.file.StandardCopyOption;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@ -48,6 +49,7 @@ import static java.util.Collections.singletonMap;
/** /**
* Base class for test fixtures that requires a {@link HttpServer} to work. * Base class for test fixtures that requires a {@link HttpServer} to work.
*/ */
@SuppressForbidden(reason = "uses httpserver by design")
public abstract class AbstractHttpFixture { public abstract class AbstractHttpFixture {
protected static final Map<String, String> TEXT_PLAIN_CONTENT_TYPE = contentType("text/plain; charset=utf-8"); protected static final Map<String, String> TEXT_PLAIN_CONTENT_TYPE = contentType("text/plain; charset=utf-8");
@ -62,7 +64,7 @@ public abstract class AbstractHttpFixture {
private final Path workingDirectory; private final Path workingDirectory;
protected AbstractHttpFixture(final String workingDir) { protected AbstractHttpFixture(final String workingDir) {
this.workingDirectory = Paths.get(Objects.requireNonNull(workingDir)); this.workingDirectory = PathUtils.get(Objects.requireNonNull(workingDir));
} }
/** /**

View File

@ -22,6 +22,7 @@ import com.carrotsearch.randomizedtesting.ReproduceErrorMessageBuilder;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.Constants; import org.apache.lucene.util.Constants;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -86,7 +87,12 @@ public class ReproduceInfoPrinter extends RunListener {
gradleMessageBuilder.appendClientYamlSuiteProperties(); gradleMessageBuilder.appendClientYamlSuiteProperties();
} }
System.err.println(b.toString()); printToErr(b.toString());
}
@SuppressForbidden(reason = "printing repro info")
private static void printToErr(String s) {
System.err.println(s);
} }
protected static class GradleMessageBuilder extends ReproduceErrorMessageBuilder { protected static class GradleMessageBuilder extends ReproduceErrorMessageBuilder {

View File

@ -29,6 +29,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionListenerResponseHandler;
import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
@ -64,6 +65,7 @@ import java.net.InetAddress;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.net.ServerSocket; import java.net.ServerSocket;
import java.net.Socket; import java.net.Socket;
import java.net.UnknownHostException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
@ -1894,7 +1896,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
// means that once we received an ACK from the client we just drop the packet on the floor (which is what we want) and we run // means that once we received an ACK from the client we just drop the packet on the floor (which is what we want) and we run
// into a connection timeout quickly. Yet other implementations can for instance terminate the connection within the 3 way // into a connection timeout quickly. Yet other implementations can for instance terminate the connection within the 3 way
// handshake which I haven't tested yet. // handshake which I haven't tested yet.
socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1); socket.bind(getLocalEphemeral(), 1);
socket.setReuseAddress(true); socket.setReuseAddress(true);
DiscoveryNode first = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(), DiscoveryNode first = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(),
socket.getLocalPort()), emptyMap(), socket.getLocalPort()), emptyMap(),
@ -2008,7 +2010,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
public void testTcpHandshakeTimeout() throws IOException { public void testTcpHandshakeTimeout() throws IOException {
try (ServerSocket socket = new MockServerSocket()) { try (ServerSocket socket = new MockServerSocket()) {
socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1); socket.bind(getLocalEphemeral(), 1);
socket.setReuseAddress(true); socket.setReuseAddress(true);
DiscoveryNode dummy = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(), DiscoveryNode dummy = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(),
socket.getLocalPort()), emptyMap(), socket.getLocalPort()), emptyMap(),
@ -2029,7 +2031,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
public void testTcpHandshakeConnectionReset() throws IOException, InterruptedException { public void testTcpHandshakeConnectionReset() throws IOException, InterruptedException {
try (ServerSocket socket = new MockServerSocket()) { try (ServerSocket socket = new MockServerSocket()) {
socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1); socket.bind(getLocalEphemeral(), 1);
socket.setReuseAddress(true); socket.setReuseAddress(true);
DiscoveryNode dummy = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(), DiscoveryNode dummy = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(),
socket.getLocalPort()), emptyMap(), socket.getLocalPort()), emptyMap(),
@ -2665,4 +2667,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
protected abstract void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException; protected abstract void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException;
@SuppressForbidden(reason = "need local ephemeral port")
private InetSocketAddress getLocalEphemeral() throws UnknownHostException {
return new InetSocketAddress(InetAddress.getLocalHost(), 0);
}
} }

View File

@ -18,6 +18,7 @@
*/ */
package org.elasticsearch.transport; package org.elasticsearch.transport;
import org.elasticsearch.cli.SuppressForbidden;
import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
@ -160,6 +161,7 @@ public class MockTcpTransport extends TcpTransport {
} }
@Override @Override
@SuppressForbidden(reason = "real socket for mocking remote connections")
protected MockChannel initiateChannel(InetSocketAddress address, ActionListener<Void> connectListener) throws IOException { protected MockChannel initiateChannel(InetSocketAddress address, ActionListener<Void> connectListener) throws IOException {
final MockSocket socket = new MockSocket(); final MockSocket socket = new MockSocket();
final MockChannel channel = new MockChannel(socket, address, "none"); final MockChannel channel = new MockChannel(socket, address, "none");

View File

@ -15,6 +15,7 @@ import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings;
import org.elasticsearch.xpack.core.ssl.VerificationMode; import org.elasticsearch.xpack.core.ssl.VerificationMode;
import javax.crypto.Cipher; import javax.crypto.Cipher;
import javax.crypto.SecretKeyFactory;
import java.security.NoSuchAlgorithmException; import java.security.NoSuchAlgorithmException;
import java.util.ArrayList; import java.util.ArrayList;
@ -134,8 +135,16 @@ public class XPackSettings {
public static final Setting<String> PASSWORD_HASHING_ALGORITHM = new Setting<>( public static final Setting<String> PASSWORD_HASHING_ALGORITHM = new Setting<>(
"xpack.security.authc.password_hashing.algorithm", "bcrypt", Function.identity(), (v, s) -> { "xpack.security.authc.password_hashing.algorithm", "bcrypt", Function.identity(), (v, s) -> {
if (Hasher.getAvailableAlgoStoredHash().contains(v.toLowerCase(Locale.ROOT)) == false) { if (Hasher.getAvailableAlgoStoredHash().contains(v.toLowerCase(Locale.ROOT)) == false) {
throw new IllegalArgumentException("Invalid algorithm: " + v + ". Only pbkdf2 or bcrypt family algorithms can be used for " + throw new IllegalArgumentException("Invalid algorithm: " + v + ". Valid values for password hashing are " +
"password hashing."); Hasher.getAvailableAlgoStoredHash().toString());
} else if (v.regionMatches(true, 0, "pbkdf2", 0, "pbkdf2".length())) {
try {
SecretKeyFactory.getInstance("PBKDF2withHMACSHA512");
} catch (NoSuchAlgorithmException e) {
throw new IllegalArgumentException(
"Support for PBKDF2WithHMACSHA512 must be available in order to use any of the " +
"PBKDF2 algorithms for the [xpack.security.authc.password_hashing.algorithm] setting.", e);
}
} }
}, Setting.Property.NodeScope); }, Setting.Property.NodeScope);
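
The validator above only accepts a pbkdf2-family value when the JVM can actually supply the underlying key-derivation function. A minimal sketch of that availability probe, assuming only the JDK; the class name, method name, and exception message here are illustrative, not the production strings:

import javax.crypto.SecretKeyFactory;
import java.security.NoSuchAlgorithmException;
import java.util.Locale;

public class Pbkdf2AvailabilityCheck {
    // Throws IllegalArgumentException when a pbkdf2-family setting is chosen
    // but PBKDF2withHMACSHA512 is not available in this JVM.
    static void validate(String configuredAlgorithm) {
        if (configuredAlgorithm.toLowerCase(Locale.ROOT).startsWith("pbkdf2")) {
            try {
                SecretKeyFactory.getInstance("PBKDF2withHMACSHA512");
            } catch (NoSuchAlgorithmException e) {
                throw new IllegalArgumentException(
                        "PBKDF2withHMACSHA512 must be available to use a PBKDF2 algorithm", e);
            }
        }
    }

    public static void main(String[] args) {
        validate("PBKDF2_10000"); // silent success means the probe passed
        validate("bcrypt");       // non-pbkdf2 values skip the probe entirely
    }
}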


@ -64,7 +64,6 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
private static final ParseField OVERLAPPING_BUCKETS = new ParseField("overlapping_buckets"); private static final ParseField OVERLAPPING_BUCKETS = new ParseField("overlapping_buckets");
private static final ParseField RESULT_FINALIZATION_WINDOW = new ParseField("result_finalization_window"); private static final ParseField RESULT_FINALIZATION_WINDOW = new ParseField("result_finalization_window");
private static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields"); private static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields");
private static final ParseField USER_PER_PARTITION_NORMALIZATION = new ParseField("use_per_partition_normalization");
public static final String ML_CATEGORY_FIELD = "mlcategory"; public static final String ML_CATEGORY_FIELD = "mlcategory";
public static final Set<String> AUTO_CREATED_FIELDS = new HashSet<>(Collections.singletonList(ML_CATEGORY_FIELD)); public static final Set<String> AUTO_CREATED_FIELDS = new HashSet<>(Collections.singletonList(ML_CATEGORY_FIELD));
@ -98,7 +97,6 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
parser.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS); parser.declareBoolean(Builder::setOverlappingBuckets, OVERLAPPING_BUCKETS);
parser.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW); parser.declareLong(Builder::setResultFinalizationWindow, RESULT_FINALIZATION_WINDOW);
parser.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS); parser.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS);
parser.declareBoolean(Builder::setUsePerPartitionNormalization, USER_PER_PARTITION_NORMALIZATION);
return parser; return parser;
} }
@ -117,12 +115,11 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
private final Boolean overlappingBuckets; private final Boolean overlappingBuckets;
private final Long resultFinalizationWindow; private final Long resultFinalizationWindow;
private final Boolean multivariateByFields; private final Boolean multivariateByFields;
private final boolean usePerPartitionNormalization;
private AnalysisConfig(TimeValue bucketSpan, String categorizationFieldName, List<String> categorizationFilters, private AnalysisConfig(TimeValue bucketSpan, String categorizationFieldName, List<String> categorizationFilters,
CategorizationAnalyzerConfig categorizationAnalyzerConfig, TimeValue latency, String summaryCountFieldName, CategorizationAnalyzerConfig categorizationAnalyzerConfig, TimeValue latency, String summaryCountFieldName,
List<Detector> detectors, List<String> influencers, Boolean overlappingBuckets, Long resultFinalizationWindow, List<Detector> detectors, List<String> influencers, Boolean overlappingBuckets, Long resultFinalizationWindow,
-Boolean multivariateByFields, boolean usePerPartitionNormalization) {
+Boolean multivariateByFields) {
this.detectors = detectors; this.detectors = detectors;
this.bucketSpan = bucketSpan; this.bucketSpan = bucketSpan;
this.latency = latency; this.latency = latency;
@ -134,7 +131,6 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
this.overlappingBuckets = overlappingBuckets; this.overlappingBuckets = overlappingBuckets;
this.resultFinalizationWindow = resultFinalizationWindow; this.resultFinalizationWindow = resultFinalizationWindow;
this.multivariateByFields = multivariateByFields; this.multivariateByFields = multivariateByFields;
this.usePerPartitionNormalization = usePerPartitionNormalization;
} }
public AnalysisConfig(StreamInput in) throws IOException { public AnalysisConfig(StreamInput in) throws IOException {
@ -165,7 +161,12 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
} }
} }
-usePerPartitionNormalization = in.readBoolean();
+// BWC for removed per-partition normalization
// Version check is temporarily against the latest to satisfy CI tests
// TODO change to V_6_5_0 after successful backport to 6.x
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
in.readBoolean();
}
} }
@Override @Override
@ -195,7 +196,12 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
out.writeBoolean(false); out.writeBoolean(false);
} }
-out.writeBoolean(usePerPartitionNormalization);
+// BWC for removed per-partition normalization
// Version check is temporarily against the latest to satisfy CI tests
// TODO change to V_6_5_0 after successful backport to 6.x
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
out.writeBoolean(false);
}
} }
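
The two version-gated blocks above are the usual wire backwards-compatibility idiom for a removed field: newer nodes no longer keep the per-partition flag, but when the stream belongs to an older version they still consume the obsolete boolean on read and emit a placeholder on write, so both sides stay aligned. A generic, dependency-free sketch of the pattern; the field and version names are made up for illustration:

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

class RemovedFieldBwc {
    static final int VERSION_FIELD_REMOVED = 7; // illustrative version id

    static void readFrom(DataInputStream in, int peerVersion) throws IOException {
        // ... read the fields that still exist ...
        if (peerVersion < VERSION_FIELD_REMOVED) {
            in.readBoolean(); // read and discard the removed flag sent by an old peer
        }
    }

    static void writeTo(DataOutputStream out, int peerVersion) throws IOException {
        // ... write the fields that still exist ...
        if (peerVersion < VERSION_FIELD_REMOVED) {
            out.writeBoolean(false); // placeholder so an old peer still sees the slot it expects
        }
    }
}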
/** /**
@ -299,10 +305,6 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
return multivariateByFields; return multivariateByFields;
} }
public boolean getUsePerPartitionNormalization() {
return usePerPartitionNormalization;
}
/** /**
* Return the set of fields required by the analysis. * Return the set of fields required by the analysis.
* These are the influencer fields, metric field, partition field, * These are the influencer fields, metric field, partition field,
@ -403,9 +405,6 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
if (multivariateByFields != null) { if (multivariateByFields != null) {
builder.field(MULTIVARIATE_BY_FIELDS.getPreferredName(), multivariateByFields); builder.field(MULTIVARIATE_BY_FIELDS.getPreferredName(), multivariateByFields);
} }
if (usePerPartitionNormalization) {
builder.field(USER_PER_PARTITION_NORMALIZATION.getPreferredName(), usePerPartitionNormalization);
}
builder.endObject(); builder.endObject();
return builder; return builder;
} }
@ -416,7 +415,6 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
if (o == null || getClass() != o.getClass()) return false; if (o == null || getClass() != o.getClass()) return false;
AnalysisConfig that = (AnalysisConfig) o; AnalysisConfig that = (AnalysisConfig) o;
return Objects.equals(latency, that.latency) && return Objects.equals(latency, that.latency) &&
usePerPartitionNormalization == that.usePerPartitionNormalization &&
Objects.equals(bucketSpan, that.bucketSpan) && Objects.equals(bucketSpan, that.bucketSpan) &&
Objects.equals(categorizationFieldName, that.categorizationFieldName) && Objects.equals(categorizationFieldName, that.categorizationFieldName) &&
Objects.equals(categorizationFilters, that.categorizationFilters) && Objects.equals(categorizationFilters, that.categorizationFilters) &&
@ -434,7 +432,7 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
return Objects.hash( return Objects.hash(
bucketSpan, categorizationFieldName, categorizationFilters, categorizationAnalyzerConfig, latency, bucketSpan, categorizationFieldName, categorizationFilters, categorizationAnalyzerConfig, latency,
summaryCountFieldName, detectors, influencers, overlappingBuckets, resultFinalizationWindow, summaryCountFieldName, detectors, influencers, overlappingBuckets, resultFinalizationWindow,
-multivariateByFields, usePerPartitionNormalization
+multivariateByFields
); );
} }
@ -453,7 +451,6 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
private Boolean overlappingBuckets; private Boolean overlappingBuckets;
private Long resultFinalizationWindow; private Long resultFinalizationWindow;
private Boolean multivariateByFields; private Boolean multivariateByFields;
private boolean usePerPartitionNormalization = false;
public Builder(List<Detector> detectors) { public Builder(List<Detector> detectors) {
setDetectors(detectors); setDetectors(detectors);
@ -472,7 +469,6 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
this.overlappingBuckets = analysisConfig.overlappingBuckets; this.overlappingBuckets = analysisConfig.overlappingBuckets;
this.resultFinalizationWindow = analysisConfig.resultFinalizationWindow; this.resultFinalizationWindow = analysisConfig.resultFinalizationWindow;
this.multivariateByFields = analysisConfig.multivariateByFields; this.multivariateByFields = analysisConfig.multivariateByFields;
this.usePerPartitionNormalization = analysisConfig.usePerPartitionNormalization;
} }
public void setDetectors(List<Detector> detectors) { public void setDetectors(List<Detector> detectors) {
@ -535,10 +531,6 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
this.multivariateByFields = multivariateByFields; this.multivariateByFields = multivariateByFields;
} }
public void setUsePerPartitionNormalization(boolean usePerPartitionNormalization) {
this.usePerPartitionNormalization = usePerPartitionNormalization;
}
/** /**
* Checks the configuration is valid * Checks the configuration is valid
* <ol> * <ol>
@ -571,16 +563,11 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
overlappingBuckets = verifyOverlappingBucketsConfig(overlappingBuckets, detectors); overlappingBuckets = verifyOverlappingBucketsConfig(overlappingBuckets, detectors);
if (usePerPartitionNormalization) {
checkDetectorsHavePartitionFields(detectors);
checkNoInfluencersAreSet(influencers);
}
verifyNoInconsistentNestedFieldNames(); verifyNoInconsistentNestedFieldNames();
return new AnalysisConfig(bucketSpan, categorizationFieldName, categorizationFilters, categorizationAnalyzerConfig, return new AnalysisConfig(bucketSpan, categorizationFieldName, categorizationFilters, categorizationAnalyzerConfig,
latency, summaryCountFieldName, detectors, influencers, overlappingBuckets, latency, summaryCountFieldName, detectors, influencers, overlappingBuckets,
-resultFinalizationWindow, multivariateByFields, usePerPartitionNormalization);
+resultFinalizationWindow, multivariateByFields);
} }
private void verifyNoMetricFunctionsWhenSummaryCountFieldNameIsSet() { private void verifyNoMetricFunctionsWhenSummaryCountFieldNameIsSet() {
@ -704,23 +691,6 @@ public class AnalysisConfig implements ToXContentObject, Writeable {
} }
} }
private static void checkDetectorsHavePartitionFields(List<Detector> detectors) {
for (Detector detector : detectors) {
if (!Strings.isNullOrEmpty(detector.getPartitionFieldName())) {
return;
}
}
throw ExceptionsHelper.badRequestException(Messages.getMessage(
Messages.JOB_CONFIG_PER_PARTITION_NORMALIZATION_REQUIRES_PARTITION_FIELD));
}
private static void checkNoInfluencersAreSet(List<String> influencers) {
if (!influencers.isEmpty()) {
throw ExceptionsHelper.badRequestException(Messages.getMessage(
Messages.JOB_CONFIG_PER_PARTITION_NORMALIZATION_CANNOT_USE_INFLUENCERS));
}
}
private static boolean isValidRegex(String exp) { private static boolean isValidRegex(String exp) {
try { try {
Pattern.compile(exp); Pattern.compile(exp);


@ -130,10 +130,6 @@ public final class Messages {
"over_field_name cannot be used with function ''{0}''"; "over_field_name cannot be used with function ''{0}''";
public static final String JOB_CONFIG_OVERLAPPING_BUCKETS_INCOMPATIBLE_FUNCTION = public static final String JOB_CONFIG_OVERLAPPING_BUCKETS_INCOMPATIBLE_FUNCTION =
"Overlapping buckets cannot be used with function ''{0}''"; "Overlapping buckets cannot be used with function ''{0}''";
public static final String JOB_CONFIG_PER_PARTITION_NORMALIZATION_CANNOT_USE_INFLUENCERS =
"A job configured with Per-Partition Normalization cannot use influencers";
public static final String JOB_CONFIG_PER_PARTITION_NORMALIZATION_REQUIRES_PARTITION_FIELD =
"If the job is configured with Per-Partition Normalization enabled a detector must have a partition field";
public static final String JOB_CONFIG_UNKNOWN_FUNCTION = "Unknown function ''{0}''"; public static final String JOB_CONFIG_UNKNOWN_FUNCTION = "Unknown function ''{0}''";
public static final String JOB_CONFIG_UPDATE_ANALYSIS_LIMITS_MODEL_MEMORY_LIMIT_CANNOT_BE_DECREASED = public static final String JOB_CONFIG_UPDATE_ANALYSIS_LIMITS_MODEL_MEMORY_LIMIT_CANNOT_BE_DECREASED =
"Invalid update value for analysis_limits: model_memory_limit cannot be decreased below current usage; " + "Invalid update value for analysis_limits: model_memory_limit cannot be decreased below current usage; " +


@ -227,23 +227,6 @@ public class ElasticsearchMappings {
.startObject(Bucket.SCHEDULED_EVENTS.getPreferredName()) .startObject(Bucket.SCHEDULED_EVENTS.getPreferredName())
.field(TYPE, KEYWORD) .field(TYPE, KEYWORD)
.endObject() .endObject()
.startObject(Bucket.PARTITION_SCORES.getPreferredName())
.field(TYPE, NESTED)
.startObject(PROPERTIES)
.startObject(AnomalyRecord.PARTITION_FIELD_NAME.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.startObject(AnomalyRecord.PARTITION_FIELD_VALUE.getPreferredName())
.field(TYPE, KEYWORD)
.endObject()
.startObject(Bucket.INITIAL_ANOMALY_SCORE.getPreferredName())
.field(TYPE, DOUBLE)
.endObject()
.startObject(AnomalyRecord.PROBABILITY.getPreferredName())
.field(TYPE, DOUBLE)
.endObject()
.endObject()
.endObject()
.startObject(Bucket.BUCKET_INFLUENCERS.getPreferredName()) .startObject(Bucket.BUCKET_INFLUENCERS.getPreferredName())
.field(TYPE, NESTED) .field(TYPE, NESTED)
@ -328,7 +311,7 @@ public class ElasticsearchMappings {
} }
private static void addForecastFieldsToMapping(XContentBuilder builder) throws IOException { private static void addForecastFieldsToMapping(XContentBuilder builder) throws IOException {
// Forecast Output // Forecast Output
builder.startObject(Forecast.FORECAST_LOWER.getPreferredName()) builder.startObject(Forecast.FORECAST_LOWER.getPreferredName())
.field(TYPE, DOUBLE) .field(TYPE, DOUBLE)
@ -370,7 +353,7 @@ public class ElasticsearchMappings {
.field(TYPE, LONG) .field(TYPE, LONG)
.endObject(); .endObject();
} }
/** /**
* AnomalyRecord fields to be added under the 'properties' section of the mapping * AnomalyRecord fields to be added under the 'properties' section of the mapping
* @param builder Add properties to this builder * @param builder Add properties to this builder


@ -25,7 +25,6 @@ import java.util.Collections;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
import java.util.Optional;
/** /**
* Bucket Result POJO * Bucket Result POJO
@ -43,7 +42,6 @@ public class Bucket implements ToXContentObject, Writeable {
public static final ParseField BUCKET_INFLUENCERS = new ParseField("bucket_influencers"); public static final ParseField BUCKET_INFLUENCERS = new ParseField("bucket_influencers");
public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); public static final ParseField BUCKET_SPAN = new ParseField("bucket_span");
public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms"); public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms");
public static final ParseField PARTITION_SCORES = new ParseField("partition_scores");
public static final ParseField SCHEDULED_EVENTS = new ParseField("scheduled_events"); public static final ParseField SCHEDULED_EVENTS = new ParseField("scheduled_events");
// Used for QueryPage // Used for QueryPage
@ -58,6 +56,19 @@ public class Bucket implements ToXContentObject, Writeable {
public static final ConstructingObjectParser<Bucket, Void> STRICT_PARSER = createParser(false); public static final ConstructingObjectParser<Bucket, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<Bucket, Void> LENIENT_PARSER = createParser(true); public static final ConstructingObjectParser<Bucket, Void> LENIENT_PARSER = createParser(true);
/*
* Read and discard the old (prior to 6.5) perPartitionNormalization values
*/
public static Bucket readOldPerPartitionNormalization(StreamInput in) throws IOException {
in.readString();
in.readString();
in.readDouble();
in.readDouble();
in.readDouble();
return null;
}
private static ConstructingObjectParser<Bucket, Void> createParser(boolean ignoreUnknownFields) { private static ConstructingObjectParser<Bucket, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<Bucket, Void> parser = new ConstructingObjectParser<>(RESULT_TYPE_VALUE, ignoreUnknownFields, ConstructingObjectParser<Bucket, Void> parser = new ConstructingObjectParser<>(RESULT_TYPE_VALUE, ignoreUnknownFields,
a -> new Bucket((String) a[0], (Date) a[1], (long) a[2])); a -> new Bucket((String) a[0], (Date) a[1], (long) a[2]));
@ -82,8 +93,6 @@ public class Bucket implements ToXContentObject, Writeable {
parser.declareObjectArray(Bucket::setBucketInfluencers, ignoreUnknownFields ? parser.declareObjectArray(Bucket::setBucketInfluencers, ignoreUnknownFields ?
BucketInfluencer.LENIENT_PARSER : BucketInfluencer.STRICT_PARSER, BUCKET_INFLUENCERS); BucketInfluencer.LENIENT_PARSER : BucketInfluencer.STRICT_PARSER, BUCKET_INFLUENCERS);
parser.declareLong(Bucket::setProcessingTimeMs, PROCESSING_TIME_MS); parser.declareLong(Bucket::setProcessingTimeMs, PROCESSING_TIME_MS);
parser.declareObjectArray(Bucket::setPartitionScores, ignoreUnknownFields ?
PartitionScore.LENIENT_PARSER : PartitionScore.STRICT_PARSER, PARTITION_SCORES);
parser.declareString((bucket, s) -> {}, Result.RESULT_TYPE); parser.declareString((bucket, s) -> {}, Result.RESULT_TYPE);
parser.declareStringArray(Bucket::setScheduledEvents, SCHEDULED_EVENTS); parser.declareStringArray(Bucket::setScheduledEvents, SCHEDULED_EVENTS);
@ -100,7 +109,6 @@ public class Bucket implements ToXContentObject, Writeable {
private boolean isInterim; private boolean isInterim;
private List<BucketInfluencer> bucketInfluencers = new ArrayList<>(); // Can't use emptyList as might be appended to private List<BucketInfluencer> bucketInfluencers = new ArrayList<>(); // Can't use emptyList as might be appended to
private long processingTimeMs; private long processingTimeMs;
private List<PartitionScore> partitionScores = Collections.emptyList();
private List<String> scheduledEvents = Collections.emptyList(); private List<String> scheduledEvents = Collections.emptyList();
public Bucket(String jobId, Date timestamp, long bucketSpan) { public Bucket(String jobId, Date timestamp, long bucketSpan) {
@ -120,7 +128,6 @@ public class Bucket implements ToXContentObject, Writeable {
this.isInterim = other.isInterim; this.isInterim = other.isInterim;
this.bucketInfluencers = new ArrayList<>(other.bucketInfluencers); this.bucketInfluencers = new ArrayList<>(other.bucketInfluencers);
this.processingTimeMs = other.processingTimeMs; this.processingTimeMs = other.processingTimeMs;
this.partitionScores = new ArrayList<>(other.partitionScores);
this.scheduledEvents = new ArrayList<>(other.scheduledEvents); this.scheduledEvents = new ArrayList<>(other.scheduledEvents);
} }
@ -143,7 +150,10 @@ public class Bucket implements ToXContentObject, Writeable {
if (in.getVersion().before(Version.V_5_5_0)) { if (in.getVersion().before(Version.V_5_5_0)) {
in.readGenericValue(); in.readGenericValue();
} }
-partitionScores = in.readList(PartitionScore::new);
+// bwc for perPartitionNormalization
if (in.getVersion().before(Version.V_6_5_0)) {
in.readList(Bucket::readOldPerPartitionNormalization);
}
if (in.getVersion().onOrAfter(Version.V_6_2_0)) { if (in.getVersion().onOrAfter(Version.V_6_2_0)) {
scheduledEvents = in.readList(StreamInput::readString); scheduledEvents = in.readList(StreamInput::readString);
if (scheduledEvents.isEmpty()) { if (scheduledEvents.isEmpty()) {
@ -174,7 +184,10 @@ public class Bucket implements ToXContentObject, Writeable {
if (out.getVersion().before(Version.V_5_5_0)) { if (out.getVersion().before(Version.V_5_5_0)) {
out.writeGenericValue(Collections.emptyMap()); out.writeGenericValue(Collections.emptyMap());
} }
-out.writeList(partitionScores);
+// bwc for perPartitionNormalization
if (out.getVersion().before(Version.V_6_5_0)) {
out.writeList(Collections.emptyList());
}
if (out.getVersion().onOrAfter(Version.V_6_2_0)) { if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
out.writeStringList(scheduledEvents); out.writeStringList(scheduledEvents);
} }
@ -195,9 +208,7 @@ public class Bucket implements ToXContentObject, Writeable {
builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); builder.field(Result.IS_INTERIM.getPreferredName(), isInterim);
builder.field(BUCKET_INFLUENCERS.getPreferredName(), bucketInfluencers); builder.field(BUCKET_INFLUENCERS.getPreferredName(), bucketInfluencers);
builder.field(PROCESSING_TIME_MS.getPreferredName(), processingTimeMs); builder.field(PROCESSING_TIME_MS.getPreferredName(), processingTimeMs);
if (partitionScores.isEmpty() == false) {
builder.field(PARTITION_SCORES.getPreferredName(), partitionScores);
}
if (scheduledEvents.isEmpty() == false) { if (scheduledEvents.isEmpty() == false) {
builder.field(SCHEDULED_EVENTS.getPreferredName(), scheduledEvents); builder.field(SCHEDULED_EVENTS.getPreferredName(), scheduledEvents);
} }
@ -304,14 +315,6 @@ public class Bucket implements ToXContentObject, Writeable {
bucketInfluencers.add(bucketInfluencer); bucketInfluencers.add(bucketInfluencer);
} }
public List<PartitionScore> getPartitionScores() {
return partitionScores;
}
public void setPartitionScores(List<PartitionScore> scores) {
partitionScores = Objects.requireNonNull(scores);
}
public List<String> getScheduledEvents() { public List<String> getScheduledEvents() {
return scheduledEvents; return scheduledEvents;
} }
@ -320,24 +323,10 @@ public class Bucket implements ToXContentObject, Writeable {
this.scheduledEvents = ExceptionsHelper.requireNonNull(scheduledEvents, SCHEDULED_EVENTS.getPreferredName()); this.scheduledEvents = ExceptionsHelper.requireNonNull(scheduledEvents, SCHEDULED_EVENTS.getPreferredName());
} }
public double partitionInitialAnomalyScore(String partitionValue) {
Optional<PartitionScore> first = partitionScores.stream().filter(s -> partitionValue.equals(s.getPartitionFieldValue()))
.findFirst();
return first.isPresent() ? first.get().getInitialRecordScore() : 0.0;
}
public double partitionAnomalyScore(String partitionValue) {
Optional<PartitionScore> first = partitionScores.stream().filter(s -> partitionValue.equals(s.getPartitionFieldValue()))
.findFirst();
return first.isPresent() ? first.get().getRecordScore() : 0.0;
}
@Override @Override
public int hashCode() { public int hashCode() {
return Objects.hash(jobId, timestamp, eventCount, initialAnomalyScore, anomalyScore, records, return Objects.hash(jobId, timestamp, eventCount, initialAnomalyScore, anomalyScore, records,
-isInterim, bucketSpan, bucketInfluencers, partitionScores, processingTimeMs, scheduledEvents);
+isInterim, bucketSpan, bucketInfluencers, processingTimeMs, scheduledEvents);
} }
/** /**
@ -360,7 +349,6 @@ public class Bucket implements ToXContentObject, Writeable {
&& (this.anomalyScore == that.anomalyScore) && (this.initialAnomalyScore == that.initialAnomalyScore) && (this.anomalyScore == that.anomalyScore) && (this.initialAnomalyScore == that.initialAnomalyScore)
&& Objects.equals(this.records, that.records) && Objects.equals(this.isInterim, that.isInterim) && Objects.equals(this.records, that.records) && Objects.equals(this.isInterim, that.isInterim)
&& Objects.equals(this.bucketInfluencers, that.bucketInfluencers) && Objects.equals(this.bucketInfluencers, that.bucketInfluencers)
&& Objects.equals(this.partitionScores, that.partitionScores)
&& (this.processingTimeMs == that.processingTimeMs) && (this.processingTimeMs == that.processingTimeMs)
&& Objects.equals(this.scheduledEvents, that.scheduledEvents); && Objects.equals(this.scheduledEvents, that.scheduledEvents);
} }
@ -374,6 +362,6 @@ public class Bucket implements ToXContentObject, Writeable {
* @return true if the bucket should be normalized or false otherwise * @return true if the bucket should be normalized or false otherwise
*/ */
public boolean isNormalizable() { public boolean isNormalizable() {
-return anomalyScore > 0.0 || partitionScores.stream().anyMatch(s -> s.getRecordScore() > 0);
+return anomalyScore > 0.0;
} }
} }


@ -1,131 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.job.results;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Objects;
public class PartitionScore implements ToXContentObject, Writeable {
public static final ParseField PARTITION_SCORE = new ParseField("partition_score");
private final String partitionFieldValue;
private final String partitionFieldName;
private final double initialRecordScore;
private double recordScore;
private double probability;
public static final ConstructingObjectParser<PartitionScore, Void> STRICT_PARSER = createParser(false);
public static final ConstructingObjectParser<PartitionScore, Void> LENIENT_PARSER = createParser(true);
private static ConstructingObjectParser<PartitionScore, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<PartitionScore, Void> parser = new ConstructingObjectParser<>(PARTITION_SCORE.getPreferredName(),
ignoreUnknownFields, a -> new PartitionScore((String) a[0], (String) a[1], (Double) a[2], (Double) a[3], (Double) a[4]));
parser.declareString(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARTITION_FIELD_NAME);
parser.declareString(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARTITION_FIELD_VALUE);
parser.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.INITIAL_RECORD_SCORE);
parser.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.RECORD_SCORE);
parser.declareDouble(ConstructingObjectParser.constructorArg(), AnomalyRecord.PROBABILITY);
return parser;
}
public PartitionScore(String fieldName, String fieldValue, double initialRecordScore, double recordScore, double probability) {
partitionFieldName = fieldName;
partitionFieldValue = fieldValue;
this.initialRecordScore = initialRecordScore;
this.recordScore = recordScore;
this.probability = probability;
}
public PartitionScore(StreamInput in) throws IOException {
partitionFieldName = in.readString();
partitionFieldValue = in.readString();
initialRecordScore = in.readDouble();
recordScore = in.readDouble();
probability = in.readDouble();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(partitionFieldName);
out.writeString(partitionFieldValue);
out.writeDouble(initialRecordScore);
out.writeDouble(recordScore);
out.writeDouble(probability);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(AnomalyRecord.PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName);
builder.field(AnomalyRecord.PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue);
builder.field(AnomalyRecord.INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore);
builder.field(AnomalyRecord.RECORD_SCORE.getPreferredName(), recordScore);
builder.field(AnomalyRecord.PROBABILITY.getPreferredName(), probability);
builder.endObject();
return builder;
}
public double getInitialRecordScore() {
return initialRecordScore;
}
public double getRecordScore() {
return recordScore;
}
public void setRecordScore(double recordScore) {
this.recordScore = recordScore;
}
public String getPartitionFieldName() {
return partitionFieldName;
}
public String getPartitionFieldValue() {
return partitionFieldValue;
}
public double getProbability() {
return probability;
}
public void setProbability(double probability) {
this.probability = probability;
}
@Override
public int hashCode() {
return Objects.hash(partitionFieldName, partitionFieldValue, probability, initialRecordScore, recordScore);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other instanceof PartitionScore == false) {
return false;
}
PartitionScore that = (PartitionScore) other;
// id is excluded from the test as it is generated by the datastore
return Objects.equals(this.partitionFieldValue, that.partitionFieldValue)
&& Objects.equals(this.partitionFieldName, that.partitionFieldName) && (this.probability == that.probability)
&& (this.initialRecordScore == that.initialRecordScore) && (this.recordScore == that.recordScore);
}
}


@ -81,7 +81,6 @@ public final class ReservedFieldNames {
Bucket.EVENT_COUNT.getPreferredName(), Bucket.EVENT_COUNT.getPreferredName(),
Bucket.INITIAL_ANOMALY_SCORE.getPreferredName(), Bucket.INITIAL_ANOMALY_SCORE.getPreferredName(),
Bucket.PROCESSING_TIME_MS.getPreferredName(), Bucket.PROCESSING_TIME_MS.getPreferredName(),
Bucket.PARTITION_SCORES.getPreferredName(),
Bucket.SCHEDULED_EVENTS.getPreferredName(), Bucket.SCHEDULED_EVENTS.getPreferredName(),
BucketInfluencer.INITIAL_ANOMALY_SCORE.getPreferredName(), BucketInfluencer.ANOMALY_SCORE.getPreferredName(), BucketInfluencer.INITIAL_ANOMALY_SCORE.getPreferredName(), BucketInfluencer.ANOMALY_SCORE.getPreferredName(),


@ -13,10 +13,7 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.core.security.authc.support.CharArrays; import org.elasticsearch.xpack.core.security.authc.support.CharArrays;
import org.elasticsearch.xpack.core.security.support.MetadataUtils;
import org.elasticsearch.xpack.core.security.support.Validation;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
@ -34,6 +31,7 @@ public class PutUserRequest extends ActionRequest implements UserRequest, WriteR
private String email; private String email;
private Map<String, Object> metadata; private Map<String, Object> metadata;
private char[] passwordHash; private char[] passwordHash;
private char[] password;
private boolean enabled = true; private boolean enabled = true;
private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE; private RefreshPolicy refreshPolicy = RefreshPolicy.IMMEDIATE;
@ -45,18 +43,15 @@ public class PutUserRequest extends ActionRequest implements UserRequest, WriteR
ActionRequestValidationException validationException = null; ActionRequestValidationException validationException = null;
if (username == null) { if (username == null) {
validationException = addValidationError("user is missing", validationException); validationException = addValidationError("user is missing", validationException);
} else {
Validation.Error error = Validation.Users.validateUsername(username, false, Settings.EMPTY);
if (error != null) {
validationException = addValidationError(error.toString(), validationException);
}
} }
if (roles == null) { if (roles == null) {
validationException = addValidationError("roles are missing", validationException); validationException = addValidationError("roles are missing", validationException);
} }
-if (metadata != null && MetadataUtils.containsReservedMetadata(metadata)) {
-validationException = addValidationError("metadata keys may not start with [" + MetadataUtils.RESERVED_PREFIX + "]",
-validationException);
+if (metadata != null && metadata.keySet().stream().anyMatch(s -> s.startsWith("_"))) {
+validationException = addValidationError("metadata keys may not start with [_]", validationException);
+}
+if (password != null && passwordHash != null) {
+validationException = addValidationError("only one of [password, passwordHash] can be provided", validationException);
}
// we do not check for a password hash here since it is possible that the user exists and we don't want to update the password // we do not check for a password hash here since it is possible that the user exists and we don't want to update the password
return validationException; return validationException;
@ -86,8 +81,12 @@ public class PutUserRequest extends ActionRequest implements UserRequest, WriteR
this.passwordHash = passwordHash; this.passwordHash = passwordHash;
} }
-public boolean enabled() {
-return enabled;
-}
+public void enabled(boolean enabled) {
+this.enabled = enabled;
+}
+public void password(@Nullable char[] password) {
+this.password = password;
+}
/** /**
@ -130,8 +129,8 @@ public class PutUserRequest extends ActionRequest implements UserRequest, WriteR
return passwordHash; return passwordHash;
} }
-public void enabled(boolean enabled) {
-this.enabled = enabled;
+public boolean enabled() {
+return enabled;
}
@Override @Override
@ -139,16 +138,16 @@ public class PutUserRequest extends ActionRequest implements UserRequest, WriteR
return new String[] { username }; return new String[] { username };
} }
@Nullable
public char[] password() {
return password;
}
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
super.readFrom(in); super.readFrom(in);
username = in.readString(); username = in.readString();
-BytesReference passwordHashRef = in.readBytesReference();
-if (passwordHashRef == BytesArray.EMPTY) {
-passwordHash = null;
-} else {
-passwordHash = CharArrays.utf8BytesToChars(BytesReference.toBytes(passwordHashRef));
-}
+passwordHash = readCharArrayFromStream(in);
roles = in.readStringArray(); roles = in.readStringArray();
fullName = in.readOptionalString(); fullName = in.readOptionalString();
email = in.readOptionalString(); email = in.readOptionalString();
@ -161,13 +160,10 @@ public class PutUserRequest extends ActionRequest implements UserRequest, WriteR
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out); super.writeTo(out);
out.writeString(username); out.writeString(username);
-BytesReference passwordHashRef;
-if (passwordHash == null) {
-passwordHashRef = null;
-} else {
-passwordHashRef = new BytesArray(CharArrays.toUtf8Bytes(passwordHash));
-}
-out.writeBytesReference(passwordHashRef);
+writeCharArrayToStream(out, passwordHash);
+if (password != null) {
+throw new IllegalStateException("password cannot be serialized. it is only used for HL rest");
+}
out.writeStringArray(roles); out.writeStringArray(roles);
out.writeOptionalString(fullName); out.writeOptionalString(fullName);
out.writeOptionalString(email); out.writeOptionalString(email);
@ -180,4 +176,23 @@ public class PutUserRequest extends ActionRequest implements UserRequest, WriteR
refreshPolicy.writeTo(out); refreshPolicy.writeTo(out);
out.writeBoolean(enabled); out.writeBoolean(enabled);
} }
private static char[] readCharArrayFromStream(StreamInput in) throws IOException {
BytesReference charBytesRef = in.readBytesReference();
if (charBytesRef == BytesArray.EMPTY) {
return null;
} else {
return CharArrays.utf8BytesToChars(BytesReference.toBytes(charBytesRef));
}
}
private static void writeCharArrayToStream(StreamOutput out, char[] chars) throws IOException {
final BytesReference charBytesRef;
if (chars == null) {
charBytesRef = null;
} else {
charBytesRef = new BytesArray(CharArrays.toUtf8Bytes(chars));
}
out.writeBytesReference(charBytesRef);
}
} }
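
The helpers added above encode a nullable char[] as a BytesReference, using the empty reference to stand for "no value" so the stream layout never changes. A hedged, JDK-only sketch of the same null-vs-empty convention follows; note that the real helpers go through CharArrays precisely to avoid creating intermediate String copies of sensitive data, which this simplified version does not:

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

class NullableCharsCodec {
    // null -> empty byte[] on the wire; empty byte[] on the wire -> null on read
    static byte[] toWire(char[] chars) {
        return chars == null ? new byte[0] : new String(chars).getBytes(StandardCharsets.UTF_8);
    }

    static char[] fromWire(byte[] bytes) {
        return bytes.length == 0 ? null : new String(bytes, StandardCharsets.UTF_8).toCharArray();
    }

    public static void main(String[] args) {
        System.out.println(fromWire(toWire(null)) == null);                       // true
        System.out.println(Arrays.equals(fromWire(toWire("hash".toCharArray())),
                "hash".toCharArray()));                                            // true
    }
}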


@ -5,9 +5,14 @@
*/ */
package org.elasticsearch.xpack.core; package org.elasticsearch.xpack.core;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import javax.crypto.Cipher; import javax.crypto.Cipher;
import javax.crypto.SecretKeyFactory;
import java.security.NoSuchAlgorithmException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
@ -25,4 +30,30 @@ public class XPackSettingsTests extends ESTestCase {
assertThat(XPackSettings.DEFAULT_CIPHERS, not(hasItem("TLS_RSA_WITH_AES_256_CBC_SHA"))); assertThat(XPackSettings.DEFAULT_CIPHERS, not(hasItem("TLS_RSA_WITH_AES_256_CBC_SHA")));
} }
} }
public void testPasswordHashingAlgorithmSettingValidation() {
final boolean isPBKDF2Available = isSecretkeyFactoryAlgoAvailable("PBKDF2WithHMACSHA512");
final String pbkdf2Algo = randomFrom("PBKDF2_10000", "PBKDF2");
final Settings settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), pbkdf2Algo).build();
if (isPBKDF2Available) {
assertEquals(pbkdf2Algo, XPackSettings.PASSWORD_HASHING_ALGORITHM.get(settings));
} else {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> XPackSettings.PASSWORD_HASHING_ALGORITHM.get(settings));
assertThat(e.getMessage(), containsString("Support for PBKDF2WithHMACSHA512 must be available"));
}
final String bcryptAlgo = randomFrom("BCRYPT", "BCRYPT11");
assertEquals(bcryptAlgo, XPackSettings.PASSWORD_HASHING_ALGORITHM.get(
Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), bcryptAlgo).build()));
}
private boolean isSecretkeyFactoryAlgoAvailable(String algorithmId) {
try {
SecretKeyFactory.getInstance(algorithmId);
return true;
} catch (NoSuchAlgorithmException e) {
return false;
}
}
} }


@ -11,7 +11,6 @@ import org.elasticsearch.xpack.core.ml.action.util.QueryPage;
import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.Bucket;
import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer; import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer;
import org.elasticsearch.xpack.core.ml.job.results.PartitionScore;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
@ -53,15 +52,6 @@ public class GetBucketActionResponseTests extends AbstractStreamableTestCase<Res
if (randomBoolean()) { if (randomBoolean()) {
bucket.setInterim(randomBoolean()); bucket.setInterim(randomBoolean());
} }
if (randomBoolean()) {
int size = randomInt(10);
List<PartitionScore> partitionScores = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
partitionScores.add(new PartitionScore(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20),
randomDouble(), randomDouble(), randomDouble()));
}
bucket.setPartitionScores(partitionScores);
}
if (randomBoolean()) { if (randomBoolean()) {
bucket.setProcessingTimeMs(randomLong()); bucket.setProcessingTimeMs(randomLong());
} }


@ -97,11 +97,8 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
builder.setResultFinalizationWindow(randomNonNegativeLong()); builder.setResultFinalizationWindow(randomNonNegativeLong());
} }
-boolean usePerPartitionNormalisation = randomBoolean();
-builder.setUsePerPartitionNormalization(usePerPartitionNormalisation);
-if (!usePerPartitionNormalisation) { // influencers can't be used with per partition normalisation
-builder.setInfluencers(Arrays.asList(generateRandomStringArray(10, 10, false)));
-}
+builder.setInfluencers(Arrays.asList(generateRandomStringArray(10, 10, false)));
return builder; return builder;
} }
@ -690,40 +687,15 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_EMPTY), e.getMessage()); assertEquals(Messages.getMessage(Messages.JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_EMPTY), e.getMessage());
} }
public void testCheckDetectorsHavePartitionFields() {
AnalysisConfig.Builder config = createValidConfig();
config.setUsePerPartitionNormalization(true);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, config::build);
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_PER_PARTITION_NORMALIZATION_REQUIRES_PARTITION_FIELD), e.getMessage());
}
public void testCheckDetectorsHavePartitionFields_doesntThrowWhenValid() { public void testCheckDetectorsHavePartitionFields_doesntThrowWhenValid() {
AnalysisConfig.Builder config = createValidConfig(); AnalysisConfig.Builder config = createValidConfig();
Detector.Builder builder = new Detector.Builder(config.build().getDetectors().get(0)); Detector.Builder builder = new Detector.Builder(config.build().getDetectors().get(0));
builder.setPartitionFieldName("pField"); builder.setPartitionFieldName("pField");
config.build().getDetectors().set(0, builder.build()); config.build().getDetectors().set(0, builder.build());
config.setUsePerPartitionNormalization(true);
config.build(); config.build();
} }
public void testCheckNoInfluencersAreSet() {
AnalysisConfig.Builder config = createValidConfig();
Detector.Builder builder = new Detector.Builder(config.build().getDetectors().get(0));
builder.setPartitionFieldName("pField");
config.build().getDetectors().set(0, builder.build());
config.setInfluencers(Arrays.asList("inf1", "inf2"));
config.setUsePerPartitionNormalization(true);
ElasticsearchException e = ESTestCase.expectThrows(ElasticsearchException.class, config::build);
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_PER_PARTITION_NORMALIZATION_CANNOT_USE_INFLUENCERS), e.getMessage());
}
public void testVerify_GivenCategorizationFiltersContainInvalidRegex() { public void testVerify_GivenCategorizationFiltersContainInvalidRegex() {
AnalysisConfig.Builder config = createValidCategorizationConfig(); AnalysisConfig.Builder config = createValidCategorizationConfig();
config.setCategorizationFilters(Arrays.asList("foo", "(")); config.setCategorizationFilters(Arrays.asList("foo", "("));
@ -756,7 +728,7 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
@Override @Override
protected AnalysisConfig mutateInstance(AnalysisConfig instance) { protected AnalysisConfig mutateInstance(AnalysisConfig instance) {
AnalysisConfig.Builder builder = new AnalysisConfig.Builder(instance); AnalysisConfig.Builder builder = new AnalysisConfig.Builder(instance);
-switch (between(0, 11)) {
+switch (between(0, 10)) {
case 0: case 0:
List<Detector> detectors = new ArrayList<>(instance.getDetectors()); List<Detector> detectors = new ArrayList<>(instance.getDetectors());
Detector.Builder detector = new Detector.Builder(); Detector.Builder detector = new Detector.Builder();
@ -832,7 +804,6 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
List<String> influencers = new ArrayList<>(instance.getInfluencers()); List<String> influencers = new ArrayList<>(instance.getInfluencers());
influencers.add(randomAlphaOfLengthBetween(5, 10)); influencers.add(randomAlphaOfLengthBetween(5, 10));
builder.setInfluencers(influencers); builder.setInfluencers(influencers);
builder.setUsePerPartitionNormalization(false);
break; break;
case 8: case 8:
if (instance.getOverlappingBuckets() == null) { if (instance.getOverlappingBuckets() == null) {
@ -855,13 +826,6 @@ public class AnalysisConfigTests extends AbstractSerializingTestCase<AnalysisCon
builder.setMultivariateByFields(instance.getMultivariateByFields() == false); builder.setMultivariateByFields(instance.getMultivariateByFields() == false);
} }
break; break;
case 11:
boolean usePerPartitionNormalization = instance.getUsePerPartitionNormalization() == false;
builder.setUsePerPartitionNormalization(usePerPartitionNormalization);
if (usePerPartitionNormalization) {
builder.setInfluencers(Collections.emptyList());
}
break;
default: default:
throw new AssertionError("Illegal randomisation branch"); throw new AssertionError("Illegal randomisation branch");
} }


@ -384,8 +384,8 @@ public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlu
autodetectProcessFactory = (job, autodetectParams, executorService, onProcessCrash) -> autodetectProcessFactory = (job, autodetectParams, executorService, onProcessCrash) ->
new BlackHoleAutodetectProcess(job.getId()); new BlackHoleAutodetectProcess(job.getId());
// factor of 1.0 makes renormalization a no-op // factor of 1.0 makes renormalization a no-op
-normalizerProcessFactory = (jobId, quantilesState, bucketSpan, perPartitionNormalization,
-executorService) -> new MultiplyingNormalizerProcess(settings, 1.0);
+normalizerProcessFactory = (jobId, quantilesState, bucketSpan, executorService) ->
+new MultiplyingNormalizerProcess(settings, 1.0);
} }
NormalizerFactory normalizerFactory = new NormalizerFactory(normalizerProcessFactory, NormalizerFactory normalizerFactory = new NormalizerFactory(normalizerProcessFactory,
threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME)); threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME));


@ -61,7 +61,6 @@ public class AutodetectBuilder {
public static final String LENGTH_ENCODED_INPUT_ARG = "--lengthEncodedInput"; public static final String LENGTH_ENCODED_INPUT_ARG = "--lengthEncodedInput";
public static final String MODEL_CONFIG_ARG = "--modelconfig="; public static final String MODEL_CONFIG_ARG = "--modelconfig=";
public static final String QUANTILES_STATE_PATH_ARG = "--quantilesState="; public static final String QUANTILES_STATE_PATH_ARG = "--quantilesState=";
public static final String PER_PARTITION_NORMALIZATION = "--perPartitionNormalization";
private static final String CONF_EXTENSION = ".conf"; private static final String CONF_EXTENSION = ".conf";
static final String JOB_ID_ARG = "--jobid="; static final String JOB_ID_ARG = "--jobid=";
@ -207,10 +206,6 @@ public class AutodetectBuilder {
if (Boolean.TRUE.equals(analysisConfig.getMultivariateByFields())) { if (Boolean.TRUE.equals(analysisConfig.getMultivariateByFields())) {
command.add(MULTIVARIATE_BY_FIELDS_ARG); command.add(MULTIVARIATE_BY_FIELDS_ARG);
} }
if (analysisConfig.getUsePerPartitionNormalization()) {
command.add(PER_PARTITION_NORMALIZATION);
}
} }
// Input is always length encoded // Input is always length encoded


@ -499,7 +499,7 @@ public class AutodetectProcessManager extends AbstractComponent {
new JobRenormalizedResultsPersister(job.getId(), settings, client), normalizerFactory); new JobRenormalizedResultsPersister(job.getId(), settings, client), normalizerFactory);
ExecutorService renormalizerExecutorService = threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME); ExecutorService renormalizerExecutorService = threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME);
Renormalizer renormalizer = new ShortCircuitingRenormalizer(jobId, scoresUpdater, Renormalizer renormalizer = new ShortCircuitingRenormalizer(jobId, scoresUpdater,
-renormalizerExecutorService, job.getAnalysisConfig().getUsePerPartitionNormalization());
+renormalizerExecutorService);
AutodetectProcess process = autodetectProcessFactory.createAutodetectProcess(job, autodetectParams, autoDetectExecutorService, AutodetectProcess process = autodetectProcessFactory.createAutodetectProcess(job, autodetectParams, autoDetectExecutorService,
onProcessCrash(jobTask)); onProcessCrash(jobTask));


@ -16,12 +16,10 @@ import java.util.Objects;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.elasticsearch.xpack.ml.job.process.normalizer.Normalizable.ChildType.BUCKET_INFLUENCER; import static org.elasticsearch.xpack.ml.job.process.normalizer.Normalizable.ChildType.BUCKET_INFLUENCER;
import static org.elasticsearch.xpack.ml.job.process.normalizer.Normalizable.ChildType.PARTITION_SCORE;
public class BucketNormalizable extends Normalizable { public class BucketNormalizable extends Normalizable {
-private static final List<ChildType> CHILD_TYPES = Arrays.asList(BUCKET_INFLUENCER, PARTITION_SCORE);
+private static final List<ChildType> CHILD_TYPES = Arrays.asList(BUCKET_INFLUENCER);
private final Bucket bucket; private final Bucket bucket;
@ -117,11 +115,6 @@ public class BucketNormalizable extends Normalizable {
.map(bi -> new BucketInfluencerNormalizable(bi, getOriginatingIndex())) .map(bi -> new BucketInfluencerNormalizable(bi, getOriginatingIndex()))
.collect(Collectors.toList())); .collect(Collectors.toList()));
break; break;
case PARTITION_SCORE:
children.addAll(bucket.getPartitionScores().stream()
.map(ps -> new PartitionScoreNormalizable(ps, getOriginatingIndex()))
.collect(Collectors.toList()));
break;
default: default:
throw new IllegalArgumentException("Invalid type: " + type); throw new IllegalArgumentException("Invalid type: " + type);
} }
@ -135,8 +128,6 @@ public class BucketNormalizable extends Normalizable {
double oldScore = bucket.getAnomalyScore(); double oldScore = bucket.getAnomalyScore();
bucket.setAnomalyScore(maxScore); bucket.setAnomalyScore(maxScore);
return maxScore != oldScore; return maxScore != oldScore;
case PARTITION_SCORE:
return false;
default: default:
throw new IllegalArgumentException("Invalid type: " + childrenType); throw new IllegalArgumentException("Invalid type: " + childrenType);
} }


@ -38,20 +38,19 @@ public class NativeNormalizerProcessFactory implements NormalizerProcessFactory
@Override @Override
public NormalizerProcess createNormalizerProcess(String jobId, String quantilesState, Integer bucketSpan, public NormalizerProcess createNormalizerProcess(String jobId, String quantilesState, Integer bucketSpan,
-boolean perPartitionNormalization, ExecutorService executorService) {
+ExecutorService executorService) {
ProcessPipes processPipes = new ProcessPipes(env, NAMED_PIPE_HELPER, NormalizerBuilder.NORMALIZE, jobId, ProcessPipes processPipes = new ProcessPipes(env, NAMED_PIPE_HELPER, NormalizerBuilder.NORMALIZE, jobId,
true, false, true, true, false, false); true, false, true, true, false, false);
-createNativeProcess(jobId, quantilesState, processPipes, bucketSpan, perPartitionNormalization);
+createNativeProcess(jobId, quantilesState, processPipes, bucketSpan);
return new NativeNormalizerProcess(jobId, settings, processPipes.getLogStream().get(), return new NativeNormalizerProcess(jobId, settings, processPipes.getLogStream().get(),
processPipes.getProcessInStream().get(), processPipes.getProcessOutStream().get(), executorService); processPipes.getProcessInStream().get(), processPipes.getProcessOutStream().get(), executorService);
} }
-private void createNativeProcess(String jobId, String quantilesState, ProcessPipes processPipes, Integer bucketSpan,
-boolean perPartitionNormalization) {
+private void createNativeProcess(String jobId, String quantilesState, ProcessPipes processPipes, Integer bucketSpan) {
try { try {
-List<String> command = new NormalizerBuilder(env, jobId, quantilesState, bucketSpan, perPartitionNormalization).build();
+List<String> command = new NormalizerBuilder(env, jobId, quantilesState, bucketSpan).build();
processPipes.addArgs(command); processPipes.addArgs(command);
nativeController.startProcess(command); nativeController.startProcess(command);
processPipes.connectStreams(PROCESS_STARTUP_TIMEOUT); processPipes.connectStreams(PROCESS_STARTUP_TIMEOUT);


@ -11,7 +11,7 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
public abstract class Normalizable implements ToXContentObject { public abstract class Normalizable implements ToXContentObject {
-public enum ChildType {BUCKET_INFLUENCER, RECORD, PARTITION_SCORE};
+public enum ChildType {BUCKET_INFLUENCER, RECORD};
private final String indexName; private final String indexName;
private boolean hadBigNormalizedUpdate; private boolean hadBigNormalizedUpdate;


@ -46,15 +46,14 @@ public class Normalizer {
* and normalizes the given results. * and normalizes the given results.
* *
* @param bucketSpan If <code>null</code> the default is used * @param bucketSpan If <code>null</code> the default is used
* @param perPartitionNormalization Is normalization per partition (rather than per job)?
* @param results Will be updated with the normalized results * @param results Will be updated with the normalized results
* @param quantilesState The state to be used to seed the system change * @param quantilesState The state to be used to seed the system change
* normalizer * normalizer
*/ */
-public void normalize(Integer bucketSpan, boolean perPartitionNormalization,
+public void normalize(Integer bucketSpan,
List<? extends Normalizable> results, String quantilesState) {
NormalizerProcess process = processFactory.createNormalizerProcess(jobId, quantilesState, bucketSpan,
-perPartitionNormalization, executorService);
+executorService);
NormalizerResultHandler resultsHandler = process.createNormalizedResultsHandler(); NormalizerResultHandler resultsHandler = process.createNormalizedResultsHandler();
Future<?> resultsHandlerFuture = executorService.submit(() -> { Future<?> resultsHandlerFuture = executorService.submit(() -> {
try { try {


@ -29,15 +29,12 @@ public class NormalizerBuilder {
private final String jobId; private final String jobId;
private final String quantilesState; private final String quantilesState;
private final Integer bucketSpan; private final Integer bucketSpan;
private final boolean perPartitionNormalization;
-public NormalizerBuilder(Environment env, String jobId, String quantilesState, Integer bucketSpan,
-boolean perPartitionNormalization) {
+public NormalizerBuilder(Environment env, String jobId, String quantilesState, Integer bucketSpan) {
this.env = env; this.env = env;
this.jobId = jobId; this.jobId = jobId;
this.quantilesState = quantilesState; this.quantilesState = quantilesState;
this.bucketSpan = bucketSpan; this.bucketSpan = bucketSpan;
this.perPartitionNormalization = perPartitionNormalization;
} }
/** /**
@ -49,9 +46,6 @@ public class NormalizerBuilder {
command.add(NORMALIZE_PATH); command.add(NORMALIZE_PATH);
addIfNotNull(bucketSpan, AutodetectBuilder.BUCKET_SPAN_ARG, command); addIfNotNull(bucketSpan, AutodetectBuilder.BUCKET_SPAN_ARG, command);
command.add(AutodetectBuilder.LENGTH_ENCODED_INPUT_ARG); command.add(AutodetectBuilder.LENGTH_ENCODED_INPUT_ARG);
if (perPartitionNormalization) {
command.add(AutodetectBuilder.PER_PARTITION_NORMALIZATION);
}
if (quantilesState != null) { if (quantilesState != null) {
Path quantilesStateFilePath = AutodetectBuilder.writeNormalizerInitState(jobId, quantilesState, env); Path quantilesStateFilePath = AutodetectBuilder.writeNormalizerInitState(jobId, quantilesState, env);


@ -17,6 +17,5 @@ public interface NormalizerProcessFactory {
* @param executorService Executor service used to start the async tasks a job needs to operate the analytical process * @param executorService Executor service used to start the async tasks a job needs to operate the analytical process
* @return The process * @return The process
*/ */
-NormalizerProcess createNormalizerProcess(String jobId, String quantilesState, Integer bucketSpan, boolean perPartitionNormalization,
-ExecutorService executorService);
+NormalizerProcess createNormalizerProcess(String jobId, String quantilesState, Integer bucketSpan, ExecutorService executorService);
} }


@ -1,87 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.job.process.normalizer;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.core.ml.job.results.PartitionScore;
import java.io.IOException;
import java.util.Objects;
public class PartitionScoreNormalizable extends AbstractLeafNormalizable {
private final PartitionScore score;
public PartitionScoreNormalizable(PartitionScore score, String indexName) {
super(indexName);
this.score = Objects.requireNonNull(score);
}
@Override
public String getId() {
throw new UnsupportedOperationException("PartitionScore has no ID as it should not be persisted outside of the owning bucket");
}
@Override
public Level getLevel() {
return Level.PARTITION;
}
@Override
public String getPartitionFieldName() {
return score.getPartitionFieldName();
}
@Override
public String getPartitionFieldValue() {
return score.getPartitionFieldValue();
}
@Override
public String getPersonFieldName() {
return null;
}
@Override
public String getPersonFieldValue() {
return null;
}
@Override
public String getFunctionName() {
return null;
}
@Override
public String getValueFieldName() {
return null;
}
@Override
public double getProbability() {
return score.getProbability();
}
@Override
public double getNormalizedScore() {
return score.getRecordScore();
}
@Override
public void setNormalizedScore(double normalizedScore) {
score.setRecordScore(normalizedScore);
}
@Override
public void setParentScore(double parentScore) {
// Do nothing as it is not holding the parent score.
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return score.toXContent(builder, params);
}
}


@@ -79,12 +79,12 @@ public class ScoresUpdater {
* Update the anomaly score field on all previously persisted buckets
* and all contained records
*/
- public void update(String quantilesState, long endBucketEpochMs, long windowExtensionMs, boolean perPartitionNormalization) {
+ public void update(String quantilesState, long endBucketEpochMs, long windowExtensionMs) {
Normalizer normalizer = normalizerFactory.create(jobId);
int[] counts = {0, 0};
- updateBuckets(normalizer, quantilesState, endBucketEpochMs, windowExtensionMs, counts, perPartitionNormalization);
- updateRecords(normalizer, quantilesState, endBucketEpochMs, windowExtensionMs, counts, perPartitionNormalization);
- updateInfluencers(normalizer, quantilesState, endBucketEpochMs, windowExtensionMs, counts, perPartitionNormalization);
+ updateBuckets(normalizer, quantilesState, endBucketEpochMs, windowExtensionMs, counts);
+ updateRecords(normalizer, quantilesState, endBucketEpochMs, windowExtensionMs, counts);
+ updateInfluencers(normalizer, quantilesState, endBucketEpochMs, windowExtensionMs, counts);
// The updates will have been persisted in batches throughout the renormalization
// process - this call just catches any leftovers
@@ -94,7 +94,7 @@
}
private void updateBuckets(Normalizer normalizer, String quantilesState, long endBucketEpochMs,
- long windowExtensionMs, int[] counts, boolean perPartitionNormalization) {
+ long windowExtensionMs, int[] counts) {
BatchedDocumentsIterator<Result<Bucket>> bucketsIterator =
jobResultsProvider.newBatchedBucketsIterator(jobId)
.timeRange(calcNormalizationWindowStart(endBucketEpochMs, windowExtensionMs), endBucketEpochMs)
@@ -114,14 +114,14 @@
if (current.result.isNormalizable()) {
bucketsToRenormalize.add(new BucketNormalizable(current.result, current.index));
if (bucketsToRenormalize.size() >= TARGET_BUCKETS_TO_RENORMALIZE) {
- normalizeBuckets(normalizer, bucketsToRenormalize, quantilesState, counts, perPartitionNormalization);
+ normalizeBuckets(normalizer, bucketsToRenormalize, quantilesState, counts);
bucketsToRenormalize.clear();
}
}
}
}
if (!bucketsToRenormalize.isEmpty()) {
- normalizeBuckets(normalizer, bucketsToRenormalize, quantilesState, counts, perPartitionNormalization);
+ normalizeBuckets(normalizer, bucketsToRenormalize, quantilesState, counts);
}
}
@@ -130,8 +130,8 @@
}
private void normalizeBuckets(Normalizer normalizer, List<BucketNormalizable> normalizableBuckets,
- String quantilesState, int[] counts, boolean perPartitionNormalization) {
- normalizer.normalize(bucketSpan, perPartitionNormalization, normalizableBuckets, quantilesState);
+ String quantilesState, int[] counts) {
+ normalizer.normalize(bucketSpan, normalizableBuckets, quantilesState);
for (BucketNormalizable bucketNormalizable : normalizableBuckets) {
if (bucketNormalizable.hadBigNormalizedUpdate()) {
@@ -144,7 +144,7 @@
}
private void updateRecords(Normalizer normalizer, String quantilesState, long endBucketEpochMs,
- long windowExtensionMs, int[] counts, boolean perPartitionNormalization) {
+ long windowExtensionMs, int[] counts) {
BatchedDocumentsIterator<Result<AnomalyRecord>> recordsIterator = jobResultsProvider.newBatchedRecordsIterator(jobId)
.timeRange(calcNormalizationWindowStart(endBucketEpochMs, windowExtensionMs), endBucketEpochMs)
.includeInterim(false);
@@ -160,14 +160,14 @@
List<Normalizable> asNormalizables = records.stream()
.map(recordResultIndex -> new RecordNormalizable(recordResultIndex.result, recordResultIndex.index))
.collect(Collectors.toList());
- normalizer.normalize(bucketSpan, perPartitionNormalization, asNormalizables, quantilesState);
+ normalizer.normalize(bucketSpan, asNormalizables, quantilesState);
persistChanged(counts, asNormalizables);
}
}
private void updateInfluencers(Normalizer normalizer, String quantilesState, long endBucketEpochMs,
- long windowExtensionMs, int[] counts, boolean perPartitionNormalization) {
+ long windowExtensionMs, int[] counts) {
BatchedDocumentsIterator<Result<Influencer>> influencersIterator = jobResultsProvider.newBatchedInfluencersIterator(jobId)
.timeRange(calcNormalizationWindowStart(endBucketEpochMs, windowExtensionMs), endBucketEpochMs)
.includeInterim(false);
@@ -183,7 +183,7 @@
List<Normalizable> asNormalizables = influencers.stream()
.map(influencerResultIndex -> new InfluencerNormalizable(influencerResultIndex.result, influencerResultIndex.index))
.collect(Collectors.toList());
- normalizer.normalize(bucketSpan, perPartitionNormalization, asNormalizables, quantilesState);
+ normalizer.normalize(bucketSpan, asNormalizables, quantilesState);
persistChanged(counts, asNormalizables);
}
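Note (added for orientation, not part of the diff): with the per-partition flag removed, a renormalization pass is driven through the three-argument update(). A minimal sketch of a caller; the method and variable names below are illustrative assumptions, only the update() signature is taken from the diff above.

    // Sketch only: drive one renormalization pass with the simplified update().
    static void renormalize(ScoresUpdater scoresUpdater, String quantilesState) {
        long endBucketEpochMs = 1534000000000L; // latest bucket to renormalize up to (example value)
        long windowExtensionMs = 0L;            // no extra look-back window
        scoresUpdater.update(quantilesState, endBucketEpochMs, windowExtensionMs);
    }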


@@ -26,7 +26,6 @@ public class ShortCircuitingRenormalizer implements Renormalizer {
private final String jobId;
private final ScoresUpdater scoresUpdater;
private final ExecutorService executorService;
- private final boolean isPerPartitionNormalization;
private final Deque<QuantilesWithLatch> quantilesDeque = new ConcurrentLinkedDeque<>();
private final Deque<CountDownLatch> latchDeque = new ConcurrentLinkedDeque<>();
/**
@@ -34,12 +33,10 @@
*/
private final Semaphore semaphore = new Semaphore(1);
- public ShortCircuitingRenormalizer(String jobId, ScoresUpdater scoresUpdater, ExecutorService executorService,
-                                    boolean isPerPartitionNormalization) {
+ public ShortCircuitingRenormalizer(String jobId, ScoresUpdater scoresUpdater, ExecutorService executorService) {
this.jobId = jobId;
this.scoresUpdater = scoresUpdater;
this.executorService = executorService;
- this.isPerPartitionNormalization = isPerPartitionNormalization;
}
@Override
@@ -161,8 +158,7 @@
jobId, latestBucketTimeMs, earliestBucketTimeMs);
windowExtensionMs = 0;
}
- scoresUpdater.update(latestQuantiles.getQuantileState(), latestBucketTimeMs, windowExtensionMs,
-                      isPerPartitionNormalization);
+ scoresUpdater.update(latestQuantiles.getQuantileState(), latestBucketTimeMs, windowExtensionMs);
latch.countDown();
latch = null;
}


@ -56,7 +56,6 @@ public class AutodetectBuilderTests extends ESTestCase {
acBuilder.setSummaryCountFieldName("summaryField"); acBuilder.setSummaryCountFieldName("summaryField");
acBuilder.setOverlappingBuckets(true); acBuilder.setOverlappingBuckets(true);
acBuilder.setMultivariateByFields(true); acBuilder.setMultivariateByFields(true);
acBuilder.setUsePerPartitionNormalization(true);
job.setAnalysisConfig(acBuilder); job.setAnalysisConfig(acBuilder);
DataDescription.Builder dd = new DataDescription.Builder(); DataDescription.Builder dd = new DataDescription.Builder();
@ -66,7 +65,7 @@ public class AutodetectBuilderTests extends ESTestCase {
job.setDataDescription(dd); job.setDataDescription(dd);
List<String> command = autodetectBuilder(job.build()).buildAutodetectCommand(); List<String> command = autodetectBuilder(job.build()).buildAutodetectCommand();
- assertEquals(13, command.size());
+ assertEquals(12, command.size());
assertTrue(command.contains(AutodetectBuilder.AUTODETECT_PATH)); assertTrue(command.contains(AutodetectBuilder.AUTODETECT_PATH));
assertTrue(command.contains(AutodetectBuilder.BUCKET_SPAN_ARG + "120")); assertTrue(command.contains(AutodetectBuilder.BUCKET_SPAN_ARG + "120"));
assertTrue(command.contains(AutodetectBuilder.LATENCY_ARG + "360")); assertTrue(command.contains(AutodetectBuilder.LATENCY_ARG + "360"));
@ -80,8 +79,6 @@ public class AutodetectBuilderTests extends ESTestCase {
assertTrue(command.contains(AutodetectBuilder.TIME_FIELD_ARG + "tf")); assertTrue(command.contains(AutodetectBuilder.TIME_FIELD_ARG + "tf"));
assertTrue(command.contains(AutodetectBuilder.JOB_ID_ARG + "unit-test-job")); assertTrue(command.contains(AutodetectBuilder.JOB_ID_ARG + "unit-test-job"));
assertTrue(command.contains(AutodetectBuilder.PER_PARTITION_NORMALIZATION));
int expectedPersistInterval = 10800 + AutodetectBuilder.calculateStaggeringInterval(job.getId()); int expectedPersistInterval = 10800 + AutodetectBuilder.calculateStaggeringInterval(job.getId());
assertTrue(command.contains(AutodetectBuilder.PERSIST_INTERVAL_ARG + expectedPersistInterval)); assertTrue(command.contains(AutodetectBuilder.PERSIST_INTERVAL_ARG + expectedPersistInterval));
int expectedMaxQuantileInterval = 21600 + AutodetectBuilder.calculateStaggeringInterval(job.getId()); int expectedMaxQuantileInterval = 21600 + AutodetectBuilder.calculateStaggeringInterval(job.getId());
@ -116,4 +113,4 @@ public class AutodetectBuilderTests extends ESTestCase {
private AutodetectBuilder autodetectBuilder(Job job) { private AutodetectBuilder autodetectBuilder(Job job) {
return new AutodetectBuilder(job, filesToDelete, logger, env, settings, nativeController, processPipes); return new AutodetectBuilder(job, filesToDelete, logger, env, settings, nativeController, processPipes);
} }
} }


@ -172,25 +172,6 @@ public class AutoDetectResultProcessorTests extends ESTestCase {
verifyNoMoreInteractions(persister); verifyNoMoreInteractions(persister);
} }
public void testProcessResult_records_isPerPartitionNormalization() {
JobResultsPersister.Builder bulkBuilder = mock(JobResultsPersister.Builder.class);
when(persister.bulkPersisterBuilder(JOB_ID)).thenReturn(bulkBuilder);
AutoDetectResultProcessor.Context context = new AutoDetectResultProcessor.Context("foo", bulkBuilder);
context.deleteInterimRequired = false;
AutodetectResult result = mock(AutodetectResult.class);
AnomalyRecord record1 = new AnomalyRecord("foo", new Date(123), 123);
record1.setPartitionFieldValue("pValue");
AnomalyRecord record2 = new AnomalyRecord("foo", new Date(123), 123);
record2.setPartitionFieldValue("pValue");
List<AnomalyRecord> records = Arrays.asList(record1, record2);
when(result.getRecords()).thenReturn(records);
processorUnderTest.processResult(context, result);
verify(bulkBuilder, times(1)).persistRecords(records);
verify(bulkBuilder, never()).executeRequest();
verifyNoMoreInteractions(persister);
}
public void testProcessResult_influencers() { public void testProcessResult_influencers() {
JobResultsPersister.Builder bulkBuilder = mock(JobResultsPersister.Builder.class); JobResultsPersister.Builder bulkBuilder = mock(JobResultsPersister.Builder.class);


@ -9,10 +9,8 @@ import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.Bucket;
import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer; import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer;
import org.elasticsearch.xpack.core.ml.job.results.PartitionScore;
import org.junit.Before; import org.junit.Before;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
@ -46,11 +44,6 @@ public class BucketNormalizableTests extends ESTestCase {
AnomalyRecord record2 = new AnomalyRecord("foo", bucket.getTimestamp(), 600); AnomalyRecord record2 = new AnomalyRecord("foo", bucket.getTimestamp(), 600);
record2.setRecordScore(2.0); record2.setRecordScore(2.0);
bucket.setRecords(Arrays.asList(record1, record2)); bucket.setRecords(Arrays.asList(record1, record2));
List<PartitionScore> partitionScores = new ArrayList<>();
partitionScores.add(new PartitionScore("pf1", "pv1", 0.3, 0.2, 0.1));
partitionScores.add(new PartitionScore("pf1", "pv2", 0.5, 0.4, 0.01));
bucket.setPartitionScores(partitionScores);
} }
public void testIsContainerOnly() { public void testIsContainerOnly() {
@ -106,15 +99,11 @@ public class BucketNormalizableTests extends ESTestCase {
BucketNormalizable bn = new BucketNormalizable(bucket, INDEX_NAME); BucketNormalizable bn = new BucketNormalizable(bucket, INDEX_NAME);
List<Normalizable> children = bn.getChildren(); List<Normalizable> children = bn.getChildren();
- assertEquals(4, children.size());
+ assertEquals(2, children.size());
assertTrue(children.get(0) instanceof BucketInfluencerNormalizable); assertTrue(children.get(0) instanceof BucketInfluencerNormalizable);
assertEquals(42.0, children.get(0).getNormalizedScore(), EPSILON); assertEquals(42.0, children.get(0).getNormalizedScore(), EPSILON);
assertTrue(children.get(1) instanceof BucketInfluencerNormalizable); assertTrue(children.get(1) instanceof BucketInfluencerNormalizable);
assertEquals(88.0, children.get(1).getNormalizedScore(), EPSILON); assertEquals(88.0, children.get(1).getNormalizedScore(), EPSILON);
assertTrue(children.get(2) instanceof PartitionScoreNormalizable);
assertEquals(0.2, children.get(2).getNormalizedScore(), EPSILON);
assertTrue(children.get(3) instanceof PartitionScoreNormalizable);
assertEquals(0.4, children.get(3).getNormalizedScore(), EPSILON);
} }
public void testGetChildren_GivenTypeBucketInfluencer() { public void testGetChildren_GivenTypeBucketInfluencer() {
@ -132,7 +121,6 @@ public class BucketNormalizableTests extends ESTestCase {
BucketNormalizable bucketNormalizable = new BucketNormalizable(bucket, INDEX_NAME); BucketNormalizable bucketNormalizable = new BucketNormalizable(bucket, INDEX_NAME);
assertTrue(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.BUCKET_INFLUENCER, 95.0)); assertTrue(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.BUCKET_INFLUENCER, 95.0));
assertFalse(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.PARTITION_SCORE, 42.0));
assertEquals(95.0, bucket.getAnomalyScore(), EPSILON); assertEquals(95.0, bucket.getAnomalyScore(), EPSILON);
} }
@ -141,7 +129,6 @@ public class BucketNormalizableTests extends ESTestCase {
BucketNormalizable bucketNormalizable = new BucketNormalizable(bucket, INDEX_NAME); BucketNormalizable bucketNormalizable = new BucketNormalizable(bucket, INDEX_NAME);
assertFalse(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.BUCKET_INFLUENCER, 88.0)); assertFalse(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.BUCKET_INFLUENCER, 88.0));
assertFalse(bucketNormalizable.setMaxChildrenScore(Normalizable.ChildType.PARTITION_SCORE, 2.0));
assertEquals(88.0, bucket.getAnomalyScore(), EPSILON); assertEquals(88.0, bucket.getAnomalyScore(), EPSILON);
} }


@@ -21,11 +21,10 @@ public class NormalizerBuilderTests extends ESTestCase {
Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build());
String jobId = "unit-test-job";
- List<String> command = new NormalizerBuilder(env, jobId, null, 300, true).build();
- assertEquals(4, command.size());
+ List<String> command = new NormalizerBuilder(env, jobId, null, 300).build();
+ assertEquals(3, command.size());
assertTrue(command.contains("./normalize"));
assertTrue(command.contains(AutodetectBuilder.BUCKET_SPAN_ARG + "300"));
assertTrue(command.contains(AutodetectBuilder.LENGTH_ENCODED_INPUT_ARG));
- assertTrue(command.contains(AutodetectBuilder.PER_PARTITION_NORMALIZATION));
}
}


@@ -49,7 +49,7 @@ public class NormalizerTests extends ESTestCase {
ExecutorService threadpool = Executors.newScheduledThreadPool(1);
try {
NormalizerProcessFactory processFactory = mock(NormalizerProcessFactory.class);
- when(processFactory.createNormalizerProcess(eq(JOB_ID), eq(QUANTILES_STATE), eq(BUCKET_SPAN), eq(false),
+ when(processFactory.createNormalizerProcess(eq(JOB_ID), eq(QUANTILES_STATE), eq(BUCKET_SPAN),
any())).thenReturn(new MultiplyingNormalizerProcess(Settings.EMPTY, FACTOR));
Normalizer normalizer = new Normalizer(JOB_ID, processFactory, threadpool);
@@ -58,7 +58,7 @@
bucket.addBucketInfluencer(createTimeBucketInfluencer(bucket.getTimestamp(), 0.07, INITIAL_SCORE));
List<Normalizable> asNormalizables = Arrays.asList(new BucketNormalizable(bucket, INDEX_NAME));
- normalizer.normalize(BUCKET_SPAN, false, asNormalizables, QUANTILES_STATE);
+ normalizer.normalize(BUCKET_SPAN, asNormalizables, QUANTILES_STATE);
assertEquals(1, asNormalizables.size());
assertEquals(FACTOR * INITIAL_SCORE, asNormalizables.get(0).getNormalizedScore(), 0.0001);


@ -33,7 +33,6 @@ import java.util.Deque;
import java.util.List; import java.util.List;
import static org.mockito.Matchers.any; import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt; import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyList; import static org.mockito.Matchers.anyList;
import static org.mockito.Matchers.anyListOf; import static org.mockito.Matchers.anyListOf;
@ -95,7 +94,7 @@ public class ScoresUpdaterTests extends ESTestCase {
buckets.add(bucket); buckets.add(bucket);
givenProviderReturnsBuckets(buckets); givenProviderReturnsBuckets(buckets);
- scoresUpdater.update(QUANTILES_STATE, 3600, 0, false);
+ scoresUpdater.update(QUANTILES_STATE, 3600, 0);
verifyNormalizerWasInvoked(0); verifyNormalizerWasInvoked(0);
verifyNothingWasUpdated(); verifyNothingWasUpdated();
@ -113,7 +112,7 @@ public class ScoresUpdaterTests extends ESTestCase {
givenProviderReturnsBuckets(buckets); givenProviderReturnsBuckets(buckets);
givenProviderReturnsRecords(new ArrayDeque<>()); givenProviderReturnsRecords(new ArrayDeque<>());
- scoresUpdater.update(QUANTILES_STATE, 3600, 0, false);
+ scoresUpdater.update(QUANTILES_STATE, 3600, 0);
verifyNormalizerWasInvoked(1); verifyNormalizerWasInvoked(1);
verify(jobRenormalizedResultsPersister, times(1)).updateBucket(any()); verify(jobRenormalizedResultsPersister, times(1)).updateBucket(any());
@ -129,7 +128,7 @@ public class ScoresUpdaterTests extends ESTestCase {
givenProviderReturnsBuckets(buckets); givenProviderReturnsBuckets(buckets);
givenProviderReturnsRecords(new ArrayDeque<>()); givenProviderReturnsRecords(new ArrayDeque<>());
- scoresUpdater.update(QUANTILES_STATE, 3600, 0, false);
+ scoresUpdater.update(QUANTILES_STATE, 3600, 0);
verifyNormalizerWasInvoked(1); verifyNormalizerWasInvoked(1);
verifyBucketWasUpdated(1); verifyBucketWasUpdated(1);
@ -150,7 +149,7 @@ public class ScoresUpdaterTests extends ESTestCase {
givenProviderReturnsBuckets(buckets); givenProviderReturnsBuckets(buckets);
givenProviderReturnsRecords(records); givenProviderReturnsRecords(records);
- scoresUpdater.update(QUANTILES_STATE, 3600, 0, false);
+ scoresUpdater.update(QUANTILES_STATE, 3600, 0);
verifyNormalizerWasInvoked(2); verifyNormalizerWasInvoked(2);
verify(jobRenormalizedResultsPersister, times(1)).updateBucket(any()); verify(jobRenormalizedResultsPersister, times(1)).updateBucket(any());
@ -176,7 +175,7 @@ public class ScoresUpdaterTests extends ESTestCase {
givenProviderReturnsBuckets(batch1, batch2); givenProviderReturnsBuckets(batch1, batch2);
givenProviderReturnsRecords(new ArrayDeque<>()); givenProviderReturnsRecords(new ArrayDeque<>());
- scoresUpdater.update(QUANTILES_STATE, 3600, 0, false);
+ scoresUpdater.update(QUANTILES_STATE, 3600, 0);
verifyNormalizerWasInvoked(1); verifyNormalizerWasInvoked(1);
@ -212,7 +211,7 @@ public class ScoresUpdaterTests extends ESTestCase {
recordIter.requireIncludeInterim(false); recordIter.requireIncludeInterim(false);
when(jobResultsProvider.newBatchedRecordsIterator(JOB_ID)).thenReturn(recordIter); when(jobResultsProvider.newBatchedRecordsIterator(JOB_ID)).thenReturn(recordIter);
- scoresUpdater.update(QUANTILES_STATE, 3600, 0, false);
+ scoresUpdater.update(QUANTILES_STATE, 3600, 0);
verifyNormalizerWasInvoked(2); verifyNormalizerWasInvoked(2);
} }
@ -224,7 +223,7 @@ public class ScoresUpdaterTests extends ESTestCase {
influencers.add(influencer); influencers.add(influencer);
givenProviderReturnsInfluencers(influencers); givenProviderReturnsInfluencers(influencers);
- scoresUpdater.update(QUANTILES_STATE, 3600, 0, false);
+ scoresUpdater.update(QUANTILES_STATE, 3600, 0);
verifyNormalizerWasInvoked(1); verifyNormalizerWasInvoked(1);
verify(jobRenormalizedResultsPersister, times(1)).updateResults(any()); verify(jobRenormalizedResultsPersister, times(1)).updateResults(any());
@ -253,7 +252,7 @@ public class ScoresUpdaterTests extends ESTestCase {
givenProviderReturnsRecords(records); givenProviderReturnsRecords(records);
scoresUpdater.shutdown(); scoresUpdater.shutdown();
- scoresUpdater.update(QUANTILES_STATE, 3600, 0, false);
+ scoresUpdater.update(QUANTILES_STATE, 3600, 0);
verifyNormalizerWasInvoked(0); verifyNormalizerWasInvoked(0);
verify(jobRenormalizedResultsPersister, never()).updateBucket(any()); verify(jobRenormalizedResultsPersister, never()).updateBucket(any());
@ -272,7 +271,7 @@ public class ScoresUpdaterTests extends ESTestCase {
givenProviderReturnsRecords(new ArrayDeque<>()); givenProviderReturnsRecords(new ArrayDeque<>());
givenProviderReturnsNoInfluencers(); givenProviderReturnsNoInfluencers();
- scoresUpdater.update(QUANTILES_STATE, 2595600000L, 0, false);
+ scoresUpdater.update(QUANTILES_STATE, 2595600000L, 0);
verifyNormalizerWasInvoked(1); verifyNormalizerWasInvoked(1);
verifyBucketWasUpdated(1); verifyBucketWasUpdated(1);
@ -289,7 +288,7 @@ public class ScoresUpdaterTests extends ESTestCase {
givenProviderReturnsRecords(new ArrayDeque<>()); givenProviderReturnsRecords(new ArrayDeque<>());
givenProviderReturnsNoInfluencers(); givenProviderReturnsNoInfluencers();
- scoresUpdater.update(QUANTILES_STATE, 90000000L, 0, false);
+ scoresUpdater.update(QUANTILES_STATE, 90000000L, 0);
verifyNormalizerWasInvoked(1); verifyNormalizerWasInvoked(1);
verifyBucketWasUpdated(1); verifyBucketWasUpdated(1);
@ -307,7 +306,7 @@ public class ScoresUpdaterTests extends ESTestCase {
givenProviderReturnsRecords(new ArrayDeque<>()); givenProviderReturnsRecords(new ArrayDeque<>());
givenProviderReturnsNoInfluencers(); givenProviderReturnsNoInfluencers();
- scoresUpdater.update(QUANTILES_STATE, 90000000L, 900000, false);
+ scoresUpdater.update(QUANTILES_STATE, 90000000L, 900000);
verifyNormalizerWasInvoked(1); verifyNormalizerWasInvoked(1);
verifyBucketWasUpdated(1); verifyBucketWasUpdated(1);
@ -339,7 +338,7 @@ public class ScoresUpdaterTests extends ESTestCase {
doAnswer(new Answer<Void>() { doAnswer(new Answer<Void>() {
@Override @Override
public Void answer(InvocationOnMock invocationOnMock) throws Throwable { public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
- List<Normalizable> normalizables = (List<Normalizable>) invocationOnMock.getArguments()[2];
+ List<Normalizable> normalizables = (List<Normalizable>) invocationOnMock.getArguments()[1];
for (Normalizable normalizable : normalizables) { for (Normalizable normalizable : normalizables) {
normalizable.raiseBigChangeFlag(); normalizable.raiseBigChangeFlag();
for (Normalizable child : normalizable.getChildren()) { for (Normalizable child : normalizable.getChildren()) {
@ -348,7 +347,7 @@ public class ScoresUpdaterTests extends ESTestCase {
} }
return null; return null;
} }
- }).when(normalizer).normalize(anyInt(), anyBoolean(), anyList(), anyString());
+ }).when(normalizer).normalize(anyInt(), anyList(), anyString());
} }
private void givenProviderReturnsBuckets(Deque<Bucket> batch1, Deque<Bucket> batch2) { private void givenProviderReturnsBuckets(Deque<Bucket> batch1, Deque<Bucket> batch2) {
@ -416,7 +415,7 @@ public class ScoresUpdaterTests extends ESTestCase {
private void verifyNormalizerWasInvoked(int times) throws IOException { private void verifyNormalizerWasInvoked(int times) throws IOException {
int bucketSpan = job.getAnalysisConfig() == null ? 0 : ((Long) job.getAnalysisConfig().getBucketSpan().seconds()).intValue(); int bucketSpan = job.getAnalysisConfig() == null ? 0 : ((Long) job.getAnalysisConfig().getBucketSpan().seconds()).intValue();
verify(normalizer, times(times)).normalize( verify(normalizer, times(times)).normalize(
- eq(bucketSpan), eq(false), anyListOf(Normalizable.class),
+ eq(bucketSpan), anyListOf(Normalizable.class),
eq(QUANTILES_STATE)); eq(QUANTILES_STATE));
} }


@ -18,7 +18,6 @@ import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
import static org.mockito.Matchers.anyLong; import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.atLeastOnce; import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
@ -43,10 +42,7 @@ public class ShortCircuitingRenormalizerTests extends ESTestCase {
public void testNormalize() throws InterruptedException { public void testNormalize() throws InterruptedException {
ExecutorService threadpool = Executors.newScheduledThreadPool(10); ExecutorService threadpool = Executors.newScheduledThreadPool(10);
try { try {
- boolean isPerPartitionNormalization = randomBoolean();
- ShortCircuitingRenormalizer renormalizer = new ShortCircuitingRenormalizer(JOB_ID, scoresUpdater, threadpool,
-         isPerPartitionNormalization);
+ ShortCircuitingRenormalizer renormalizer = new ShortCircuitingRenormalizer(JOB_ID, scoresUpdater, threadpool);
// Blast through many sets of quantiles in quick succession, faster than the normalizer can process them // Blast through many sets of quantiles in quick succession, faster than the normalizer can process them
for (int i = 1; i < TEST_SIZE / 2; ++i) { for (int i = 1; i < TEST_SIZE / 2; ++i) {
@ -61,7 +57,7 @@ public class ShortCircuitingRenormalizerTests extends ESTestCase {
renormalizer.waitUntilIdle(); renormalizer.waitUntilIdle();
ArgumentCaptor<String> stateCaptor = ArgumentCaptor.forClass(String.class); ArgumentCaptor<String> stateCaptor = ArgumentCaptor.forClass(String.class);
- verify(scoresUpdater, atLeastOnce()).update(stateCaptor.capture(), anyLong(), anyLong(), eq(isPerPartitionNormalization));
+ verify(scoresUpdater, atLeastOnce()).update(stateCaptor.capture(), anyLong(), anyLong());
List<String> quantilesUsed = stateCaptor.getAllValues(); List<String> quantilesUsed = stateCaptor.getAllValues();
assertFalse(quantilesUsed.isEmpty()); assertFalse(quantilesUsed.isEmpty());
@ -91,7 +87,7 @@ public class ShortCircuitingRenormalizerTests extends ESTestCase {
public void testIsEnabled_GivenNormalizationWindowIsZero() { public void testIsEnabled_GivenNormalizationWindowIsZero() {
ScoresUpdater scoresUpdater = mock(ScoresUpdater.class); ScoresUpdater scoresUpdater = mock(ScoresUpdater.class);
when(scoresUpdater.getNormalizationWindow()).thenReturn(0L); when(scoresUpdater.getNormalizationWindow()).thenReturn(0L);
- ShortCircuitingRenormalizer renormalizer = new ShortCircuitingRenormalizer(JOB_ID, scoresUpdater, null, randomBoolean());
+ ShortCircuitingRenormalizer renormalizer = new ShortCircuitingRenormalizer(JOB_ID, scoresUpdater, null);
assertThat(renormalizer.isEnabled(), is(false)); assertThat(renormalizer.isEnabled(), is(false));
} }
@ -99,7 +95,7 @@ public class ShortCircuitingRenormalizerTests extends ESTestCase {
public void testIsEnabled_GivenNormalizationWindowGreaterThanZero() { public void testIsEnabled_GivenNormalizationWindowGreaterThanZero() {
ScoresUpdater scoresUpdater = mock(ScoresUpdater.class); ScoresUpdater scoresUpdater = mock(ScoresUpdater.class);
when(scoresUpdater.getNormalizationWindow()).thenReturn(1L); when(scoresUpdater.getNormalizationWindow()).thenReturn(1L);
- ShortCircuitingRenormalizer renormalizer = new ShortCircuitingRenormalizer(JOB_ID, scoresUpdater, null, randomBoolean());
+ ShortCircuitingRenormalizer renormalizer = new ShortCircuitingRenormalizer(JOB_ID, scoresUpdater, null);
assertThat(renormalizer.isEnabled(), is(true)); assertThat(renormalizer.isEnabled(), is(true));
} }


@ -13,7 +13,6 @@ import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord;
import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecordTests; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecordTests;
import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.Bucket;
import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer; import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer;
import org.elasticsearch.xpack.core.ml.job.results.PartitionScore;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -61,15 +60,6 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
if (randomBoolean()) { if (randomBoolean()) {
bucket.setInterim(randomBoolean()); bucket.setInterim(randomBoolean());
} }
if (randomBoolean()) {
int size = randomInt(10);
List<PartitionScore> partitionScores = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
partitionScores.add(new PartitionScore(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20), randomDouble(),
randomDouble(), randomDouble()));
}
bucket.setPartitionScores(partitionScores);
}
if (randomBoolean()) { if (randomBoolean()) {
bucket.setProcessingTimeMs(randomLong()); bucket.setProcessingTimeMs(randomLong());
} }
@ -235,15 +225,6 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
assertFalse(bucket.isNormalizable()); assertFalse(bucket.isNormalizable());
} }
public void testIsNormalizable_GivenAnomalyScoreIsZeroAndPartitionsScoresAreNonZero() {
Bucket bucket = new Bucket("foo", new Date(123), 123);
bucket.addBucketInfluencer(new BucketInfluencer("foo", new Date(123), 123));
bucket.setAnomalyScore(0.0);
bucket.setPartitionScores(Collections.singletonList(new PartitionScore("n", "v", 50.0, 40.0, 0.01)));
assertTrue(bucket.isNormalizable());
}
public void testIsNormalizable_GivenAnomalyScoreIsNonZeroAndRecordCountIsZero() { public void testIsNormalizable_GivenAnomalyScoreIsNonZeroAndRecordCountIsZero() {
Bucket bucket = new Bucket("foo", new Date(123), 123); Bucket bucket = new Bucket("foo", new Date(123), 123);
bucket.addBucketInfluencer(new BucketInfluencer("foo", new Date(123), 123)); bucket.addBucketInfluencer(new BucketInfluencer("foo", new Date(123), 123));
@ -260,35 +241,7 @@ public class BucketTests extends AbstractSerializingTestCase<Bucket> {
assertTrue(bucket.isNormalizable()); assertTrue(bucket.isNormalizable());
} }
public void testPartitionAnomalyScore() {
List<PartitionScore> pScore = new ArrayList<>();
pScore.add(new PartitionScore("pf", "pv1", 11.0, 10.0, 0.1));
pScore.add(new PartitionScore("pf", "pv3", 51.0, 50.0, 0.1));
pScore.add(new PartitionScore("pf", "pv4", 61.0, 60.0, 0.1));
pScore.add(new PartitionScore("pf", "pv2", 41.0, 40.0, 0.1));
Bucket bucket = new Bucket("foo", new Date(123), 123);
bucket.setPartitionScores(pScore);
double initialAnomalyScore = bucket.partitionInitialAnomalyScore("pv1");
assertEquals(11.0, initialAnomalyScore, 0.001);
double anomalyScore = bucket.partitionAnomalyScore("pv1");
assertEquals(10.0, anomalyScore, 0.001);
initialAnomalyScore = bucket.partitionInitialAnomalyScore("pv2");
assertEquals(41.0, initialAnomalyScore, 0.001);
anomalyScore = bucket.partitionAnomalyScore("pv2");
assertEquals(40.0, anomalyScore, 0.001);
initialAnomalyScore = bucket.partitionInitialAnomalyScore("pv3");
assertEquals(51.0, initialAnomalyScore, 0.001);
anomalyScore = bucket.partitionAnomalyScore("pv3");
assertEquals(50.0, anomalyScore, 0.001);
initialAnomalyScore = bucket.partitionInitialAnomalyScore("pv4");
assertEquals(61.0, initialAnomalyScore, 0.001);
anomalyScore = bucket.partitionAnomalyScore("pv4");
assertEquals(60.0, anomalyScore, 0.001);
}
public void testId() {
Bucket bucket = new Bucket("foo", new Date(123), 60L); Bucket bucket = new Bucket("foo", new Date(123), 60L);
assertEquals("foo_bucket_123_60", bucket.getId()); assertEquals("foo_bucket_123_60", bucket.getId());
} }


@ -1,54 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.job.results;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.results.PartitionScore;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
public class PartitionScoreTests extends AbstractSerializingTestCase<PartitionScore> {
@Override
protected PartitionScore createTestInstance() {
return new PartitionScore(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20), randomDouble(), randomDouble(),
randomDouble());
}
@Override
protected Reader<PartitionScore> instanceReader() {
return PartitionScore::new;
}
@Override
protected PartitionScore doParseInstance(XContentParser parser) {
return PartitionScore.STRICT_PARSER.apply(parser, null);
}
public void testStrictParser() throws IOException {
String json = "{\"partition_field_name\":\"field_1\", \"partition_field_value\":\"x\", \"initial_record_score\": 3," +
" \"record_score\": 3, \"probability\": 0.001, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> PartitionScore.STRICT_PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("unknown field [foo]"));
}
}
public void testLenientParser() throws IOException {
String json = "{\"partition_field_name\":\"field_1\", \"partition_field_value\":\"x\", \"initial_record_score\": 3," +
" \"record_score\": 3, \"probability\": 0.001, \"foo\":\"bar\"}";
try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) {
PartitionScore.LENIENT_PARSER.apply(parser, null);
}
}
}


@ -1,41 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.elasticsearch.bootstrap.BootstrapCheck;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.xpack.core.XPackSettings;
import javax.crypto.SecretKeyFactory;
import java.security.NoSuchAlgorithmException;
import java.util.Locale;
/**
* Bootstrap check to ensure that one of the allowed password hashing algorithms is
* selected and that it is available.
*/
public class PasswordHashingAlgorithmBootstrapCheck implements BootstrapCheck {
@Override
public BootstrapCheckResult check(BootstrapContext context) {
final String selectedAlgorithm = XPackSettings.PASSWORD_HASHING_ALGORITHM.get(context.settings);
if (selectedAlgorithm.toLowerCase(Locale.ROOT).startsWith("pbkdf2")) {
try {
SecretKeyFactory.getInstance("PBKDF2withHMACSHA512");
} catch (NoSuchAlgorithmException e) {
final String errorMessage = String.format(Locale.ROOT,
"Support for PBKDF2WithHMACSHA512 must be available in order to use any of the " +
"PBKDF2 algorithms for the [%s] setting.", XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey());
return BootstrapCheckResult.failure(errorMessage);
}
}
return BootstrapCheckResult.success();
}
@Override
public boolean alwaysEnforce() {
return true;
}
}


@ -300,7 +300,6 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
new TokenSSLBootstrapCheck(), new TokenSSLBootstrapCheck(),
new PkiRealmBootstrapCheck(getSslService()), new PkiRealmBootstrapCheck(getSslService()),
new TLSLicenseBootstrapCheck(), new TLSLicenseBootstrapCheck(),
new PasswordHashingAlgorithmBootstrapCheck(),
new FIPS140SecureSettingsBootstrapCheck(settings, env), new FIPS140SecureSettingsBootstrapCheck(settings, env),
new FIPS140JKSKeystoreBootstrapCheck(settings), new FIPS140JKSKeystoreBootstrapCheck(settings),
new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings))); new FIPS140PasswordHashingAlgorithmBootstrapCheck(settings)));


@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.security.action.user;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.action.ActionListener;
+ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.inject.Inject;
@@ -18,11 +19,15 @@ import org.elasticsearch.xpack.core.security.action.user.PutUserAction;
import org.elasticsearch.xpack.core.security.action.user.PutUserRequest;
import org.elasticsearch.xpack.core.security.action.user.PutUserResponse;
import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm;
+ import org.elasticsearch.xpack.core.security.support.Validation;
import org.elasticsearch.xpack.core.security.user.AnonymousUser;
import org.elasticsearch.xpack.core.security.user.SystemUser;
+ import org.elasticsearch.xpack.core.security.user.XPackSecurityUser;
import org.elasticsearch.xpack.core.security.user.XPackUser;
import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
+ import static org.elasticsearch.action.ValidateActions.addValidationError;
public class TransportPutUserAction extends HandledTransportAction<PutUserRequest, PutUserResponse> {
private final NativeUsersStore usersStore;
@@ -36,37 +41,62 @@ public class TransportPutUserAction extends HandledTransportAction<PutUserRequest, PutUserResponse> {
@Override
protected void doExecute(Task task, final PutUserRequest request, final ActionListener<PutUserResponse> listener) {
+     final ActionRequestValidationException validationException = validateRequest(request);
+     if (validationException != null) {
+         listener.onFailure(validationException);
+     } else {
+         usersStore.putUser(request, new ActionListener<Boolean>() {
+             @Override
+             public void onResponse(Boolean created) {
+                 if (created) {
+                     logger.info("added user [{}]", request.username());
+                 } else {
+                     logger.info("updated user [{}]", request.username());
+                 }
+                 listener.onResponse(new PutUserResponse(created));
+             }
+             @Override
+             public void onFailure(Exception e) {
+                 logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to put user [{}]", request.username()), e);
+                 listener.onFailure(e);
+             }
+         });
+     }
+ }
+ private ActionRequestValidationException validateRequest(PutUserRequest request) {
+     ActionRequestValidationException validationException = null;
      final String username = request.username();
      if (ClientReservedRealm.isReserved(username, settings)) {
          if (AnonymousUser.isAnonymousUsername(username, settings)) {
-             listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be modified via the API"));
-             return;
+             validationException =
+                 addValidationError("user [" + username + "] is anonymous and cannot be modified via the API", validationException);
          } else {
-             listener.onFailure(new IllegalArgumentException("user [" + username + "] is reserved and only the " +
-                 "password can be changed"));
-             return;
+             validationException = addValidationError("user [" + username + "] is reserved and only the " +
+                 "password can be changed", validationException);
          }
-     } else if (SystemUser.NAME.equals(username) || XPackUser.NAME.equals(username)) {
-         listener.onFailure(new IllegalArgumentException("user [" + username + "] is internal"));
-         return;
+     } else if (SystemUser.NAME.equals(username) || XPackUser.NAME.equals(username) || XPackSecurityUser.NAME.equals(username)) {
+         validationException = addValidationError("user [" + username + "] is internal", validationException);
+     } else {
+         Validation.Error usernameError = Validation.Users.validateUsername(username, true, settings);
+         if (usernameError != null) {
+             validationException = addValidationError(usernameError.toString(), validationException);
+         }
      }
-     usersStore.putUser(request, new ActionListener<Boolean>() {
-         @Override
-         public void onResponse(Boolean created) {
-             if (created) {
-                 logger.info("added user [{}]", request.username());
-             } else {
-                 logger.info("updated user [{}]", request.username());
-             }
-             listener.onResponse(new PutUserResponse(created));
-         }
-         @Override
-         public void onFailure(Exception e) {
-             logger.error((Supplier<?>) () -> new ParameterizedMessage("failed to put user [{}]", request.username()), e);
-             listener.onFailure(e);
-         }
-     });
+     if (request.roles() != null) {
+         for (String role : request.roles()) {
+             Validation.Error roleNameError = Validation.Roles.validateRoleName(role, true);
+             if (roleNameError != null) {
+                 validationException = addValidationError(roleNameError.toString(), validationException);
+             }
+         }
+     }
+     if (request.password() != null) {
+         validationException = addValidationError("password should never be passed to the transport action", validationException);
+     }
+     return validationException;
 }
}
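Note (added for orientation, not part of the diff): username, role-name and password problems now accumulate into a single ActionRequestValidationException instead of failing one at a time. A minimal sketch of the accumulation pattern; the messages below are examples taken from the diff, everything else is illustrative.

    // Sketch only: each problem appends to the same exception, so the caller sees every error at once.
    // import static org.elasticsearch.action.ValidateActions.addValidationError;
    ActionRequestValidationException validationException = null;
    validationException = addValidationError("user [foo] is internal", validationException);
    validationException = addValidationError("password should never be passed to the transport action", validationException);
    // validationException now carries both messages; doExecute() hands it to listener.onFailure(...)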


@ -1,44 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security;
import org.elasticsearch.bootstrap.BootstrapContext;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.XPackSettings;
import javax.crypto.SecretKeyFactory;
import java.security.NoSuchAlgorithmException;
public class PasswordHashingAlgorithmBootstrapCheckTests extends ESTestCase {
public void testPasswordHashingAlgorithmBootstrapCheck() {
Settings settings = Settings.EMPTY;
assertFalse(new PasswordHashingAlgorithmBootstrapCheck().check(new BootstrapContext(settings, null)).isFailure());
// The following two will always pass because for now we only test in environments where PBKDF2WithHMACSHA512 is supported
assertTrue(isSecretkeyFactoryAlgoAvailable("PBKDF2WithHMACSHA512"));
settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2_10000").build();
assertFalse(new PasswordHashingAlgorithmBootstrapCheck().check(new BootstrapContext(settings, null)).isFailure());
settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "PBKDF2").build();
assertFalse(new PasswordHashingAlgorithmBootstrapCheck().check(new BootstrapContext(settings, null)).isFailure());
settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "BCRYPT").build();
assertFalse(new PasswordHashingAlgorithmBootstrapCheck().check(new BootstrapContext(settings, null)).isFailure());
settings = Settings.builder().put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), "BCRYPT11").build();
assertFalse(new PasswordHashingAlgorithmBootstrapCheck().check(new BootstrapContext(settings, null)).isFailure());
}
private boolean isSecretkeyFactoryAlgoAvailable(String algorithmId) {
try {
SecretKeyFactory.getInstance(algorithmId);
return true;
} catch (NoSuchAlgorithmException e) {
return false;
}
}
}


@ -39,16 +39,6 @@ public class PutUserRequestTests extends ESTestCase {
assertThat(validation.validationErrors().size(), is(1)); assertThat(validation.validationErrors().size(), is(1));
} }
public void testValidateRejectsUserNameThatHasInvalidCharacters() throws Exception {
final PutUserRequest request = new PutUserRequest();
request.username("fóóbár");
request.roles("bar");
final ActionRequestValidationException validation = request.validate();
assertThat(validation, is(notNullValue()));
assertThat(validation.validationErrors(), contains(containsString("must be")));
assertThat(validation.validationErrors().size(), is(1));
}
public void testValidateRejectsMetaDataWithLeadingUnderscore() throws Exception { public void testValidateRejectsMetaDataWithLeadingUnderscore() throws Exception {
final PutUserRequest request = new PutUserRequest(); final PutUserRequest request = new PutUserRequest();
request.username("foo"); request.username("foo");


@ -7,6 +7,7 @@ package org.elasticsearch.xpack.security.action.user;
import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.ValidationException;
@ -37,6 +38,7 @@ import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
@ -194,12 +196,32 @@ public class TransportPutUserActionTests extends ESTestCase {
} }
}); });
assertThat(throwableRef.get(), is(nullValue()));
assertThat(responseRef.get(), is(notNullValue())); assertThat(responseRef.get(), is(notNullValue()));
assertThat(responseRef.get().created(), is(created)); assertThat(responseRef.get().created(), is(created));
assertThat(throwableRef.get(), is(nullValue()));
verify(usersStore, times(1)).putUser(eq(request), any(ActionListener.class)); verify(usersStore, times(1)).putUser(eq(request), any(ActionListener.class));
} }
public void testInvalidUser() {
NativeUsersStore usersStore = mock(NativeUsersStore.class);
TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class),
usersStore, transportService);
final PutUserRequest request = new PutUserRequest();
request.username("fóóbár");
request.roles("bar");
ActionRequestValidationException validation = request.validate();
assertNull(validation);
PlainActionFuture<PutUserResponse> responsePlainActionFuture = new PlainActionFuture<>();
action.doExecute(mock(Task.class), request, responsePlainActionFuture);
validation = expectThrows(ActionRequestValidationException.class, responsePlainActionFuture::actionGet);
assertThat(validation.validationErrors(), contains(containsString("must be")));
assertThat(validation.validationErrors().size(), is(1));
}
public void testException() { public void testException() {
final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new ValidationException()); final Exception e = randomFrom(new ElasticsearchSecurityException(""), new IllegalStateException(), new ValidationException());
final User user = new User("joe"); final User user = new User("joe");


@ -13,7 +13,6 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.SecureString;
@ -492,14 +491,14 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
client.preparePutUser("joe", randomAlphaOfLengthBetween(0, 5).toCharArray(), hasher, client.preparePutUser("joe", randomAlphaOfLengthBetween(0, 5).toCharArray(), hasher,
"admin_role").get(); "admin_role").get();
fail("cannot create a user without a password < 6 characters"); fail("cannot create a user without a password < 6 characters");
- } catch (ValidationException v) {
+ } catch (IllegalArgumentException v) {
assertThat(v.getMessage().contains("password"), is(true)); assertThat(v.getMessage().contains("password"), is(true));
} }
} }
public void testCannotCreateUserWithInvalidCharactersInName() throws Exception { public void testCannotCreateUserWithInvalidCharactersInName() throws Exception {
SecurityClient client = securityClient(); SecurityClient client = securityClient();
- ValidationException v = expectThrows(ValidationException.class,
+ IllegalArgumentException v = expectThrows(IllegalArgumentException.class,
() -> client.preparePutUser("fóóbár", "my-am@zing-password".toCharArray(), hasher, () -> client.preparePutUser("fóóbár", "my-am@zing-password".toCharArray(), hasher,
"admin_role").get() "admin_role").get()
); );
@ -533,7 +532,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
() -> securityClient().preparePutUser(username, randomBoolean() ? SecuritySettingsSourceField.TEST_PASSWORD.toCharArray() () -> securityClient().preparePutUser(username, randomBoolean() ? SecuritySettingsSourceField.TEST_PASSWORD.toCharArray()
: null, hasher, "admin").get()); : null, hasher, "admin").get());
assertThat(exception.getMessage(), containsString("Username [" + username + "] is reserved")); assertThat(exception.getMessage(), containsString("user [" + username + "] is reserved"));
exception = expectThrows(IllegalArgumentException.class, exception = expectThrows(IllegalArgumentException.class,
() -> securityClient().prepareDeleteUser(username).get()); () -> securityClient().prepareDeleteUser(username).get());
@ -551,7 +550,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase {
exception = expectThrows(IllegalArgumentException.class, exception = expectThrows(IllegalArgumentException.class,
() -> securityClient().preparePutUser(AnonymousUser.DEFAULT_ANONYMOUS_USERNAME, "foobar".toCharArray(), () -> securityClient().preparePutUser(AnonymousUser.DEFAULT_ANONYMOUS_USERNAME, "foobar".toCharArray(),
hasher).get()); hasher).get());
assertThat(exception.getMessage(), containsString("Username [" + AnonymousUser.DEFAULT_ANONYMOUS_USERNAME + "] is reserved")); assertThat(exception.getMessage(), containsString("user [" + AnonymousUser.DEFAULT_ANONYMOUS_USERNAME + "] is anonymous"));
exception = expectThrows(IllegalArgumentException.class, exception = expectThrows(IllegalArgumentException.class,
() -> securityClient().preparePutUser(SystemUser.NAME, "foobar".toCharArray(), hasher).get()); () -> securityClient().preparePutUser(SystemUser.NAME, "foobar".toCharArray(), hasher).get());


@ -83,7 +83,7 @@ public class ReservedRealmTests extends ESTestCase {
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> new ReservedRealm(mock(Environment.class), IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> new ReservedRealm(mock(Environment.class),
invalidSettings, usersStore, new AnonymousUser(Settings.EMPTY), securityIndex, threadPool)); invalidSettings, usersStore, new AnonymousUser(Settings.EMPTY), securityIndex, threadPool));
assertThat(exception.getMessage(), containsString(invalidAlgoId)); assertThat(exception.getMessage(), containsString(invalidAlgoId));
assertThat(exception.getMessage(), containsString("Only pbkdf2 or bcrypt family algorithms can be used for password hashing")); assertThat(exception.getMessage(), containsString("Invalid algorithm"));
} }
public void testReservedUserEmptyPasswordAuthenticationFails() throws Throwable { public void testReservedUserEmptyPasswordAuthenticationFails() throws Throwable {


@ -92,12 +92,7 @@ import org.elasticsearch.xpack.watcher.actions.slack.SlackActionFactory;
import org.elasticsearch.xpack.watcher.actions.webhook.WebhookAction; import org.elasticsearch.xpack.watcher.actions.webhook.WebhookAction;
import org.elasticsearch.xpack.watcher.actions.webhook.WebhookActionFactory; import org.elasticsearch.xpack.watcher.actions.webhook.WebhookActionFactory;
import org.elasticsearch.xpack.watcher.common.http.HttpClient; import org.elasticsearch.xpack.watcher.common.http.HttpClient;
import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate;
import org.elasticsearch.xpack.watcher.common.http.HttpSettings; import org.elasticsearch.xpack.watcher.common.http.HttpSettings;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthFactory;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthRegistry;
import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuth;
import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuthFactory;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.condition.ArrayCompareCondition; import org.elasticsearch.xpack.watcher.condition.ArrayCompareCondition;
import org.elasticsearch.xpack.watcher.condition.CompareCondition; import org.elasticsearch.xpack.watcher.condition.CompareCondition;
@ -264,12 +259,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa
new WatcherIndexTemplateRegistry(settings, clusterService, threadPool, client); new WatcherIndexTemplateRegistry(settings, clusterService, threadPool, client);
// http client // http client
Map<String, HttpAuthFactory> httpAuthFactories = new HashMap<>(); httpClient = new HttpClient(settings, getSslService(), cryptoService);
httpAuthFactories.put(BasicAuth.TYPE, new BasicAuthFactory(cryptoService));
// TODO: add more auth types, or remove this indirection
HttpAuthRegistry httpAuthRegistry = new HttpAuthRegistry(httpAuthFactories);
HttpRequestTemplate.Parser httpTemplateParser = new HttpRequestTemplate.Parser(httpAuthRegistry);
httpClient = new HttpClient(settings, httpAuthRegistry, getSslService());
// notification // notification
EmailService emailService = new EmailService(settings, cryptoService, clusterService.getClusterSettings()); EmailService emailService = new EmailService(settings, cryptoService, clusterService.getClusterSettings());
@ -286,11 +276,9 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa
TextTemplateEngine templateEngine = new TextTemplateEngine(settings, scriptService); TextTemplateEngine templateEngine = new TextTemplateEngine(settings, scriptService);
Map<String, EmailAttachmentParser> emailAttachmentParsers = new HashMap<>(); Map<String, EmailAttachmentParser> emailAttachmentParsers = new HashMap<>();
emailAttachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient, httpTemplateParser, emailAttachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient, templateEngine));
templateEngine));
emailAttachmentParsers.put(DataAttachmentParser.TYPE, new DataAttachmentParser()); emailAttachmentParsers.put(DataAttachmentParser.TYPE, new DataAttachmentParser());
emailAttachmentParsers.put(ReportingAttachmentParser.TYPE, new ReportingAttachmentParser(settings, httpClient, templateEngine, emailAttachmentParsers.put(ReportingAttachmentParser.TYPE, new ReportingAttachmentParser(settings, httpClient, templateEngine));
httpAuthRegistry));
EmailAttachmentsParser emailAttachmentsParser = new EmailAttachmentsParser(emailAttachmentParsers); EmailAttachmentsParser emailAttachmentsParser = new EmailAttachmentsParser(emailAttachmentParsers);
// conditions // conditions
@ -310,7 +298,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa
// actions // actions
final Map<String, ActionFactory> actionFactoryMap = new HashMap<>(); final Map<String, ActionFactory> actionFactoryMap = new HashMap<>();
actionFactoryMap.put(EmailAction.TYPE, new EmailActionFactory(settings, emailService, templateEngine, emailAttachmentsParser)); actionFactoryMap.put(EmailAction.TYPE, new EmailActionFactory(settings, emailService, templateEngine, emailAttachmentsParser));
actionFactoryMap.put(WebhookAction.TYPE, new WebhookActionFactory(settings, httpClient, httpTemplateParser, templateEngine)); actionFactoryMap.put(WebhookAction.TYPE, new WebhookActionFactory(settings, httpClient, templateEngine));
actionFactoryMap.put(IndexAction.TYPE, new IndexActionFactory(settings, client)); actionFactoryMap.put(IndexAction.TYPE, new IndexActionFactory(settings, client));
actionFactoryMap.put(LoggingAction.TYPE, new LoggingActionFactory(settings, templateEngine)); actionFactoryMap.put(LoggingAction.TYPE, new LoggingActionFactory(settings, templateEngine));
actionFactoryMap.put(HipChatAction.TYPE, new HipChatActionFactory(settings, templateEngine, hipChatService)); actionFactoryMap.put(HipChatAction.TYPE, new HipChatActionFactory(settings, templateEngine, hipChatService));
@ -324,7 +312,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa
final Map<String, InputFactory> inputFactories = new HashMap<>(); final Map<String, InputFactory> inputFactories = new HashMap<>();
inputFactories.put(SearchInput.TYPE, new SearchInputFactory(settings, client, xContentRegistry, scriptService)); inputFactories.put(SearchInput.TYPE, new SearchInputFactory(settings, client, xContentRegistry, scriptService));
inputFactories.put(SimpleInput.TYPE, new SimpleInputFactory(settings)); inputFactories.put(SimpleInput.TYPE, new SimpleInputFactory(settings));
inputFactories.put(HttpInput.TYPE, new HttpInputFactory(settings, httpClient, templateEngine, httpTemplateParser)); inputFactories.put(HttpInput.TYPE, new HttpInputFactory(settings, httpClient, templateEngine));
inputFactories.put(NoneInput.TYPE, new NoneInputFactory(settings)); inputFactories.put(NoneInput.TYPE, new NoneInputFactory(settings));
inputFactories.put(TransformInput.TYPE, new TransformInputFactory(settings, transformRegistry)); inputFactories.put(TransformInput.TYPE, new TransformInputFactory(settings, transformRegistry));
final InputRegistry inputRegistry = new InputRegistry(settings, inputFactories); final InputRegistry inputRegistry = new InputRegistry(settings, inputFactories);
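The net effect of the Watcher.java hunk above is that the HttpAuthRegistry / HttpRequestTemplate.Parser indirection disappears and every consumer takes the HttpClient and TextTemplateEngine directly. A minimal sketch of the new wiring, using only the constructors changed in this commit; settings, sslService, cryptoService and templateEngine stand in for values createComponents already has and are illustrative names, not part of the diff:

    // simplified construction path after the HttpAuthRegistry removal (hedged sketch)
    HttpClient httpClient = new HttpClient(settings, sslService, cryptoService);

    Map<String, EmailAttachmentParser> emailAttachmentParsers = new HashMap<>();
    emailAttachmentParsers.put(HttpEmailAttachementParser.TYPE,
            new HttpEmailAttachementParser(httpClient, templateEngine));
    emailAttachmentParsers.put(ReportingAttachmentParser.TYPE,
            new ReportingAttachmentParser(settings, httpClient, templateEngine));

    Map<String, ActionFactory> actionFactoryMap = new HashMap<>();
    actionFactoryMap.put(WebhookAction.TYPE,
            new WebhookActionFactory(settings, httpClient, templateEngine));

    Map<String, InputFactory> inputFactories = new HashMap<>();
    inputFactories.put(HttpInput.TYPE,
            new HttpInputFactory(settings, httpClient, templateEngine));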


@ -47,7 +47,7 @@ public class ExecutablePagerDutyAction extends ExecutableAction<PagerDutyAction>
return new PagerDutyAction.Result.Simulated(event); return new PagerDutyAction.Result.Simulated(event);
} }
SentEvent sentEvent = account.send(event, payload); SentEvent sentEvent = account.send(event, payload, ctx.id().watchId());
return new PagerDutyAction.Result.Executed(account.getName(), sentEvent); return new PagerDutyAction.Result.Executed(account.getName(), sentEvent);
} }


@ -55,10 +55,9 @@ public class WebhookAction implements Action {
return requestTemplate.toXContent(builder, params); return requestTemplate.toXContent(builder, params);
} }
public static WebhookAction parse(String watchId, String actionId, XContentParser parser, public static WebhookAction parse(String watchId, String actionId, XContentParser parser) throws IOException {
HttpRequestTemplate.Parser requestParser) throws IOException {
try { try {
HttpRequestTemplate request = requestParser.parse(parser); HttpRequestTemplate request = HttpRequestTemplate.Parser.parse(parser);
return new WebhookAction(request); return new WebhookAction(request);
} catch (ElasticsearchParseException pe) { } catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException("could not parse [{}] action [{}/{}]. failed parsing http request template", pe, TYPE, throw new ElasticsearchParseException("could not parse [{}] action [{}/{}]. failed parsing http request template", pe, TYPE,


@ -10,7 +10,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.watcher.actions.ActionFactory; import org.elasticsearch.xpack.core.watcher.actions.ActionFactory;
import org.elasticsearch.xpack.watcher.common.http.HttpClient; import org.elasticsearch.xpack.watcher.common.http.HttpClient;
import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import java.io.IOException; import java.io.IOException;
@ -18,21 +17,18 @@ import java.io.IOException;
public class WebhookActionFactory extends ActionFactory { public class WebhookActionFactory extends ActionFactory {
private final HttpClient httpClient; private final HttpClient httpClient;
private final HttpRequestTemplate.Parser requestTemplateParser;
private final TextTemplateEngine templateEngine; private final TextTemplateEngine templateEngine;
public WebhookActionFactory(Settings settings, HttpClient httpClient, HttpRequestTemplate.Parser requestTemplateParser, public WebhookActionFactory(Settings settings, HttpClient httpClient, TextTemplateEngine templateEngine) {
TextTemplateEngine templateEngine) {
super(Loggers.getLogger(ExecutableWebhookAction.class, settings)); super(Loggers.getLogger(ExecutableWebhookAction.class, settings));
this.httpClient = httpClient; this.httpClient = httpClient;
this.requestTemplateParser = requestTemplateParser;
this.templateEngine = templateEngine; this.templateEngine = templateEngine;
} }
@Override @Override
public ExecutableWebhookAction parseExecutable(String watchId, String actionId, XContentParser parser) throws IOException { public ExecutableWebhookAction parseExecutable(String watchId, String actionId, XContentParser parser) throws IOException {
return new ExecutableWebhookAction(WebhookAction.parse(watchId, actionId, parser, requestTemplateParser), return new ExecutableWebhookAction(WebhookAction.parse(watchId, actionId, parser),
actionLogger, httpClient, templateEngine); actionLogger, httpClient, templateEngine);
} }


@ -3,22 +3,22 @@
* or more contributor license agreements. Licensed under the Elastic License; * or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License. * you may not use this file except in compliance with the Elastic License.
*/ */
package org.elasticsearch.xpack.watcher.common.http.auth.basic; package org.elasticsearch.xpack.watcher.common.http;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.watcher.common.secret.Secret; import org.elasticsearch.xpack.core.watcher.common.secret.Secret;
import org.elasticsearch.xpack.core.watcher.crypto.CryptoService; import org.elasticsearch.xpack.core.watcher.crypto.CryptoService;
import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherParams;
import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherXContentParser; import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherXContentParser;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuth;
import java.io.IOException; import java.io.IOException;
import java.util.Objects; import java.util.Objects;
public class BasicAuth implements HttpAuth { public class BasicAuth implements ToXContentObject {
public static final String TYPE = "basic"; public static final String TYPE = "basic";
@ -34,11 +34,6 @@ public class BasicAuth implements HttpAuth {
this.password = password; this.password = password;
} }
@Override
public String type() {
return TYPE;
}
public String getUsername() { public String getUsername() {
return username; return username;
} }
@ -74,7 +69,7 @@ public class BasicAuth implements HttpAuth {
return builder.endObject(); return builder.endObject();
} }
public static BasicAuth parse(XContentParser parser) throws IOException { public static BasicAuth parseInner(XContentParser parser) throws IOException {
String username = null; String username = null;
Secret password = null; Secret password = null;
@ -103,6 +98,20 @@ public class BasicAuth implements HttpAuth {
return new BasicAuth(username, password); return new BasicAuth(username, password);
} }
public static BasicAuth parse(XContentParser parser) throws IOException {
String type = null;
XContentParser.Token token;
BasicAuth auth = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
type = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT && type != null) {
auth = BasicAuth.parseInner(parser);
}
}
return auth;
}
interface Field { interface Field {
ParseField USERNAME = new ParseField("username"); ParseField USERNAME = new ParseField("username");
ParseField PASSWORD = new ParseField("password"); ParseField PASSWORD = new ParseField("password");
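The consolidated BasicAuth.parse above still reads the same "auth" object a watch definition has always carried; only the pluggable type registry around it is gone, and "basic" is the sole supported type. A hedged sketch of that object built with XContentBuilder (the credential values are placeholders):

    XContentBuilder auth = XContentFactory.jsonBuilder();
    auth.startObject();                                  // the "auth" object of an http request
    auth.startObject(BasicAuth.TYPE);                    // "basic"
    auth.field("username", "elastic");                   // Field.USERNAME
    auth.field("password", "changeme");                  // Field.PASSWORD, held as a Secret once parsed
    auth.endObject();
    auth.endObject();
    // a parser positioned on this object and handed to BasicAuth.parse(parser)
    // yields the BasicAuth value object used by HttpRequest and HttpRequestTemplate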


@ -10,6 +10,8 @@ import org.apache.http.HttpHeaders;
import org.apache.http.HttpHost; import org.apache.http.HttpHost;
import org.apache.http.NameValuePair; import org.apache.http.NameValuePair;
import org.apache.http.auth.AuthScope; import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.AuthCache; import org.apache.http.client.AuthCache;
import org.apache.http.client.CredentialsProvider; import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.config.RequestConfig; import org.apache.http.client.config.RequestConfig;
@ -42,8 +44,7 @@ import org.elasticsearch.core.internal.io.Streams;
import org.elasticsearch.xpack.core.common.socket.SocketAccess; import org.elasticsearch.xpack.core.common.socket.SocketAccess;
import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.ssl.SSLService;
import org.elasticsearch.xpack.watcher.common.http.auth.ApplicableHttpAuth; import org.elasticsearch.xpack.core.watcher.crypto.CryptoService;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthRegistry;
import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HostnameVerifier;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
@ -66,20 +67,20 @@ public class HttpClient extends AbstractComponent implements Closeable {
// you are querying a remote Elasticsearch cluster // you are querying a remote Elasticsearch cluster
private static final int MAX_CONNECTIONS = 500; private static final int MAX_CONNECTIONS = 500;
private final HttpAuthRegistry httpAuthRegistry;
private final CloseableHttpClient client; private final CloseableHttpClient client;
private final HttpProxy settingsProxy; private final HttpProxy settingsProxy;
private final TimeValue defaultConnectionTimeout; private final TimeValue defaultConnectionTimeout;
private final TimeValue defaultReadTimeout; private final TimeValue defaultReadTimeout;
private final ByteSizeValue maxResponseSize; private final ByteSizeValue maxResponseSize;
private final CryptoService cryptoService;
public HttpClient(Settings settings, HttpAuthRegistry httpAuthRegistry, SSLService sslService) { public HttpClient(Settings settings, SSLService sslService, CryptoService cryptoService) {
super(settings); super(settings);
this.httpAuthRegistry = httpAuthRegistry;
this.defaultConnectionTimeout = HttpSettings.CONNECTION_TIMEOUT.get(settings); this.defaultConnectionTimeout = HttpSettings.CONNECTION_TIMEOUT.get(settings);
this.defaultReadTimeout = HttpSettings.READ_TIMEOUT.get(settings); this.defaultReadTimeout = HttpSettings.READ_TIMEOUT.get(settings);
this.maxResponseSize = HttpSettings.MAX_HTTP_RESPONSE_SIZE.get(settings); this.maxResponseSize = HttpSettings.MAX_HTTP_RESPONSE_SIZE.get(settings);
this.settingsProxy = getProxyFromSettings(); this.settingsProxy = getProxyFromSettings();
this.cryptoService = cryptoService;
HttpClientBuilder clientBuilder = HttpClientBuilder.create(); HttpClientBuilder clientBuilder = HttpClientBuilder.create();
@ -139,9 +140,10 @@ public class HttpClient extends AbstractComponent implements Closeable {
HttpClientContext localContext = HttpClientContext.create(); HttpClientContext localContext = HttpClientContext.create();
// auth // auth
if (request.auth() != null) { if (request.auth() != null) {
ApplicableHttpAuth applicableAuth = httpAuthRegistry.createApplicable(request.auth);
CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
applicableAuth.apply(credentialsProvider, new AuthScope(request.host, request.port)); Credentials credentials = new UsernamePasswordCredentials(request.auth().username,
new String(request.auth().password.text(cryptoService)));
credentialsProvider.setCredentials(new AuthScope(request.host, request.port), credentials);
localContext.setCredentialsProvider(credentialsProvider); localContext.setCredentialsProvider(credentialsProvider);
// preemptive auth, no need to wait for a 401 first // preemptive auth, no need to wait for a 401 first


@ -21,8 +21,6 @@ import org.elasticsearch.xpack.core.watcher.support.WatcherDateTimeUtils;
import org.elasticsearch.xpack.core.watcher.support.WatcherUtils; import org.elasticsearch.xpack.core.watcher.support.WatcherUtils;
import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherParams;
import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherXContentParser; import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherXContentParser;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuth;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthRegistry;
import java.io.ByteArrayInputStream; import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
@ -50,7 +48,7 @@ public class HttpRequest implements ToXContentObject {
@Nullable final String path; @Nullable final String path;
final Map<String, String> params; final Map<String, String> params;
final Map<String, String> headers; final Map<String, String> headers;
@Nullable final HttpAuth auth; @Nullable final BasicAuth auth;
@Nullable final String body; @Nullable final String body;
@Nullable final TimeValue connectionTimeout; @Nullable final TimeValue connectionTimeout;
@Nullable final TimeValue readTimeout; @Nullable final TimeValue readTimeout;
@ -58,7 +56,7 @@ public class HttpRequest implements ToXContentObject {
public HttpRequest(String host, int port, @Nullable Scheme scheme, @Nullable HttpMethod method, @Nullable String path, public HttpRequest(String host, int port, @Nullable Scheme scheme, @Nullable HttpMethod method, @Nullable String path,
@Nullable Map<String, String> params, @Nullable Map<String, String> headers, @Nullable Map<String, String> params, @Nullable Map<String, String> headers,
@Nullable HttpAuth auth, @Nullable String body, @Nullable TimeValue connectionTimeout, @Nullable BasicAuth auth, @Nullable String body, @Nullable TimeValue connectionTimeout,
@Nullable TimeValue readTimeout, @Nullable HttpProxy proxy) { @Nullable TimeValue readTimeout, @Nullable HttpProxy proxy) {
this.host = host; this.host = host;
this.port = port; this.port = port;
@ -102,7 +100,7 @@ public class HttpRequest implements ToXContentObject {
return headers; return headers;
} }
public HttpAuth auth() { public BasicAuth auth() {
return auth; return auth;
} }
@ -166,7 +164,7 @@ public class HttpRequest implements ToXContentObject {
} }
if (auth != null) { if (auth != null) {
builder.startObject(Field.AUTH.getPreferredName()) builder.startObject(Field.AUTH.getPreferredName())
.field(auth.type(), auth, toXContentParams) .field(BasicAuth.TYPE, auth, toXContentParams)
.endObject(); .endObject();
} }
if (body != null) { if (body != null) {
@ -234,7 +232,7 @@ public class HttpRequest implements ToXContentObject {
sb.append("], "); sb.append("], ");
} }
if (auth != null) { if (auth != null) {
sb.append("auth=[").append(auth.type()).append("], "); sb.append("auth=[").append(BasicAuth.TYPE).append("], ");
} }
sb.append("connection_timeout=[").append(connectionTimeout).append("], "); sb.append("connection_timeout=[").append(connectionTimeout).append("], ");
sb.append("read_timeout=[").append(readTimeout).append("], "); sb.append("read_timeout=[").append(readTimeout).append("], ");
@ -254,14 +252,7 @@ public class HttpRequest implements ToXContentObject {
} }
public static class Parser { public static class Parser {
public static HttpRequest parse(XContentParser parser) throws IOException {
private final HttpAuthRegistry httpAuthRegistry;
public Parser(HttpAuthRegistry httpAuthRegistry) {
this.httpAuthRegistry = httpAuthRegistry;
}
public HttpRequest parse(XContentParser parser) throws IOException {
Builder builder = new Builder(); Builder builder = new Builder();
XContentParser.Token token; XContentParser.Token token;
String currentFieldName = null; String currentFieldName = null;
@ -275,7 +266,7 @@ public class HttpRequest implements ToXContentObject {
throw new ElasticsearchParseException("could not parse http request. could not parse [{}] field", currentFieldName); throw new ElasticsearchParseException("could not parse http request. could not parse [{}] field", currentFieldName);
} }
} else if (Field.AUTH.match(currentFieldName, parser.getDeprecationHandler())) { } else if (Field.AUTH.match(currentFieldName, parser.getDeprecationHandler())) {
builder.auth(httpAuthRegistry.parse(parser)); builder.auth(BasicAuth.parse(parser));
} else if (HttpRequest.Field.CONNECTION_TIMEOUT.match(currentFieldName, parser.getDeprecationHandler())) { } else if (HttpRequest.Field.CONNECTION_TIMEOUT.match(currentFieldName, parser.getDeprecationHandler())) {
builder.connectionTimeout(TimeValue.timeValueMillis(parser.longValue())); builder.connectionTimeout(TimeValue.timeValueMillis(parser.longValue()));
} else if (HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN.match(currentFieldName, parser.getDeprecationHandler())) { } else if (HttpRequest.Field.CONNECTION_TIMEOUT_HUMAN.match(currentFieldName, parser.getDeprecationHandler())) {
@ -302,7 +293,7 @@ public class HttpRequest implements ToXContentObject {
builder.setHeaders((Map) WatcherUtils.flattenModel(parser.map())); builder.setHeaders((Map) WatcherUtils.flattenModel(parser.map()));
} else if (Field.PARAMS.match(currentFieldName, parser.getDeprecationHandler())) { } else if (Field.PARAMS.match(currentFieldName, parser.getDeprecationHandler())) {
builder.setParams((Map) WatcherUtils.flattenModel(parser.map())); builder.setParams((Map) WatcherUtils.flattenModel(parser.map()));
} else if (Field.BODY.match(currentFieldName, parser.getDeprecationHandler())) { } else if (Field.BODY.match(currentFieldName, parser.getDeprecationHandler())) {
builder.body(parser.text()); builder.body(parser.text());
} else { } else {
throw new ElasticsearchParseException("could not parse http request. unexpected object field [{}]", throw new ElasticsearchParseException("could not parse http request. unexpected object field [{}]",
@ -360,7 +351,7 @@ public class HttpRequest implements ToXContentObject {
private String path; private String path;
private Map<String, String> params = new HashMap<>(); private Map<String, String> params = new HashMap<>();
private Map<String, String> headers = new HashMap<>(); private Map<String, String> headers = new HashMap<>();
private HttpAuth auth; private BasicAuth auth;
private String body; private String body;
private TimeValue connectionTimeout; private TimeValue connectionTimeout;
private TimeValue readTimeout; private TimeValue readTimeout;
@ -421,7 +412,7 @@ public class HttpRequest implements ToXContentObject {
return this; return this;
} }
public Builder auth(HttpAuth auth) { public Builder auth(BasicAuth auth) {
this.auth = auth; this.auth = auth;
return this; return this;
} }
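With ApplicableHttpAuth gone, callers attach the BasicAuth value object to the request and the client applies the credentials itself (see the HttpClient hunk above). A hedged sketch, assuming the BasicAuth(String, Secret) constructor used by parseInner and HttpClient#execute(HttpRequest) as the send method; host, path and credentials are placeholders:

    HttpRequest request = HttpRequest.builder("localhost", 9200)
            .method(HttpMethod.GET)
            .path("/_cluster/health")
            .auth(new BasicAuth("elastic", new Secret("changeme".toCharArray())))
            .build();
    HttpResponse response = httpClient.execute(request);   // credentials set preemptively, per the hunk above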


@ -18,8 +18,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.RestUtils; import org.elasticsearch.rest.RestUtils;
import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.xpack.core.watcher.support.WatcherDateTimeUtils; import org.elasticsearch.xpack.core.watcher.support.WatcherDateTimeUtils;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuth;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthRegistry;
import org.elasticsearch.xpack.watcher.common.text.TextTemplate; import org.elasticsearch.xpack.watcher.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
@ -42,14 +40,14 @@ public class HttpRequestTemplate implements ToXContentObject {
private final TextTemplate path; private final TextTemplate path;
private final Map<String, TextTemplate> params; private final Map<String, TextTemplate> params;
private final Map<String, TextTemplate> headers; private final Map<String, TextTemplate> headers;
private final HttpAuth auth; private final BasicAuth auth;
private final TextTemplate body; private final TextTemplate body;
@Nullable private final TimeValue connectionTimeout; @Nullable private final TimeValue connectionTimeout;
@Nullable private final TimeValue readTimeout; @Nullable private final TimeValue readTimeout;
@Nullable private final HttpProxy proxy; @Nullable private final HttpProxy proxy;
public HttpRequestTemplate(String host, int port, @Nullable Scheme scheme, @Nullable HttpMethod method, @Nullable TextTemplate path, public HttpRequestTemplate(String host, int port, @Nullable Scheme scheme, @Nullable HttpMethod method, @Nullable TextTemplate path,
Map<String, TextTemplate> params, Map<String, TextTemplate> headers, HttpAuth auth, Map<String, TextTemplate> params, Map<String, TextTemplate> headers, BasicAuth auth,
TextTemplate body, @Nullable TimeValue connectionTimeout, @Nullable TimeValue readTimeout, TextTemplate body, @Nullable TimeValue connectionTimeout, @Nullable TimeValue readTimeout,
@Nullable HttpProxy proxy) { @Nullable HttpProxy proxy) {
this.host = host; this.host = host;
@ -94,7 +92,7 @@ public class HttpRequestTemplate implements ToXContentObject {
return headers; return headers;
} }
public HttpAuth auth() { public BasicAuth auth() {
return auth; return auth;
} }
@ -185,7 +183,7 @@ public class HttpRequestTemplate implements ToXContentObject {
} }
if (auth != null) { if (auth != null) {
builder.startObject(HttpRequest.Field.AUTH.getPreferredName()) builder.startObject(HttpRequest.Field.AUTH.getPreferredName())
.field(auth.type(), auth, params) .field(BasicAuth.TYPE, auth, params)
.endObject(); .endObject();
} }
if (body != null) { if (body != null) {
@ -261,14 +259,7 @@ public class HttpRequestTemplate implements ToXContentObject {
} }
public static class Parser { public static class Parser {
public static HttpRequestTemplate parse(XContentParser parser) throws IOException {
private final HttpAuthRegistry httpAuthRegistry;
public Parser(HttpAuthRegistry httpAuthRegistry) {
this.httpAuthRegistry = httpAuthRegistry;
}
public HttpRequestTemplate parse(XContentParser parser) throws IOException {
assert parser.currentToken() == XContentParser.Token.START_OBJECT; assert parser.currentToken() == XContentParser.Token.START_OBJECT;
Builder builder = new Builder(); Builder builder = new Builder();
@ -312,8 +303,8 @@ public class HttpRequestTemplate implements ToXContentObject {
} }
} else if (token == XContentParser.Token.START_OBJECT) { } else if (token == XContentParser.Token.START_OBJECT) {
if (HttpRequest.Field.AUTH.match(currentFieldName, parser.getDeprecationHandler())) { if (HttpRequest.Field.AUTH.match(currentFieldName, parser.getDeprecationHandler())) {
builder.auth(httpAuthRegistry.parse(parser)); builder.auth(BasicAuth.parse(parser));
} else { } else {
throw new ElasticsearchParseException("could not parse http request template. unexpected object field [{}]", throw new ElasticsearchParseException("could not parse http request template. unexpected object field [{}]",
currentFieldName); currentFieldName);
} }
@ -387,7 +378,7 @@ public class HttpRequestTemplate implements ToXContentObject {
private TextTemplate path; private TextTemplate path;
private final Map<String, TextTemplate> params = new HashMap<>(); private final Map<String, TextTemplate> params = new HashMap<>();
private final Map<String, TextTemplate> headers = new HashMap<>(); private final Map<String, TextTemplate> headers = new HashMap<>();
private HttpAuth auth; private BasicAuth auth;
private TextTemplate body; private TextTemplate body;
private TimeValue connectionTimeout; private TimeValue connectionTimeout;
private TimeValue readTimeout; private TimeValue readTimeout;
@ -444,7 +435,7 @@ public class HttpRequestTemplate implements ToXContentObject {
return this; return this;
} }
public Builder auth(HttpAuth auth) { public Builder auth(BasicAuth auth) {
this.auth = auth; this.auth = auth;
return this; return this;
} }


@ -1,51 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.common.http.auth;
import org.apache.http.auth.AuthScope;
import org.apache.http.client.CredentialsProvider;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.net.HttpURLConnection;
public abstract class ApplicableHttpAuth<Auth extends HttpAuth> implements ToXContentObject {
protected final Auth auth;
public ApplicableHttpAuth(Auth auth) {
this.auth = auth;
}
public final String type() {
return auth.type();
}
public abstract void apply(HttpURLConnection connection);
public abstract void apply(CredentialsProvider credsProvider, AuthScope authScope);
@Override
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return auth.toXContent(builder, params);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ApplicableHttpAuth<?> that = (ApplicableHttpAuth<?>) o;
return auth.equals(that.auth);
}
@Override
public int hashCode() {
return auth.hashCode();
}
}


@ -1,14 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.common.http.auth;
import org.elasticsearch.common.xcontent.ToXContentObject;
public interface HttpAuth extends ToXContentObject {
String type();
}


@ -1,25 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.common.http.auth;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
public abstract class HttpAuthFactory<Auth extends HttpAuth, AAuth extends ApplicableHttpAuth<Auth>> {
public abstract String type();
public abstract Auth parse(XContentParser parser) throws IOException;
public abstract AAuth createApplicable(Auth auth);
public AAuth parseApplicable(XContentParser parser) throws IOException {
Auth auth = parse(parser);
return createApplicable(auth);
}
}


@ -1,50 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.common.http.auth;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Map;
import static org.elasticsearch.xpack.core.watcher.support.Exceptions.illegalArgument;
public class HttpAuthRegistry {
private final Map<String, HttpAuthFactory> factories;
public HttpAuthRegistry(Map<String, HttpAuthFactory> factories) {
this.factories = factories;
}
public HttpAuth parse(XContentParser parser) throws IOException {
String type = null;
XContentParser.Token token;
HttpAuth auth = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
type = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT && type != null) {
HttpAuthFactory factory = factories.get(type);
if (factory == null) {
throw new ElasticsearchParseException("unknown http auth type [{}]", type);
}
auth = factory.parse(parser);
}
}
return auth;
}
public <A extends HttpAuth, AA extends ApplicableHttpAuth<A>> AA createApplicable(A auth) {
HttpAuthFactory factory = factories.get(auth.type());
if (factory == null) {
throw illegalArgument("unknown http auth type [{}]", auth.type());
}
return (AA) factory.createApplicable(auth);
}
}


@ -1,43 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.common.http.auth.basic;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.elasticsearch.xpack.core.watcher.crypto.CryptoService;
import org.elasticsearch.xpack.watcher.common.http.auth.ApplicableHttpAuth;
import java.net.HttpURLConnection;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
public class ApplicableBasicAuth extends ApplicableHttpAuth<BasicAuth> {
private final String basicAuth;
private final CryptoService cryptoService;
public ApplicableBasicAuth(BasicAuth auth, CryptoService service) {
super(auth);
basicAuth = headerValue(auth.username, auth.password.text(service));
this.cryptoService = service;
}
public static String headerValue(String username, char[] password) {
return "Basic " + Base64.getEncoder().encodeToString((username + ":" + new String(password)).getBytes(StandardCharsets.UTF_8));
}
public void apply(HttpURLConnection connection) {
connection.setRequestProperty("Authorization", basicAuth);
}
@Override
public void apply(CredentialsProvider credsProvider, AuthScope authScope) {
credsProvider.setCredentials(authScope,
new UsernamePasswordCredentials(auth.username, new String(auth.password.text(cryptoService))));
}
}


@ -1,35 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.common.http.auth.basic;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.core.watcher.crypto.CryptoService;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthFactory;
import java.io.IOException;
public class BasicAuthFactory extends HttpAuthFactory<BasicAuth, ApplicableBasicAuth> {
private final CryptoService cryptoService;
public BasicAuthFactory(@Nullable CryptoService cryptoService) {
this.cryptoService = cryptoService;
}
public String type() {
return BasicAuth.TYPE;
}
public BasicAuth parse(XContentParser parser) throws IOException {
return BasicAuth.parse(parser);
}
@Override
public ApplicableBasicAuth createApplicable(BasicAuth auth) {
return new ApplicableBasicAuth(auth, cryptoService);
}
}


@ -70,7 +70,7 @@ public class HttpInput implements Input {
return builder; return builder;
} }
public static HttpInput parse(String watchId, XContentParser parser, HttpRequestTemplate.Parser requestParser) throws IOException { public static HttpInput parse(String watchId, XContentParser parser) throws IOException {
Set<String> extract = null; Set<String> extract = null;
HttpRequestTemplate request = null; HttpRequestTemplate request = null;
HttpContentType expectedResponseBodyType = null; HttpContentType expectedResponseBodyType = null;
@ -82,7 +82,7 @@ public class HttpInput implements Input {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
} else if (Field.REQUEST.match(currentFieldName, parser.getDeprecationHandler())) { } else if (Field.REQUEST.match(currentFieldName, parser.getDeprecationHandler())) {
try { try {
request = requestParser.parse(parser); request = HttpRequestTemplate.Parser.parse(parser);
} catch (ElasticsearchParseException pe) { } catch (ElasticsearchParseException pe) {
throw new ElasticsearchParseException("could not parse [{}] input for watch [{}]. failed to parse http request " + throw new ElasticsearchParseException("could not parse [{}] input for watch [{}]. failed to parse http request " +
"template", pe, TYPE, watchId); "template", pe, TYPE, watchId);


@ -9,7 +9,6 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.watcher.common.http.HttpClient; import org.elasticsearch.xpack.watcher.common.http.HttpClient;
import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.input.InputFactory; import org.elasticsearch.xpack.watcher.input.InputFactory;
@ -19,14 +18,11 @@ public final class HttpInputFactory extends InputFactory<HttpInput, HttpInput.Re
private final HttpClient httpClient; private final HttpClient httpClient;
private final TextTemplateEngine templateEngine; private final TextTemplateEngine templateEngine;
private final HttpRequestTemplate.Parser requestTemplateParser;
public HttpInputFactory(Settings settings, HttpClient httpClient, TextTemplateEngine templateEngine, public HttpInputFactory(Settings settings, HttpClient httpClient, TextTemplateEngine templateEngine) {
HttpRequestTemplate.Parser requestTemplateParser) {
super(Loggers.getLogger(ExecutableHttpInput.class, settings)); super(Loggers.getLogger(ExecutableHttpInput.class, settings));
this.templateEngine = templateEngine; this.templateEngine = templateEngine;
this.httpClient = httpClient; this.httpClient = httpClient;
this.requestTemplateParser = requestTemplateParser;
} }
@Override @Override
@ -36,7 +32,7 @@ public final class HttpInputFactory extends InputFactory<HttpInput, HttpInput.Re
@Override @Override
public HttpInput parseInput(String watchId, XContentParser parser) throws IOException { public HttpInput parseInput(String watchId, XContentParser parser) throws IOException {
return HttpInput.parse(watchId, parser, requestTemplateParser); return HttpInput.parse(watchId, parser);
} }
@Override @Override


@ -34,13 +34,10 @@ public class HttpEmailAttachementParser implements EmailAttachmentParser<HttpReq
public static final String TYPE = "http"; public static final String TYPE = "http";
private final HttpClient httpClient; private final HttpClient httpClient;
private HttpRequestTemplate.Parser requestTemplateParser;
private final TextTemplateEngine templateEngine; private final TextTemplateEngine templateEngine;
public HttpEmailAttachementParser(HttpClient httpClient, HttpRequestTemplate.Parser requestTemplateParser, public HttpEmailAttachementParser(HttpClient httpClient, TextTemplateEngine templateEngine) {
TextTemplateEngine templateEngine) {
this.httpClient = httpClient; this.httpClient = httpClient;
this.requestTemplateParser = requestTemplateParser;
this.templateEngine = templateEngine; this.templateEngine = templateEngine;
} }
@ -65,7 +62,7 @@ public class HttpEmailAttachementParser implements EmailAttachmentParser<HttpReq
} else if (Fields.INLINE.match(currentFieldName, parser.getDeprecationHandler())) { } else if (Fields.INLINE.match(currentFieldName, parser.getDeprecationHandler())) {
inline = parser.booleanValue(); inline = parser.booleanValue();
} else if (Fields.REQUEST.match(currentFieldName, parser.getDeprecationHandler())) { } else if (Fields.REQUEST.match(currentFieldName, parser.getDeprecationHandler())) {
requestTemplate = requestTemplateParser.parse(parser); requestTemplate = HttpRequestTemplate.Parser.parse(parser);
} else { } else {
String msg = "Unknown field name [" + currentFieldName + "] in http request attachment configuration"; String msg = "Unknown field name [" + currentFieldName + "] in http request attachment configuration";
throw new ElasticsearchParseException(msg); throw new ElasticsearchParseException(msg);


@ -10,7 +10,7 @@ import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.xpack.watcher.common.http.HttpProxy; import org.elasticsearch.xpack.watcher.common.http.HttpProxy;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuth; import org.elasticsearch.xpack.watcher.common.http.BasicAuth;
import java.io.IOException; import java.io.IOException;
import java.util.Objects; import java.util.Objects;
@ -26,14 +26,14 @@ public class ReportingAttachment implements EmailAttachmentParser.EmailAttachmen
private final boolean inline; private final boolean inline;
private final String id; private final String id;
private final HttpAuth auth; private final BasicAuth auth;
private final String url; private final String url;
private final TimeValue interval; private final TimeValue interval;
private final Integer retries; private final Integer retries;
private final HttpProxy proxy; private final HttpProxy proxy;
ReportingAttachment(String id, String url, boolean inline, @Nullable TimeValue interval, @Nullable Integer retries, ReportingAttachment(String id, String url, boolean inline, @Nullable TimeValue interval, @Nullable Integer retries,
@Nullable HttpAuth auth, @Nullable HttpProxy proxy) { @Nullable BasicAuth auth, @Nullable HttpProxy proxy) {
this.id = id; this.id = id;
this.url = url; this.url = url;
this.retries = retries; this.retries = retries;
@ -61,7 +61,7 @@ public class ReportingAttachment implements EmailAttachmentParser.EmailAttachmen
return inline; return inline;
} }
public HttpAuth auth() { public BasicAuth auth() {
return auth; return auth;
} }
@ -100,7 +100,7 @@ public class ReportingAttachment implements EmailAttachmentParser.EmailAttachmen
if (auth != null) { if (auth != null) {
builder.startObject(AUTH.getPreferredName()); builder.startObject(AUTH.getPreferredName());
builder.field(auth.type(), auth, params); builder.field(BasicAuth.TYPE, auth, params);
builder.endObject(); builder.endObject();
} }


@ -28,8 +28,7 @@ import org.elasticsearch.xpack.watcher.common.http.HttpProxy;
import org.elasticsearch.xpack.watcher.common.http.HttpRequest; import org.elasticsearch.xpack.watcher.common.http.HttpRequest;
import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate; import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate;
import org.elasticsearch.xpack.watcher.common.http.HttpResponse; import org.elasticsearch.xpack.watcher.common.http.HttpResponse;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuth; import org.elasticsearch.xpack.watcher.common.http.BasicAuth;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthRegistry;
import org.elasticsearch.xpack.watcher.common.text.TextTemplate; import org.elasticsearch.xpack.watcher.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.notification.email.Attachment; import org.elasticsearch.xpack.watcher.notification.email.Attachment;
@ -69,15 +68,12 @@ public class ReportingAttachmentParser implements EmailAttachmentParser<Reportin
private final int retries; private final int retries;
private HttpClient httpClient; private HttpClient httpClient;
private final TextTemplateEngine templateEngine; private final TextTemplateEngine templateEngine;
private HttpAuthRegistry authRegistry;
public ReportingAttachmentParser(Settings settings, HttpClient httpClient, public ReportingAttachmentParser(Settings settings, HttpClient httpClient, TextTemplateEngine templateEngine) {
TextTemplateEngine templateEngine, HttpAuthRegistry authRegistry) {
this.interval = INTERVAL_SETTING.get(settings); this.interval = INTERVAL_SETTING.get(settings);
this.retries = RETRIES_SETTING.get(settings); this.retries = RETRIES_SETTING.get(settings);
this.httpClient = httpClient; this.httpClient = httpClient;
this.templateEngine = templateEngine; this.templateEngine = templateEngine;
this.authRegistry = authRegistry;
this.logger = Loggers.getLogger(getClass()); this.logger = Loggers.getLogger(getClass());
} }
@ -89,7 +85,7 @@ public class ReportingAttachmentParser implements EmailAttachmentParser<Reportin
@Override @Override
public ReportingAttachment parse(String id, XContentParser parser) throws IOException { public ReportingAttachment parse(String id, XContentParser parser) throws IOException {
Builder builder = new Builder(id); Builder builder = new Builder(id);
PARSER.parse(parser, builder, new AuthParseContext(authRegistry)); PARSER.parse(parser, builder, new AuthParseContext());
return builder.build(); return builder.build();
} }
@ -222,15 +218,9 @@ public class ReportingAttachmentParser implements EmailAttachmentParser<Reportin
*/ */
private static class AuthParseContext { private static class AuthParseContext {
private final HttpAuthRegistry authRegistry; BasicAuth parseAuth(XContentParser parser) {
AuthParseContext(HttpAuthRegistry authRegistry) {
this.authRegistry = authRegistry;
}
HttpAuth parseAuth(XContentParser parser) {
try { try {
return authRegistry.parse(parser); return BasicAuth.parse(parser);
} catch (IOException e) { } catch (IOException e) {
throw new UncheckedIOException(e); throw new UncheckedIOException(e);
} }
@ -273,7 +263,7 @@ public class ReportingAttachmentParser implements EmailAttachmentParser<Reportin
private String url; private String url;
private TimeValue interval; private TimeValue interval;
private Integer retries; private Integer retries;
private HttpAuth auth; private BasicAuth auth;
private HttpProxy proxy; private HttpProxy proxy;
Builder(String id) { Builder(String id) {
@ -301,7 +291,7 @@ public class ReportingAttachmentParser implements EmailAttachmentParser<Reportin
return this; return this;
} }
Builder auth(HttpAuth auth) { Builder auth(BasicAuth auth) {
this.auth = auth; this.auth = auth;
return this; return this;
} }


@ -13,14 +13,12 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.watcher.common.text.TextTemplate; import org.elasticsearch.xpack.watcher.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.Map;
import java.util.Objects; import java.util.Objects;
public class HipChatMessage implements ToXContentObject { public class HipChatMessage implements ToXContentObject {
@ -181,26 +179,6 @@ public class HipChatMessage implements ToXContentObject {
return Objects.hash(body, rooms, users, from, format, color, notify); return Objects.hash(body, rooms, users, from, format, color, notify);
} }
public HipChatMessage render(TextTemplateEngine engine, Map<String, Object> model) {
String body = engine.render(this.body, model);
String[] rooms = null;
if (this.rooms != null) {
rooms = new String[this.rooms.length];
for (int i = 0; i < this.rooms.length; i++) {
rooms[i] = engine.render(this.rooms[i], model);
}
}
String[] users = null;
if (this.users != null) {
users = new String[this.users.length];
for (int i = 0; i < this.users.length; i++) {
users[i] = engine.render(this.users[i], model);
}
}
Color color = this.color == null ? null : Color.resolve(engine.render(this.color, model), null);
return new HipChatMessage(body, rooms, users, from, format, color, notify);
}
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(); builder.startObject();


@ -26,7 +26,7 @@ import org.elasticsearch.xpack.watcher.common.http.HttpProxy;
import org.elasticsearch.xpack.watcher.common.http.HttpRequest; import org.elasticsearch.xpack.watcher.common.http.HttpRequest;
import org.elasticsearch.xpack.watcher.common.http.HttpResponse; import org.elasticsearch.xpack.watcher.common.http.HttpResponse;
import org.elasticsearch.xpack.watcher.common.http.Scheme; import org.elasticsearch.xpack.watcher.common.http.Scheme;
import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuth; import org.elasticsearch.xpack.watcher.common.http.BasicAuth;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;


@ -24,22 +24,22 @@ import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.stream.Collectors;
/** /**
* Official documentation for this can be found at * Official documentation for this can be found at
* *
* https://developer.pagerduty.com/documentation/howto/manually-trigger-an-incident/ * https://v2.developer.pagerduty.com/docs/send-an-event-events-api-v2
* https://developer.pagerduty.com/documentation/integration/events/trigger
* https://developer.pagerduty.com/documentation/integration/events/acknowledge
* https://developer.pagerduty.com/documentation/integration/events/resolve
*/ */
public class IncidentEvent implements ToXContentObject { public class IncidentEvent implements ToXContentObject {
static final String HOST = "events.pagerduty.com"; static final String HOST = "events.pagerduty.com";
static final String PATH = "/generic/2010-04-15/create_event.json"; static final String PATH = "/v2/enqueue";
static final String ACCEPT_HEADER = "application/vnd.pagerduty+json;version=2";
final String description; final String description;
@Nullable final HttpProxy proxy; @Nullable final HttpProxy proxy;
@ -93,46 +93,81 @@ public class IncidentEvent implements ToXContentObject {
return result; return result;
} }
public HttpRequest createRequest(final String serviceKey, final Payload payload) throws IOException { HttpRequest createRequest(final String serviceKey, final Payload payload, final String watchId) throws IOException {
return HttpRequest.builder(HOST, -1) return HttpRequest.builder(HOST, -1)
.method(HttpMethod.POST) .method(HttpMethod.POST)
.scheme(Scheme.HTTPS) .scheme(Scheme.HTTPS)
.path(PATH) .path(PATH)
.proxy(proxy) .proxy(proxy)
.jsonBody(new ToXContent() { .setHeader("Accept", ACCEPT_HEADER)
@Override .jsonBody((b, p) -> buildAPIXContent(b, p, serviceKey, payload, watchId))
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(Fields.SERVICE_KEY.getPreferredName(), serviceKey);
builder.field(Fields.EVENT_TYPE.getPreferredName(), eventType);
builder.field(Fields.DESCRIPTION.getPreferredName(), description);
if (incidentKey != null) {
builder.field(Fields.INCIDENT_KEY.getPreferredName(), incidentKey);
}
if (client != null) {
builder.field(Fields.CLIENT.getPreferredName(), client);
}
if (clientUrl != null) {
builder.field(Fields.CLIENT_URL.getPreferredName(), clientUrl);
}
if (attachPayload) {
builder.startObject(Fields.DETAILS.getPreferredName());
builder.field(Fields.PAYLOAD.getPreferredName());
payload.toXContent(builder, params);
builder.endObject();
}
if (contexts != null && contexts.length > 0) {
builder.startArray(Fields.CONTEXTS.getPreferredName());
for (IncidentEventContext context : contexts) {
context.toXContent(builder, params);
}
builder.endArray();
}
return builder;
}
})
.build(); .build();
} }
XContentBuilder buildAPIXContent(XContentBuilder builder, Params params, String serviceKey,
Payload payload, String watchId) throws IOException {
builder.field(Fields.ROUTING_KEY.getPreferredName(), serviceKey);
builder.field(Fields.EVENT_ACTION.getPreferredName(), eventType);
if (incidentKey != null) {
builder.field(Fields.DEDUP_KEY.getPreferredName(), incidentKey);
}
builder.startObject(Fields.PAYLOAD.getPreferredName());
{
builder.field(Fields.SUMMARY.getPreferredName(), description);
if (attachPayload && payload != null) {
builder.startObject(Fields.CUSTOM_DETAILS.getPreferredName());
{
builder.field(Fields.PAYLOAD.getPreferredName(), payload, params);
}
builder.endObject();
}
if (watchId != null) {
builder.field(Fields.SOURCE.getPreferredName(), watchId);
} else {
builder.field(Fields.SOURCE.getPreferredName(), "watcher");
}
// TODO externalize this into something user editable
builder.field(Fields.SEVERITY.getPreferredName(), "critical");
}
builder.endObject();
if (client != null) {
builder.field(Fields.CLIENT.getPreferredName(), client);
}
if (clientUrl != null) {
builder.field(Fields.CLIENT_URL.getPreferredName(), clientUrl);
}
if (contexts != null && contexts.length > 0) {
toXContentV2Contexts(builder, params, contexts);
}
return builder;
}
/**
* Turns the V1 API contexts into 2 distinct lists, images and links. The V2 API has separated these out into 2 top level fields.
*/
private void toXContentV2Contexts(XContentBuilder builder, ToXContent.Params params,
IncidentEventContext[] contexts) throws IOException {
// contexts can be either links or images, and the v2 api needs them separate
Map<IncidentEventContext.Type, List<IncidentEventContext>> groups = Arrays.stream(contexts)
.collect(Collectors.groupingBy(iec -> iec.type));
List<IncidentEventContext> links = groups.getOrDefault(IncidentEventContext.Type.LINK, Collections.emptyList());
if (links.isEmpty() == false) {
builder.array(Fields.LINKS.getPreferredName(), links.toArray());
}
List<IncidentEventContext> images = groups.getOrDefault(IncidentEventContext.Type.IMAGE, Collections.emptyList());
if (images.isEmpty() == false) {
builder.array(Fields.IMAGES.getPreferredName(), images.toArray());
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
@ -445,8 +480,15 @@ public class IncidentEvent implements ToXContentObject {
// we need to keep this for BWC
ParseField CONTEXT_DEPRECATED = new ParseField("context");
ParseField SERVICE_KEY = new ParseField("service_key");
ParseField PAYLOAD = new ParseField("payload");
ParseField DETAILS = new ParseField("details");
ParseField ROUTING_KEY = new ParseField("routing_key");
ParseField EVENT_ACTION = new ParseField("event_action");
ParseField DEDUP_KEY = new ParseField("dedup_key");
ParseField SUMMARY = new ParseField("summary");
ParseField SOURCE = new ParseField("source");
ParseField SEVERITY = new ParseField("severity");
ParseField LINKS = new ParseField("links");
ParseField IMAGES = new ParseField("images");
ParseField CUSTOM_DETAILS = new ParseField("custom_details");
}
}
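For orientation, the request body emitted by the new buildAPIXContent method targets PagerDuty's v2 Events API (a POST to /v2/enqueue with the versioned Accept header) instead of the old generic create_event endpoint. The standalone sketch below only mirrors that top-level JSON shape with XContentBuilder; it is not part of the change, and every concrete value in it is an invented placeholder.

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

public class PagerDutyV2BodySketch {
    public static void main(String[] args) throws Exception {
        // Mirrors the field layout of buildAPIXContent above; all values are invented
        // placeholders, and custom_details/links/images are left empty here.
        XContentBuilder body = jsonBuilder().startObject()
                .field("routing_key", "my-service-key")   // v1 "service_key" became "routing_key"
                .field("event_action", "trigger")         // v1 "event_type" became "event_action"
                .field("dedup_key", "my-incident-key")    // v1 "incident_key" became "dedup_key"
                .startObject("payload")
                    .field("summary", "CPU usage exceeded threshold")
                    .field("source", "my_watch_id")       // falls back to "watcher" when no watch id is given
                    .field("severity", "critical")        // currently hard-coded, see the TODO above
                .endObject()
                .startArray("links").endArray()           // link contexts, split out of the v1 "contexts" array
                .startArray("images").endArray()          // image contexts, split out of the v1 "contexts" array
                .endObject();
        System.out.println(Strings.toString(body));
    }
}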

View File

@ -92,6 +92,85 @@ public class IncidentEventContext implements ToXContentObject {
return builder.endObject();
}
public static IncidentEventContext parse(XContentParser parser) throws IOException {
Type type = null;
String href = null;
String text = null;
String src = null;
String alt = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (Strings.hasLength(currentFieldName)) {
if (XField.TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
try {
type = Type.valueOf(parser.text().toUpperCase(Locale.ROOT));
} catch (IllegalArgumentException e) {
String msg = "could not parse trigger incident event context. unknown context type [{}]";
throw new ElasticsearchParseException(msg, parser.text());
}
} else {
if (XField.HREF.match(currentFieldName, parser.getDeprecationHandler())) {
href = parser.text();
} else if (XField.TEXT.match(currentFieldName, parser.getDeprecationHandler())) {
text = parser.text();
} else if (XField.SRC.match(currentFieldName, parser.getDeprecationHandler())) {
src = parser.text();
} else if (XField.ALT.match(currentFieldName, parser.getDeprecationHandler())) {
alt = parser.text();
} else {
String msg = "could not parse trigger incident event context. unknown field [{}]";
throw new ElasticsearchParseException(msg, currentFieldName);
}
}
}
}
return createAndValidateTemplate(type, href, src, alt, text);
}
private static IncidentEventContext createAndValidateTemplate(Type type, String href, String src, String alt,
String text) {
if (type == null) {
throw new ElasticsearchParseException("could not parse trigger incident event context. missing required field [{}]",
XField.TYPE.getPreferredName());
}
switch (type) {
case LINK:
if (href == null) {
throw new ElasticsearchParseException("could not parse trigger incident event context. missing required field " +
"[{}] for [{}] context", XField.HREF.getPreferredName(), Type.LINK.name().toLowerCase(Locale.ROOT));
}
if (src != null) {
throw new ElasticsearchParseException("could not parse trigger incident event context. unexpected field [{}] for " +
"[{}] context", XField.SRC.getPreferredName(), Type.LINK.name().toLowerCase(Locale.ROOT));
}
if (alt != null) {
throw new ElasticsearchParseException("could not parse trigger incident event context. unexpected field [{}] for " +
"[{}] context", XField.ALT.getPreferredName(), Type.LINK.name().toLowerCase(Locale.ROOT));
}
return link(href, text);
case IMAGE:
if (src == null) {
throw new ElasticsearchParseException("could not parse trigger incident event context. missing required field " +
"[{}] for [{}] context", XField.SRC.getPreferredName(), Type.IMAGE.name().toLowerCase(Locale.ROOT));
}
if (text != null) {
throw new ElasticsearchParseException("could not parse trigger incident event context. unexpected field [{}] for " +
"[{}] context", XField.TEXT.getPreferredName(), Type.IMAGE.name().toLowerCase(Locale.ROOT));
}
return image(src, href, alt);
default:
throw new ElasticsearchParseException("could not parse trigger incident event context. unknown context type [{}]",
type);
}
}
public static class Template implements ToXContentObject {
final Type type;
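For reference, the parse and createAndValidateTemplate methods above accept two shapes of context object: a link must carry href (src and alt are rejected), and an image must carry src (text is rejected). The snippet below shows hypothetical examples; the URLs and labels are invented.

// Hypothetical context objects accepted by IncidentEventContext.parse (values invented):
String linkContext  = "{\"type\": \"link\", \"href\": \"https://example.org/runbook\", \"text\": \"runbook\"}";
String imageContext = "{\"type\": \"image\", \"src\": \"https://example.org/cpu.png\", \"href\": \"https://example.org\", \"alt\": \"cpu graph\"}";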

View File

@ -48,8 +48,8 @@ public class PagerDutyAccount {
return eventDefaults;
}
public SentEvent send(IncidentEvent event, Payload payload) throws IOException {
public SentEvent send(IncidentEvent event, Payload payload, String watchId) throws IOException {
HttpRequest request = event.createRequest(serviceKey, payload);
HttpRequest request = event.createRequest(serviceKey, payload, watchId);
HttpResponse response = httpClient.execute(request);
return SentEvent.responded(event, request, response);
}
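For callers, the only visible difference in send is the extra watch id argument, which ends up as the payload.source field of the v2 event. A hypothetical call site, with the account name and watch id invented for illustration:

// Hypothetical usage; "events-account" and "my_watch_id" are placeholders.
PagerDutyAccount account = service.getAccount("events-account");
SentEvent sent = account.send(event, payload, "my_watch_id");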

View File

@ -27,8 +27,6 @@ import org.elasticsearch.xpack.watcher.common.http.HttpClient;
import org.elasticsearch.xpack.watcher.common.http.HttpRequest;
import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate;
import org.elasticsearch.xpack.watcher.common.http.HttpResponse;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthRegistry;
import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuthFactory;
import org.elasticsearch.xpack.watcher.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.notification.email.Attachment;
@ -79,7 +77,6 @@ import static org.mockito.Mockito.when;
public class EmailActionTests extends ESTestCase {
private HttpAuthRegistry registry = new HttpAuthRegistry(singletonMap("basic", new BasicAuthFactory(null)));
private HttpClient httpClient = mock(HttpClient.class);
private EmailAttachmentsParser emailAttachmentParser;
@ -87,7 +84,7 @@ public class EmailActionTests extends ESTestCase {
public void addEmailAttachmentParsers() {
Map<String, EmailAttachmentParser> emailAttachmentParsers = new HashMap<>();
emailAttachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient,
new HttpRequestTemplate.Parser(registry), new MockTextTemplateEngine()));
new MockTextTemplateEngine()));
emailAttachmentParsers.put(DataAttachmentParser.TYPE, new DataAttachmentParser());
emailAttachmentParser = new EmailAttachmentsParser(emailAttachmentParsers);
}
@ -511,10 +508,8 @@ public class EmailActionTests extends ESTestCase {
.thenReturn(new HttpResponse(403));
// setup email attachment parsers
HttpRequestTemplate.Parser httpRequestTemplateParser = new HttpRequestTemplate.Parser(registry);
Map<String, EmailAttachmentParser> attachmentParsers = new HashMap<>();
attachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient, httpRequestTemplateParser,
attachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient, engine));
engine));
EmailAttachmentsParser emailAttachmentsParser = new EmailAttachmentsParser(attachmentParsers);
XContentBuilder builder = jsonBuilder().startObject()

View File

@ -17,8 +17,7 @@ import org.elasticsearch.xpack.watcher.common.http.HttpClient;
import org.elasticsearch.xpack.watcher.common.http.HttpProxy;
import org.elasticsearch.xpack.watcher.common.http.HttpRequest;
import org.elasticsearch.xpack.watcher.common.http.HttpResponse;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuth;
import org.elasticsearch.xpack.watcher.common.http.BasicAuth;
import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuth;
import org.elasticsearch.xpack.watcher.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.notification.jira.JiraAccount;
@ -93,11 +92,9 @@ public class ExecutableJiraActionTests extends ESTestCase {
assertThat(request.port(), is(port));
assertThat(request.path(), is(JiraAccount.DEFAULT_PATH));
HttpAuth httpAuth = request.auth();
BasicAuth httpAuth = request.auth();
assertThat(httpAuth.type(), is("basic"));
BasicAuth basicAuth = (BasicAuth) httpAuth;
assertThat(httpAuth.getUsername(), is(user));
assertThat(basicAuth.getUsername(), is(user));
}
public void testExecutionWithNoDefaults() throws Exception {

View File

@ -111,7 +111,7 @@ public class PagerDutyActionTests extends ESTestCase {
when(response.status()).thenReturn(200);
HttpRequest request = mock(HttpRequest.class);
SentEvent sentEvent = SentEvent.responded(event, request, response);
when(account.send(event, payload)).thenReturn(sentEvent);
when(account.send(event, payload, wid.watchId())).thenReturn(sentEvent);
when(service.getAccount(accountName)).thenReturn(account);
Action.Result result = executable.execute("_id", ctx, payload);

View File

@ -28,8 +28,6 @@ import org.elasticsearch.xpack.watcher.common.http.HttpProxy;
import org.elasticsearch.xpack.watcher.common.http.HttpRequest;
import org.elasticsearch.xpack.watcher.common.http.HttpRequestTemplate;
import org.elasticsearch.xpack.watcher.common.http.HttpResponse;
import org.elasticsearch.xpack.watcher.common.http.auth.HttpAuthRegistry;
import org.elasticsearch.xpack.watcher.common.http.auth.basic.BasicAuthFactory;
import org.elasticsearch.xpack.watcher.common.text.TextTemplate;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.execution.TriggeredExecutionContext;
@ -47,7 +45,6 @@ import javax.mail.internet.AddressException;
import java.io.IOException;
import java.util.Map;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.CoreMatchers.instanceOf;
@ -73,7 +70,6 @@ public class WebhookActionTests extends ESTestCase {
private static final String TEST_PATH_STRING = "/testPath";
private TextTemplateEngine templateEngine;
private HttpAuthRegistry authRegistry;
private TextTemplate testBody;
private TextTemplate testPath;
@ -82,7 +78,6 @@ public class WebhookActionTests extends ESTestCase {
templateEngine = new MockTextTemplateEngine();
testBody = new TextTemplate(TEST_BODY_STRING);
testPath = new TextTemplate(TEST_PATH_STRING);
authRegistry = new HttpAuthRegistry(singletonMap("basic", new BasicAuthFactory(null)));
}
public void testExecute() throws Exception {
@ -213,14 +208,14 @@ public class WebhookActionTests extends ESTestCase {
}
private WebhookActionFactory webhookFactory(HttpClient client) {
return new WebhookActionFactory(Settings.EMPTY, client, new HttpRequestTemplate.Parser(authRegistry), templateEngine);
return new WebhookActionFactory(Settings.EMPTY, client, templateEngine);
}
public void testThatSelectingProxyWorks() throws Exception {
Environment environment = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build());
try (HttpClient httpClient = new HttpClient(Settings.EMPTY, authRegistry,
new SSLService(environment.settings(), environment)); MockWebServer proxyServer = new MockWebServer()) {
try (HttpClient httpClient = new HttpClient(Settings.EMPTY, new SSLService(environment.settings(), environment), null);
MockWebServer proxyServer = new MockWebServer()) {
proxyServer.start();
proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody("fullProxiedContent"));

Some files were not shown because too many files have changed in this diff.