diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java index e9cde26e6c8..80d862acd07 100644 --- a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java +++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java @@ -19,13 +19,13 @@ package org.elasticsearch.client.benchmark.ops.bulk; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.client.benchmark.BenchmarkTask; import org.elasticsearch.client.benchmark.metrics.Sample; import org.elasticsearch.client.benchmark.metrics.SampleRecorder; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.ESLoggerFactory; import java.io.BufferedReader; import java.io.IOException; @@ -135,7 +135,7 @@ public class BulkBenchmarkTask implements BenchmarkTask { private static final class BulkIndexer implements Runnable { - private static final Logger logger = ESLoggerFactory.getLogger(BulkIndexer.class.getName()); + private static final Logger logger = LogManager.getLogger(BulkIndexer.class); private final BlockingQueue<List<String>> bulkData; private final int warmupIterations; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpression.java new file mode 100644 index 00000000000..10c0d0911ba --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpression.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl; + +import org.elasticsearch.common.xcontent.ToXContentObject; + +/** + * Implementations of this interface represent an expression used for user role mapping + * that can later be resolved to a boolean value. + */ +public interface RoleMapperExpression extends ToXContentObject { + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AllRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AllRoleMapperExpression.java new file mode 100644 index 00000000000..b5cbe4d2e42 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AllRoleMapperExpression.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; + +import java.util.ArrayList; +import java.util.List; + +/** + * An expression that evaluates to true if-and-only-if all its children + * evaluate to true. + * An all expression with no children is always true. + */ +public final class AllRoleMapperExpression extends CompositeRoleMapperExpression { + + private AllRoleMapperExpression(String name, RoleMapperExpression[] elements) { + super(name, elements); + } + + public static Builder builder() { + return new Builder(); + } + + public static final class Builder { + private List<RoleMapperExpression> elements = new ArrayList<>(); + + public Builder addExpression(final RoleMapperExpression expression) { + assert expression != null : "expression cannot be null"; + elements.add(expression); + return this; + } + + public AllRoleMapperExpression build() { + return new AllRoleMapperExpression(CompositeType.ALL.getName(), elements.toArray(new RoleMapperExpression[0])); + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AnyRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AnyRoleMapperExpression.java new file mode 100644 index 00000000000..7632a071bd1 --- /dev/null +++ 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/AnyRoleMapperExpression.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; + +import java.util.ArrayList; +import java.util.List; + +/** + * An expression that evaluates to true if at least one of its children + * evaluate to true. + * An any expression with no children is never true. 
+ */ +public final class AnyRoleMapperExpression extends CompositeRoleMapperExpression { + + private AnyRoleMapperExpression(String name, RoleMapperExpression[] elements) { + super(name, elements); + } + + public static Builder builder() { + return new Builder(); + } + + public static final class Builder { + private List<RoleMapperExpression> elements = new ArrayList<>(); + + public Builder addExpression(final RoleMapperExpression expression) { + assert expression != null : "expression cannot be null"; + elements.add(expression); + return this; + } + + public AnyRoleMapperExpression build() { + return new AnyRoleMapperExpression(CompositeType.ANY.getName(), elements.toArray(new RoleMapperExpression[0])); + } + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeRoleMapperExpression.java new file mode 100644 index 00000000000..2519c59b688 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeRoleMapperExpression.java @@ -0,0 +1,100 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * Expression of role mapper expressions which can be combined by operators like AND, OR + *

+ * Expression builder example: + *

+ * {@code
+ * final RoleMapperExpression allExpression = AllRoleMapperExpression.builder()
+                    .addExpression(AnyRoleMapperExpression.builder()
+                            .addExpression(FieldRoleMapperExpression.ofUsername("user1@example.org"))
+                            .addExpression(FieldRoleMapperExpression.ofUsername("user2@example.org"))
+                            .build())
+                    .addExpression(FieldRoleMapperExpression.ofMetadata("metadata.location", "AMER"))
+                    .addExpression(new ExceptRoleMapperExpression(FieldRoleMapperExpression.ofUsername("user3@example.org")))
+                    .build();
+ * }
+ * 
+ */ +public abstract class CompositeRoleMapperExpression implements RoleMapperExpression { + private final String name; + private final List<RoleMapperExpression> elements; + + CompositeRoleMapperExpression(final String name, final RoleMapperExpression... elements) { + assert name != null : "field name cannot be null"; + assert elements != null : "at least one field expression is required"; + this.name = name; + this.elements = Collections.unmodifiableList(Arrays.asList(elements)); + } + + public String getName() { + return this.name; + } + + public List<RoleMapperExpression> getElements() { + return elements; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + final CompositeRoleMapperExpression that = (CompositeRoleMapperExpression) o; + if (Objects.equals(this.getName(), that.getName()) == false) { + return false; + } + return Objects.equals(this.getElements(), that.getElements()); + } + + @Override + public int hashCode() { + return Objects.hash(name, elements); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.startArray(name); + for (RoleMapperExpression e : elements) { + e.toXContent(builder, params); + } + builder.endArray(); + return builder.endObject(); + } + +} + diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeType.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeType.java new file mode 100644 index 00000000000..1d6c8aea122 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/CompositeType.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.common.ParseField; + +import java.util.Collections; + +import java.util.HashMap; + +import java.util.Map; + +public enum CompositeType { + + ANY("any"), ALL("all"), EXCEPT("except"); + + private static Map<String, CompositeType> nameToType = Collections.unmodifiableMap(initialize()); + private ParseField field; + + CompositeType(String name) { + this.field = new ParseField(name); + } + + public String getName() { + return field.getPreferredName(); + } + + public ParseField getParseField() { + return field; + } + + public static CompositeType fromName(String name) { + return nameToType.get(name); + } + + private static Map<String, CompositeType> initialize() { + Map<String, CompositeType> map = new HashMap<>(); + for (CompositeType field : values()) { + map.put(field.getName(), field); + } + return map; + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/ExceptRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/ExceptRoleMapperExpression.java new file mode 100644 index 00000000000..c2cad0d18da --- /dev/null +++ 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/expressions/ExceptRoleMapperExpression.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security.support.expressiondsl.expressions; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; + +/** + * A negating expression. That is, this expression evaluates to true if-and-only-if + * its delegate expression evaluate to false. + * Syntactically, except expressions are intended to be children of all + * expressions ({@link AllRoleMapperExpression}). 
+ */ +public final class ExceptRoleMapperExpression extends CompositeRoleMapperExpression { + + public ExceptRoleMapperExpression(final RoleMapperExpression expression) { + super(CompositeType.EXCEPT.getName(), expression); + } + + @Override + public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(); + builder.field(CompositeType.EXCEPT.getName()); + builder.value(getElements().get(0)); + return builder.endObject(); + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/fields/FieldRoleMapperExpression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/fields/FieldRoleMapperExpression.java new file mode 100644 index 00000000000..c96ac3cc5b5 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/fields/FieldRoleMapperExpression.java @@ -0,0 +1,122 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl.fields; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * An expression that evaluates to true if a field (map element) matches + * the provided values. A field expression may have more than one provided value, in which + * case the expression is true if any of the values are matched. + *

+ * Expression builder example: + *

+ * {@code
+ * final RoleMapperExpression usernameExpression = FieldRoleMapperExpression.ofUsername("user1@example.org");
+ * }
+ * 
+ */ +public class FieldRoleMapperExpression implements RoleMapperExpression { + + private final String field; + private final List values; + + public FieldRoleMapperExpression(final String field, final Object... values) { + if (field == null || field.isEmpty()) { + throw new IllegalArgumentException("null or empty field name (" + field + ")"); + } + if (values == null || values.length == 0) { + throw new IllegalArgumentException("null or empty values (" + values + ")"); + } + this.field = field; + this.values = Collections.unmodifiableList(Arrays.asList(values)); + } + + public String getField() { + return field; + } + + public List getValues() { + return values; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + final FieldRoleMapperExpression that = (FieldRoleMapperExpression) o; + + return Objects.equals(this.getField(), that.getField()) && Objects.equals(this.getValues(), that.getValues()); + } + + @Override + public int hashCode() { + int result = field.hashCode(); + result = 31 * result + values.hashCode(); + return result; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startObject("field"); + builder.startArray(this.field); + for (Object value : values) { + builder.value(value); + } + builder.endArray(); + builder.endObject(); + return builder.endObject(); + } + + public static FieldRoleMapperExpression ofUsername(Object... values) { + return ofKeyValues("username", values); + } + + public static FieldRoleMapperExpression ofGroups(Object... values) { + return ofKeyValues("groups", values); + } + + public static FieldRoleMapperExpression ofDN(Object... values) { + return ofKeyValues("dn", values); + } + + public static FieldRoleMapperExpression ofMetadata(String key, Object... 
values) { + if (key.startsWith("metadata.") == false) { + throw new IllegalArgumentException("metadata key must have prefix 'metadata.'"); + } + return ofKeyValues(key, values); + } + + public static FieldRoleMapperExpression ofKeyValues(String key, Object... values) { + return new FieldRoleMapperExpression(key, values); + } + +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParser.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParser.java new file mode 100644 index 00000000000..98de4f4c209 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParser.java @@ -0,0 +1,180 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl.parser; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.AllRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.CompositeType; +import org.elasticsearch.client.security.support.expressiondsl.expressions.ExceptRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; +import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +/** + * Parses the JSON (XContent) based boolean expression DSL into a tree of + * {@link RoleMapperExpression} objects. + * Note: As this is client side parser, it mostly validates the structure of + * DSL being parsed it does not enforce rules + * like allowing "except" within "except" or "any" expressions. + */ +public final class RoleMapperExpressionParser { + public static final ParseField FIELD = new ParseField("field"); + + /** + * @param name The name of the expression tree within its containing object. + * Used to provide descriptive error messages. 
+ * @param parser A parser over the XContent (typically JSON) DSL + * representation of the expression + */ + public RoleMapperExpression parse(final String name, final XContentParser parser) throws IOException { + return parseRulesObject(name, parser); + } + + private RoleMapperExpression parseRulesObject(final String objectName, final XContentParser parser) + throws IOException { + // find the start of the DSL object + final XContentParser.Token token; + if (parser.currentToken() == null) { + token = parser.nextToken(); + } else { + token = parser.currentToken(); + } + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. expected [{}] to be an object but found [{}] instead", + objectName, token); + } + + final String fieldName = fieldName(objectName, parser); + final RoleMapperExpression expr = parseExpression(parser, fieldName, objectName); + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. 
object [{}] contains multiple fields", objectName); + } + return expr; + } + + private RoleMapperExpression parseExpression(XContentParser parser, String field, String objectName) + throws IOException { + + if (CompositeType.ANY.getParseField().match(field, parser.getDeprecationHandler())) { + final AnyRoleMapperExpression.Builder builder = AnyRoleMapperExpression.builder(); + parseExpressionArray(CompositeType.ANY.getParseField(), parser).forEach(builder::addExpression); + return builder.build(); + } else if (CompositeType.ALL.getParseField().match(field, parser.getDeprecationHandler())) { + final AllRoleMapperExpression.Builder builder = AllRoleMapperExpression.builder(); + parseExpressionArray(CompositeType.ALL.getParseField(), parser).forEach(builder::addExpression); + return builder.build(); + } else if (FIELD.match(field, parser.getDeprecationHandler())) { + return parseFieldExpression(parser); + } else if (CompositeType.EXCEPT.getParseField().match(field, parser.getDeprecationHandler())) { + return parseExceptExpression(parser); + } else { + throw new ElasticsearchParseException("failed to parse rules expression. field [{}] is not recognised in object [{}]", field, + objectName); + } + } + + private RoleMapperExpression parseFieldExpression(XContentParser parser) throws IOException { + checkStartObject(parser); + final String fieldName = fieldName(FIELD.getPreferredName(), parser); + + final List values; + if (parser.nextToken() == XContentParser.Token.START_ARRAY) { + values = parseArray(FIELD, parser, this::parseFieldValue); + } else { + values = Collections.singletonList(parseFieldValue(parser)); + } + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. 
object [{}] contains multiple fields", + FIELD.getPreferredName()); + } + + return FieldRoleMapperExpression.ofKeyValues(fieldName, values.toArray()); + } + + private RoleMapperExpression parseExceptExpression(XContentParser parser) throws IOException { + checkStartObject(parser); + return new ExceptRoleMapperExpression(parseRulesObject(CompositeType.EXCEPT.getName(), parser)); + } + + private void checkStartObject(XContentParser parser) throws IOException { + final XContentParser.Token token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("failed to parse rules expression. expected an object but found [{}] instead", token); + } + } + + private String fieldName(String objectName, XContentParser parser) throws IOException { + if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { + throw new ElasticsearchParseException("failed to parse rules expression. object [{}] does not contain any fields", objectName); + } + String parsedFieldName = parser.currentName(); + return parsedFieldName; + } + + private List parseExpressionArray(ParseField field, XContentParser parser) + throws IOException { + parser.nextToken(); // parseArray requires that the parser is positioned + // at the START_ARRAY token + return parseArray(field, parser, p -> parseRulesObject(field.getPreferredName(), p)); + } + + private List parseArray(ParseField field, XContentParser parser, CheckedFunction elementParser) + throws IOException { + final XContentParser.Token token = parser.currentToken(); + if (token == XContentParser.Token.START_ARRAY) { + List list = new ArrayList<>(); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + list.add(elementParser.apply(parser)); + } + return list; + } else { + throw new ElasticsearchParseException("failed to parse rules expression. 
field [{}] requires an array", field); + } + } + + private Object parseFieldValue(XContentParser parser) throws IOException { + switch (parser.currentToken()) { + case VALUE_STRING: + return parser.text(); + + case VALUE_BOOLEAN: + return parser.booleanValue(); + + case VALUE_NUMBER: + return parser.longValue(); + + case VALUE_NULL: + return null; + + default: + throw new ElasticsearchParseException("failed to parse rules expression. expected a field value but found [{}] instead", parser + .currentToken()); + } + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java index ddaec641573..751f4cfdf0e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java @@ -147,7 +147,7 @@ public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase { @After public void deleteJob() throws IOException { - new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata(); } public void testGetCategories() throws IOException { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index 698c4c6bef1..a8050397ad1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -19,7 +19,6 @@ package org.elasticsearch.client; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; - import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; 
import org.elasticsearch.action.bulk.BulkRequest; @@ -93,7 +92,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { @After public void cleanUp() throws IOException { - new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata(); } public void testPutJob() throws Exception { @@ -426,7 +425,6 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { assertTrue(response.isAcknowledged()); } - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33966") public void testStartDatafeed() throws Exception { String jobId = "test-start-datafeed"; String indexName = "start_data_1"; @@ -437,18 +435,18 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT); BulkRequest bulk = new BulkRequest(); bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - long now = System.currentTimeMillis(); - long oneDayAgo = now - 86400000; + long now = (System.currentTimeMillis()/1000)*1000; + long thePast = now - 60000; int i = 0; - long dayAgoCopy = oneDayAgo; - while(dayAgoCopy < now) { + long pastCopy = thePast; + while(pastCopy < now) { IndexRequest doc = new IndexRequest(); doc.index(indexName); doc.type("doc"); doc.id("id" + i); - doc.source("{\"total\":" +randomInt(1000) + ",\"timestamp\":"+ dayAgoCopy +"}", XContentType.JSON); + doc.source("{\"total\":" +randomInt(1000) + ",\"timestamp\":"+ pastCopy +"}", XContentType.JSON); bulk.add(doc); - dayAgoCopy += 1000000; + pastCopy += 1000; i++; } highLevelClient().bulk(bulk, RequestOptions.DEFAULT); @@ -470,9 +468,9 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { StartDatafeedRequest startDatafeedRequest = new StartDatafeedRequest(datafeedId); - startDatafeedRequest.setStart(String.valueOf(oneDayAgo)); + startDatafeedRequest.setStart(String.valueOf(thePast)); // Should 
only process two documents - startDatafeedRequest.setEnd(String.valueOf(oneDayAgo + 2000000)); + startDatafeedRequest.setEnd(String.valueOf(thePast + 2000)); StartDatafeedResponse response = execute(startDatafeedRequest, machineLearningClient::startDatafeed, machineLearningClient::startDatafeedAsync); @@ -701,7 +699,8 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase { .setDetectorDescription(randomAlphaOfLength(10)) .build(); AnalysisConfig.Builder configBuilder = new AnalysisConfig.Builder(Arrays.asList(detector)); - configBuilder.setBucketSpan(new TimeValue(randomIntBetween(1, 10), TimeUnit.SECONDS)); + //should not be random, see:https://github.com/elastic/ml-cpp/issues/208 + configBuilder.setBucketSpan(new TimeValue(5, TimeUnit.SECONDS)); builder.setAnalysisConfig(configBuilder); DataDescription.Builder dataDescription = new DataDescription.Builder(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java deleted file mode 100644 index 7ad86576245..00000000000 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlRestTestStateCleaner.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.client; - -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.test.rest.ESRestTestCase; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -/** - * This is temporarily duplicated from the server side. - * @TODO Replace with an implementation using the HLRC once - * the APIs for managing datafeeds are implemented. - */ -public class MlRestTestStateCleaner { - - private final Logger logger; - private final RestClient adminClient; - - public MlRestTestStateCleaner(Logger logger, RestClient adminClient) { - this.logger = logger; - this.adminClient = adminClient; - } - - public void clearMlMetadata() throws IOException { - deleteAllDatafeeds(); - deleteAllJobs(); - // indices will be deleted by the ESRestTestCase class - } - - @SuppressWarnings("unchecked") - private void deleteAllDatafeeds() throws IOException { - final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds"); - datafeedsRequest.addParameter("filter_path", "datafeeds"); - final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest); - final List> datafeeds = - (List>) XContentMapValues.extractValue("datafeeds", ESRestTestCase.entityAsMap(datafeedsResponse)); - if (datafeeds == null) { - return; - } - - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop")); - } catch (Exception e1) { - logger.warn("failed to stop all datafeeds. 
Forcing stop", e1); - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true")); - } catch (Exception e2) { - logger.warn("Force-closing all data feeds failed", e2); - } - throw new RuntimeException( - "Had to resort to force-stopping datafeeds, something went wrong?", e1); - } - - for (Map datafeed : datafeeds) { - String datafeedId = (String) datafeed.get("datafeed_id"); - adminClient.performRequest(new Request("DELETE", "/_xpack/ml/datafeeds/" + datafeedId)); - } - } - - private void deleteAllJobs() throws IOException { - final Request jobsRequest = new Request("GET", "/_xpack/ml/anomaly_detectors"); - jobsRequest.addParameter("filter_path", "jobs"); - final Response response = adminClient.performRequest(jobsRequest); - @SuppressWarnings("unchecked") - final List> jobConfigs = - (List>) XContentMapValues.extractValue("jobs", ESRestTestCase.entityAsMap(response)); - if (jobConfigs == null) { - return; - } - - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close")); - } catch (Exception e1) { - logger.warn("failed to close all jobs. 
Forcing closed", e1); - try { - adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close?force=true")); - } catch (Exception e2) { - logger.warn("Force-closing all jobs failed", e2); - } - throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?", - e1); - } - - for (Map jobConfig : jobConfigs) { - String jobId = (String) jobConfig.get("job_id"); - adminClient.performRequest(new Request("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId)); - } - } -} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MlTestStateCleaner.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlTestStateCleaner.java new file mode 100644 index 00000000000..c565af7c372 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MlTestStateCleaner.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.ml.CloseJobRequest; +import org.elasticsearch.client.ml.DeleteDatafeedRequest; +import org.elasticsearch.client.ml.DeleteJobRequest; +import org.elasticsearch.client.ml.GetDatafeedRequest; +import org.elasticsearch.client.ml.GetDatafeedResponse; +import org.elasticsearch.client.ml.GetJobRequest; +import org.elasticsearch.client.ml.GetJobResponse; +import org.elasticsearch.client.ml.StopDatafeedRequest; +import org.elasticsearch.client.ml.datafeed.DatafeedConfig; +import org.elasticsearch.client.ml.job.config.Job; + +import java.io.IOException; + +/** + * Cleans up and ML resources created during tests + */ +public class MlTestStateCleaner { + + private final Logger logger; + private final MachineLearningClient mlClient; + + public MlTestStateCleaner(Logger logger, MachineLearningClient mlClient) { + this.logger = logger; + this.mlClient = mlClient; + } + + public void clearMlMetadata() throws IOException { + deleteAllDatafeeds(); + deleteAllJobs(); + } + + private void deleteAllDatafeeds() throws IOException { + stopAllDatafeeds(); + + GetDatafeedResponse getDatafeedResponse = mlClient.getDatafeed(GetDatafeedRequest.getAllDatafeedsRequest(), RequestOptions.DEFAULT); + for (DatafeedConfig datafeed : getDatafeedResponse.datafeeds()) { + mlClient.deleteDatafeed(new DeleteDatafeedRequest(datafeed.getId()), RequestOptions.DEFAULT); + } + } + + private void stopAllDatafeeds() { + StopDatafeedRequest stopAllDatafeedsRequest = StopDatafeedRequest.stopAllDatafeedsRequest(); + try { + mlClient.stopDatafeed(stopAllDatafeedsRequest, RequestOptions.DEFAULT); + } catch (Exception e1) { + logger.warn("failed to stop all datafeeds. 
Forcing stop", e1); + try { + stopAllDatafeedsRequest.setForce(true); + mlClient.stopDatafeed(stopAllDatafeedsRequest, RequestOptions.DEFAULT); + } catch (Exception e2) { + logger.warn("Force-closing all data feeds failed", e2); + } + throw new RuntimeException("Had to resort to force-stopping datafeeds, something went wrong?", e1); + } + } + + private void deleteAllJobs() throws IOException { + closeAllJobs(); + + GetJobResponse getJobResponse = mlClient.getJob(GetJobRequest.getAllJobsRequest(), RequestOptions.DEFAULT); + for (Job job : getJobResponse.jobs()) { + mlClient.deleteJob(new DeleteJobRequest(job.getId()), RequestOptions.DEFAULT); + } + } + + private void closeAllJobs() { + CloseJobRequest closeAllJobsRequest = CloseJobRequest.closeAllJobsRequest(); + try { + mlClient.closeJob(closeAllJobsRequest, RequestOptions.DEFAULT); + } catch (Exception e1) { + logger.warn("failed to close all jobs. Forcing closed", e1); + closeAllJobsRequest.setForce(true); + try { + mlClient.closeJob(closeAllJobsRequest, RequestOptions.DEFAULT); + } catch (Exception e2) { + logger.warn("Force-closing all jobs failed", e2); + } + throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?", e1); + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 85612147b46..5dc4c88cbbc 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -114,6 +114,7 @@ import static org.hamcrest.Matchers.not; */ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { + @SuppressWarnings("unused") public void testIndex() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -278,6 +279,7 @@ public class 
CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testUpdate() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -546,6 +548,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testDelete() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -665,6 +668,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testBulk() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -767,6 +771,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testReindex() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -905,6 +910,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testReindexRethrottle() throws Exception { RestHighLevelClient client = highLevelClient(); TaskId taskId = new TaskId("oTUltX4IQMOUUVeiohTt8A:124"); @@ -947,6 +953,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + @SuppressWarnings("unused") public void testUpdateByQuery() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -1066,6 +1073,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testDeleteByQuery() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -1173,6 +1181,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testGet() throws Exception { RestHighLevelClient client = highLevelClient(); { @@ -1487,6 +1496,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public 
void testMultiGet() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java index dedd50096f8..170bd435856 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java @@ -192,6 +192,7 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testClusterGetSettings() throws IOException { RestHighLevelClient client = highLevelClient(); @@ -257,6 +258,7 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + @SuppressWarnings("unused") public void testClusterHealth() throws IOException { RestHighLevelClient client = highLevelClient(); client.indices().create(new CreateIndexRequest("index"), RequestOptions.DEFAULT); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 2da4d306c28..c66baf69d96 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -706,6 +706,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testGetFieldMapping() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); @@ -891,6 +892,7 @@ public class 
IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testRefreshIndex() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -959,6 +961,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testFlushIndex() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1035,6 +1038,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testSyncedFlushIndex() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1308,6 +1312,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + @SuppressWarnings("unused") public void testForceMergeIndex() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1381,6 +1386,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testClearCache() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1527,6 +1533,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testExistsAlias() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1590,6 +1597,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testUpdateAliases() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1915,6 +1923,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + @SuppressWarnings("unused") public void testGetAlias() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -1985,6 +1994,7 @@ public class IndicesClientDocumentationIT extends 
ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testIndexPutSettings() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -2315,6 +2325,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + @SuppressWarnings("unused") public void testValidateQuery() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java index 4702c34c6de..00bee27807f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java @@ -143,6 +143,7 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testGetPipeline() throws IOException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 1613b34693e..a9fbb56f68f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -30,7 +30,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.MachineLearningGetResultsIT; import org.elasticsearch.client.MachineLearningIT; -import 
org.elasticsearch.client.MlRestTestStateCleaner; +import org.elasticsearch.client.MlTestStateCleaner; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.ml.CloseJobRequest; @@ -126,7 +126,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase { @After public void cleanUp() throws IOException { - new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); + new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata(); } public void testCreateJob() throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index d9d4f665f9d..4382924bb97 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -413,6 +413,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testSearchRequestHighlighting() throws IOException { RestHighLevelClient client = highLevelClient(); { @@ -831,6 +832,8 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + + @SuppressWarnings("unused") public void testMultiSearchTemplateWithInlineScript() throws Exception { indexSearchTestData(); RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index d1aed55f44e..22ef30c92b7 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -577,6 +577,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase } } + @SuppressWarnings("unused") public void testSnapshotGetSnapshots() throws IOException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java index c5d53abd978..9165c5cf10d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java @@ -66,6 +66,7 @@ import static org.hamcrest.Matchers.equalTo; */ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase { + @SuppressWarnings("unused") public void testGetStoredScript() throws Exception { RestHighLevelClient client = highLevelClient(); @@ -128,6 +129,7 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase } + @SuppressWarnings("unused") public void testDeleteStoredScript() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java index 8a45195757c..38c8986e1d9 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/TasksClientDocumentationIT.java @@ -66,6 
+66,7 @@ import static org.hamcrest.Matchers.notNullValue; */ public class TasksClientDocumentationIT extends ESRestHighLevelClientTestCase { + @SuppressWarnings("unused") public void testListTasks() throws IOException { RestHighLevelClient client = highLevelClient(); { @@ -149,6 +150,7 @@ public class TasksClientDocumentationIT extends ESRestHighLevelClientTestCase { } } + @SuppressWarnings("unused") public void testCancelTasks() throws IOException { RestHighLevelClient client = highLevelClient(); { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpressionDslTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpressionDslTests.java new file mode 100644 index 00000000000..df94640f172 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/RoleMapperExpressionDslTests.java @@ -0,0 +1,97 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl; + +import org.elasticsearch.client.security.support.expressiondsl.expressions.AllRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.ExceptRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Date; + +import static org.hamcrest.Matchers.equalTo; + +public class RoleMapperExpressionDslTests extends ESTestCase { + + public void testRoleMapperExpressionToXContentType() throws IOException { + + final RoleMapperExpression allExpression = AllRoleMapperExpression.builder() + .addExpression(AnyRoleMapperExpression.builder() + .addExpression(FieldRoleMapperExpression.ofDN("*,ou=admin,dc=example,dc=com")) + .addExpression(FieldRoleMapperExpression.ofUsername("es-admin", "es-system")) + .build()) + .addExpression(FieldRoleMapperExpression.ofGroups("cn=people,dc=example,dc=com")) + .addExpression(new ExceptRoleMapperExpression(FieldRoleMapperExpression.ofMetadata("metadata.terminated_date", new Date( + 1537145401027L)))) + .build(); + + final XContentBuilder builder = XContentFactory.jsonBuilder(); + allExpression.toXContent(builder, ToXContent.EMPTY_PARAMS); + final String output = Strings.toString(builder); + final String expected = + "{"+ + "\"all\":["+ + "{"+ + "\"any\":["+ + "{"+ + "\"field\":{"+ + "\"dn\":[\"*,ou=admin,dc=example,dc=com\"]"+ + "}"+ + "},"+ + "{"+ + "\"field\":{"+ + "\"username\":["+ + "\"es-admin\","+ + "\"es-system\""+ + "]"+ + "}"+ + "}"+ + "]"+ + "},"+ + "{"+ + 
"\"field\":{"+ + "\"groups\":[\"cn=people,dc=example,dc=com\"]"+ + "}"+ + "},"+ + "{"+ + "\"except\":{"+ + "\"field\":{"+ + "\"metadata.terminated_date\":[\"2018-09-17T00:50:01.027Z\"]"+ + "}"+ + "}"+ + "}"+ + "]"+ + "}"; + + assertThat(expected, equalTo(output)); + } + + public void testFieldRoleMapperExpressionThrowsExceptionForMissingMetadataPrefix() { + final IllegalArgumentException ile = expectThrows(IllegalArgumentException.class, () -> FieldRoleMapperExpression.ofMetadata( + "terminated_date", new Date(1537145401027L))); + assertThat(ile.getMessage(), equalTo("metadata key must have prefix 'metadata.'")); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java new file mode 100644 index 00000000000..24ed5684fa8 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/support/expressiondsl/parser/RoleMapperExpressionParserTests.java @@ -0,0 +1,129 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client.security.support.expressiondsl.parser; + +import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.expressions.CompositeRoleMapperExpression; +import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.iterableWithSize; + +public class RoleMapperExpressionParserTests extends ESTestCase { + + public void testParseSimpleFieldExpression() throws Exception { + String json = "{ \"field\": { \"username\" : [\"*@shield.gov\"] } }"; + FieldRoleMapperExpression field = checkExpressionType(parse(json), FieldRoleMapperExpression.class); + assertThat(field.getField(), equalTo("username")); + assertThat(field.getValues(), iterableWithSize(1)); + assertThat(field.getValues().get(0), equalTo("*@shield.gov")); + + assertThat(toJson(field), equalTo(json.replaceAll("\\s", ""))); + } + + public void testParseComplexExpression() throws Exception { + String json = "{ \"any\": [" + + " { \"field\": { \"username\" : \"*@shield.gov\" } }, " + + " { \"all\": [" + + " { \"field\": { \"username\" : \"/.*\\\\@avengers\\\\.(net|org)/\" } }, " + + " { \"field\": { \"groups\" : [ \"admin\", \"operators\" ] } }, " + + " { \"except\":" + + " { \"field\": { \"groups\" : \"disavowed\" } }" + + " }" + + " ] 
}" + + "] }"; + final RoleMapperExpression expr = parse(json); + + assertThat(expr, instanceOf(CompositeRoleMapperExpression.class)); + CompositeRoleMapperExpression any = (CompositeRoleMapperExpression) expr; + + assertThat(any.getElements(), iterableWithSize(2)); + + final FieldRoleMapperExpression fieldShield = checkExpressionType(any.getElements().get(0), + FieldRoleMapperExpression.class); + assertThat(fieldShield.getField(), equalTo("username")); + assertThat(fieldShield.getValues(), iterableWithSize(1)); + assertThat(fieldShield.getValues().get(0), equalTo("*@shield.gov")); + + final CompositeRoleMapperExpression all = checkExpressionType(any.getElements().get(1), + CompositeRoleMapperExpression.class); + assertThat(all.getElements(), iterableWithSize(3)); + + final FieldRoleMapperExpression fieldAvengers = checkExpressionType(all.getElements().get(0), + FieldRoleMapperExpression.class); + assertThat(fieldAvengers.getField(), equalTo("username")); + assertThat(fieldAvengers.getValues(), iterableWithSize(1)); + assertThat(fieldAvengers.getValues().get(0), equalTo("/.*\\@avengers\\.(net|org)/")); + + final FieldRoleMapperExpression fieldGroupsAdmin = checkExpressionType(all.getElements().get(1), + FieldRoleMapperExpression.class); + assertThat(fieldGroupsAdmin.getField(), equalTo("groups")); + assertThat(fieldGroupsAdmin.getValues(), iterableWithSize(2)); + assertThat(fieldGroupsAdmin.getValues().get(0), equalTo("admin")); + assertThat(fieldGroupsAdmin.getValues().get(1), equalTo("operators")); + + final CompositeRoleMapperExpression except = checkExpressionType(all.getElements().get(2), + CompositeRoleMapperExpression.class); + final FieldRoleMapperExpression fieldDisavowed = checkExpressionType(except.getElements().get(0), + FieldRoleMapperExpression.class); + assertThat(fieldDisavowed.getField(), equalTo("groups")); + assertThat(fieldDisavowed.getValues(), iterableWithSize(1)); + assertThat(fieldDisavowed.getValues().get(0), equalTo("disavowed")); + + } + + 
private String toJson(final RoleMapperExpression expr) throws IOException { + final XContentBuilder builder = XContentFactory.jsonBuilder(); + expr.toXContent(builder, ToXContent.EMPTY_PARAMS); + final String output = Strings.toString(builder); + return output; + } + + private T checkExpressionType(RoleMapperExpression expr, Class type) { + assertThat(expr, instanceOf(type)); + return type.cast(expr); + } + + private RoleMapperExpression parse(String json) throws IOException { + return new RoleMapperExpressionParser().parse("rules", XContentType.JSON.xContent().createParser(new NamedXContentRegistry( + Collections.emptyList()), new DeprecationHandler() { + @Override + public void usedDeprecatedName(String usedName, String modernName) { + } + + @Override + public void usedDeprecatedField(String usedName, String replacedWith) { + } + }, json)); + } + +} diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java index ffd3a1f6c0c..52b918e97f1 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.test.rest; import org.apache.http.util.EntityUtils; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Response; @@ -31,57 +30,53 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.Locale; import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.containsString; 
+import static org.hamcrest.Matchers.instanceOf; /** * Tests that wait for refresh is fired if the index is closed. */ -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33533") public class WaitForRefreshAndCloseIT extends ESRestTestCase { @Before public void setupIndex() throws IOException { - try { - client().performRequest(new Request("DELETE", indexName())); - } catch (ResponseException e) { - // If we get an error, it should be because the index doesn't exist - assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); - } - Request request = new Request("PUT", indexName()); + Request request = new Request("PUT", "/test"); request.setJsonEntity("{\"settings\":{\"refresh_interval\":-1}}"); client().performRequest(request); } @After public void cleanupIndex() throws IOException { - client().performRequest(new Request("DELETE", indexName())); - } - - private String indexName() { - return getTestName().toLowerCase(Locale.ROOT); + client().performRequest(new Request("DELETE", "/test")); } private String docPath() { - return indexName() + "/test/1"; + return "test/_doc/1"; } public void testIndexAndThenClose() throws Exception { - closeWhileListenerEngaged(start("PUT", "", "{\"test\":\"test\"}")); + Request request = new Request("PUT", docPath()); + request.setJsonEntity("{\"test\":\"test\"}"); + closeWhileListenerEngaged(start(request)); } public void testUpdateAndThenClose() throws Exception { - Request request = new Request("PUT", docPath()); - request.setJsonEntity("{\"test\":\"test\"}"); - client().performRequest(request); - closeWhileListenerEngaged(start("POST", "/_update", "{\"doc\":{\"name\":\"test\"}}")); + Request createDoc = new Request("PUT", docPath()); + createDoc.setJsonEntity("{\"test\":\"test\"}"); + client().performRequest(createDoc); + Request updateDoc = new Request("POST", docPath() + "/_update"); + updateDoc.setJsonEntity("{\"doc\":{\"name\":\"test\"}}"); + closeWhileListenerEngaged(start(updateDoc)); } public void 
testDeleteAndThenClose() throws Exception { Request request = new Request("PUT", docPath()); request.setJsonEntity("{\"test\":\"test\"}"); client().performRequest(request); - closeWhileListenerEngaged(start("DELETE", "", null)); + closeWhileListenerEngaged(start(new Request("DELETE", docPath()))); } private void closeWhileListenerEngaged(ActionFuture future) throws Exception { @@ -89,40 +84,52 @@ public class WaitForRefreshAndCloseIT extends ESRestTestCase { assertBusy(() -> { Map stats; try { - stats = entityAsMap(client().performRequest(new Request("GET", indexName() + "/_stats/refresh"))); + stats = entityAsMap(client().performRequest(new Request("GET", "/test/_stats/refresh"))); } catch (IOException e) { throw new RuntimeException(e); } - @SuppressWarnings("unchecked") - Map indices = (Map) stats.get("indices"); - @SuppressWarnings("unchecked") - Map theIndex = (Map) indices.get(indexName()); - @SuppressWarnings("unchecked") - Map total = (Map) theIndex.get("total"); - @SuppressWarnings("unchecked") - Map refresh = (Map) total.get("refresh"); - int listeners = (int) refresh.get("listeners"); + Map indices = (Map) stats.get("indices"); + Map theIndex = (Map) indices.get("test"); + Map total = (Map) theIndex.get("total"); + Map refresh = (Map) total.get("refresh"); + int listeners = (Integer) refresh.get("listeners"); assertEquals(1, listeners); }); // Close the index. That should flush the listener. - client().performRequest(new Request("POST", indexName() + "/_close")); + client().performRequest(new Request("POST", "/test/_close")); - // The request shouldn't fail. It certainly shouldn't hang. - future.get(); + /* + * The request may fail, but we really, really, really want to make + * sure that it doesn't time out. + */ + try { + future.get(1, TimeUnit.MINUTES); + } catch (ExecutionException ee) { + /* + * If it *does* fail it should fail with a FORBIDDEN error because + * it attempts to take an action on a closed index. 
Again, it'd be + * nice if all requests waiting for refresh came back even though + * the index is closed and most do, but sometimes they bump into + * the index being closed. At least they don't hang forever. That'd + * be a nightmare. + */ + assertThat(ee.getCause(), instanceOf(ResponseException.class)); + ResponseException re = (ResponseException) ee.getCause(); + assertEquals(403, re.getResponse().getStatusLine().getStatusCode()); + assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("FORBIDDEN/4/index closed")); + } } - private ActionFuture start(String method, String path, String body) { + private ActionFuture start(Request request) { PlainActionFuture future = new PlainActionFuture<>(); - Request request = new Request(method, docPath() + path); request.addParameter("refresh", "wait_for"); request.addParameter("error_trace", ""); - request.setJsonEntity(body); client().performRequestAsync(request, new ResponseListener() { @Override public void onSuccess(Response response) { try { - future.onResponse(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8)); + future.onResponse(EntityUtils.toString(response.getEntity())); } catch (IOException e) { future.onFailure(e); } diff --git a/docs/build.gradle b/docs/build.gradle index 864567ba835..935149bdc84 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -100,6 +100,7 @@ buildRestTests.docs = fileTree(projectDir) { exclude 'reference/rollup/apis/delete-job.asciidoc' exclude 'reference/rollup/apis/get-job.asciidoc' exclude 'reference/rollup/apis/rollup-caps.asciidoc' + exclude 'reference/graph/explore.asciidoc' } listSnippets.docs = buildRestTests.docs diff --git a/docs/java-api/query-dsl/type-query.asciidoc b/docs/java-api/query-dsl/type-query.asciidoc index cbbc6666077..93c7bd76dfe 100644 --- a/docs/java-api/query-dsl/type-query.asciidoc +++ b/docs/java-api/query-dsl/type-query.asciidoc @@ -1,6 +1,8 @@ [[java-query-dsl-type-query]] ==== Type Query +deprecated[7.0.0, 
Types are being removed, prefer filtering on a field instead. For more information, please see {ref}/removal-of-types.html[Removal of mapping types].] + See {ref}/query-dsl-type-query.html[Type Query] ["source","java",subs="attributes,callouts,macros"] diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index 34572acaa96..92133822fa5 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -59,7 +59,7 @@ PUT my_index "analysis": { "analyzer": { "my_custom_analyzer": { - "type": "custom", + "type": "custom", <1> "tokenizer": "standard", "char_filter": [ "html_strip" @@ -82,6 +82,11 @@ POST my_index/_analyze -------------------------------- // CONSOLE +<1> Setting `type` to `custom` tells Elasticsearch that we are defining a custom analyzer. + Compare this to how <>: + `type` will be set to the name of the built-in analyzer, like + <> or <>. + ///////////////////// [source,js] diff --git a/docs/reference/api-conventions.asciidoc b/docs/reference/api-conventions.asciidoc index 42216a9a0fc..be41c0fdc77 100644 --- a/docs/reference/api-conventions.asciidoc +++ b/docs/reference/api-conventions.asciidoc @@ -1,5 +1,5 @@ [[api-conventions]] -= API Conventions += API conventions [partintro] -- diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index 27d5cc316a6..33246b57aa4 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -108,9 +108,11 @@ The order of precedence for cluster settings is: 2. persistent cluster settings 3. settings in the `elasticsearch.yml` configuration file. -It's best to use the `elasticsearch.yml` file only -for local configurations, and set all cluster-wide settings with the -`settings` API. 
+It's best to set all cluster-wide settings with the `settings` API and use the +`elasticsearch.yml` file only for local configurations. This way you can be sure that +the setting is the same on all nodes. If, on the other hand, you define different +settings on different nodes by accident using the configuration file, it is very +difficult to notice these discrepancies. You can find the list of settings that you can dynamically update in <>. diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 6e536f382dd..2763a31fb4e 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -1,5 +1,5 @@ [[getting-started]] -= Getting Started += Getting started [partintro] -- @@ -158,19 +158,6 @@ And now we are ready to start our node and single cluster: ./elasticsearch -------------------------------------------------- -[float] -=== Installation with Homebrew - -On macOS, Elasticsearch can also be installed via https://brew.sh[Homebrew]: - -["source","sh"] --------------------------------------------------- -brew install elasticsearch --------------------------------------------------- - -If installation succeeds, Homebrew will finish by saying that you can start Elasticsearch by entering -`elasticsearch`. Do that now. The expected response is described below, under <>. 
- [float] === Installation example with MSI Windows Installer diff --git a/x-pack/docs/en/rest-api/graph/explore.asciidoc b/docs/reference/graph/explore.asciidoc similarity index 99% rename from x-pack/docs/en/rest-api/graph/explore.asciidoc rename to docs/reference/graph/explore.asciidoc index f9902fcbe48..91b07a63129 100644 --- a/x-pack/docs/en/rest-api/graph/explore.asciidoc +++ b/docs/reference/graph/explore.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="platinum"] [[graph-explore-api]] == Explore API diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 70c3d09dc93..81bc96bb8f9 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -1,6 +1,6 @@ [[index-modules]] -= Index Modules += Index modules [partintro] -- diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index 0aabeadf227..cfaf9d7e09c 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -29,8 +29,6 @@ include::setup/bootstrap-checks-xes.asciidoc[] :edit_url: include::upgrade.asciidoc[] -include::migration/index.asciidoc[] - include::api-conventions.asciidoc[] include::docs.asciidoc[] @@ -78,6 +76,8 @@ include::glossary.asciidoc[] include::release-notes/highlights.asciidoc[] +include::migration/index.asciidoc[] + include::release-notes.asciidoc[] include::redirects.asciidoc[] diff --git a/docs/reference/ingest.asciidoc b/docs/reference/ingest.asciidoc index 18349beab6a..772013534b6 100644 --- a/docs/reference/ingest.asciidoc +++ b/docs/reference/ingest.asciidoc @@ -1,5 +1,5 @@ [[ingest]] -= Ingest Node += Ingest node [partintro] -- diff --git a/docs/reference/mapping/params/format.asciidoc b/docs/reference/mapping/params/format.asciidoc index 7d621f87522..120233a8f0f 100644 --- a/docs/reference/mapping/params/format.asciidoc +++ b/docs/reference/mapping/params/format.asciidoc @@ -31,10 +31,6 @@ Many APIs which support date values also support <> expressions, such 
as `now-1m/d` -- the current time, minus one month, rounded down to the nearest day. -TIP: The `format` setting can be updated on existing fields using the -<>. - - [[custom-date-formats]] ==== Custom date formats diff --git a/docs/reference/migration/migrate_7_0.asciidoc b/docs/reference/migration/migrate_7_0.asciidoc index 924a6984dc0..45f383435e4 100644 --- a/docs/reference/migration/migrate_7_0.asciidoc +++ b/docs/reference/migration/migrate_7_0.asciidoc @@ -1,30 +1,14 @@ [[breaking-changes-7.0]] == Breaking changes in 7.0 +++++ +7.0 +++++ This section discusses the changes that you need to be aware of when migrating your application to Elasticsearch 7.0. See also <> and <>. -[float] -=== Indices created before 7.0 - -Elasticsearch 7.0 can read indices created in version 6.0 or above. An -Elasticsearch 7.0 node will not start in the presence of indices created in a -version of Elasticsearch before 6.0. - -[IMPORTANT] -.Reindex indices from Elasticsearch 5.x or before -========================================= - -Indices created in Elasticsearch 5.x or before will need to be reindexed with -Elasticsearch 6.x in order to be readable by Elasticsearch 7.x. - -========================================= - -[float] -=== Also see: - * <> * <> * <> @@ -41,6 +25,22 @@ Elasticsearch 6.x in order to be readable by Elasticsearch 7.x. * <> * <> +[float] +=== Indices created before 7.0 + +Elasticsearch 7.0 can read indices created in version 6.0 or above. An +Elasticsearch 7.0 node will not start in the presence of indices created in a +version of Elasticsearch before 6.0. + +[IMPORTANT] +.Reindex indices from Elasticsearch 5.x or before +========================================= + +Indices created in Elasticsearch 5.x or before will need to be reindexed with +Elasticsearch 6.x in order to be readable by Elasticsearch 7.x. 
+ +========================================= + include::migrate_7_0/aggregations.asciidoc[] include::migrate_7_0/analysis.asciidoc[] include::migrate_7_0/cluster.asciidoc[] diff --git a/docs/reference/migration/migrate_7_0/aggregations.asciidoc b/docs/reference/migration/migrate_7_0/aggregations.asciidoc index 08f181b2919..b29f741dd85 100644 --- a/docs/reference/migration/migrate_7_0/aggregations.asciidoc +++ b/docs/reference/migration/migrate_7_0/aggregations.asciidoc @@ -1,21 +1,26 @@ +[float] [[breaking_70_aggregations_changes]] === Aggregations changes +[float] ==== Deprecated `global_ordinals_hash` and `global_ordinals_low_cardinality` execution hints for terms aggregations have been removed These `execution_hint` are removed and should be replaced by `global_ordinals`. +[float] ==== `search.max_buckets` in the cluster setting The dynamic cluster setting named `search.max_buckets` now defaults to 10,000 (instead of unlimited in the previous version). Requests that try to return more than the limit will fail with an exception. +[float] ==== `missing` option of the `composite` aggregation has been removed The `missing` option of the `composite` aggregation, deprecated in 6.x, has been removed. `missing_bucket` should be used instead. 
+[float] ==== Replaced `params._agg` with `state` context variable in scripted metric aggregations The object used to share aggregation state between the scripts in a Scripted Metric diff --git a/docs/reference/migration/migrate_7_0/analysis.asciidoc b/docs/reference/migration/migrate_7_0/analysis.asciidoc index 6e6cc5b078d..e4b27def9f8 100644 --- a/docs/reference/migration/migrate_7_0/analysis.asciidoc +++ b/docs/reference/migration/migrate_7_0/analysis.asciidoc @@ -1,12 +1,15 @@ +[float] [[breaking_70_analysis_changes]] === Analysis changes +[float] ==== Limiting the number of tokens produced by _analyze To safeguard against out of memory errors, the number of tokens that can be produced using the `_analyze` endpoint has been limited to 10000. This default limit can be changed for a particular index with the index setting `index.analyze.max_token_count`. +[float] ==== Limiting the length of an analyzed text during highlighting Highlighting a text that was indexed without offsets or term vectors, @@ -16,6 +19,7 @@ To protect against this, the maximum number of characters that will be analyzed limited to 1000000. This default limit can be changed for a particular index with the index setting `index.highlight.max_analyzed_offset`. +[float] ==== `delimited_payload_filter` renaming The `delimited_payload_filter` was deprecated and renamed to `delimited_payload` in 6.2. @@ -23,6 +27,7 @@ Using it in indices created before 7.0 will issue deprecation warnings. Using th name in new indices created in 7.0 will throw an error. Use the new name `delimited_payload` instead. +[float] ==== `standard` filter has been removed The `standard` token filter has been removed because it doesn't change anything in the stream. 
diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc index a58223023bd..71a8e1aa015 100644 --- a/docs/reference/migration/migrate_7_0/api.asciidoc +++ b/docs/reference/migration/migrate_7_0/api.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_api_changes]] === API changes +[float] ==== Camel case and underscore parameters deprecated in 6.x have been removed A number of duplicate parameters deprecated in 6.x have been removed from Bulk request, Multi Get request, Term Vectors request, and More Like This Query @@ -22,6 +24,7 @@ The following parameters starting with underscore have been removed: Instead of these removed parameters, use their non camel case equivalents without starting underscore, e.g. use `version_type` instead of `_version_type` or `versionType`. +[float] ==== Thread pool info In previous versions of Elasticsearch, the thread pool info returned in the @@ -48,10 +51,12 @@ aligns the output of the API with the configuration values for thread pools. Note that `core` and `max` will be populated for scaling thread pools, and `size` will be populated for fixed thread pools. +[float] ==== The parameter `fields` deprecated in 6.x has been removed from Bulk request and Update request. The Update API returns `400 - Bad request` if request contains unknown parameters (instead of ignored in the previous version). +[float] [[remove-suggest-metric]] ==== Remove support for `suggest` metric/index metric in indices stats and nodes stats APIs @@ -66,6 +71,7 @@ In the past, `fields` could be provided either as a parameter, or as part of the body. Specifying `fields` in the request body as opposed to a parameter was deprecated in 6.4.0, and is now unsupported in 7.0.0. +[float] ==== `copy_settings` is deprecated on shrink and split APIs Versions of Elasticsearch prior to 6.4.0 did not copy index settings on shrink @@ -76,10 +82,12 @@ will be for such settings to be copied on such operations. 
To enable users in the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in 8.0.0. +[float] ==== The deprecated stored script contexts have now been removed When putting stored scripts, support for storing them with the deprecated `template` context or without a context is now removed. Scripts must be stored using the `script` context as mentioned in the documentation. +[float] ==== Get Aliases API limitations when {security} is enabled removed The behavior and response codes of the get aliases API no longer vary @@ -88,6 +96,7 @@ depending on whether {security} is enabled. Previously a current user was not authorized for any alias. An empty response with status 200 - OK is now returned instead at all times. +[float] ==== Put User API response no longer has `user` object The Put User API response was changed in 6.5.0 to add the `created` field diff --git a/docs/reference/migration/migrate_7_0/cluster.asciidoc b/docs/reference/migration/migrate_7_0/cluster.asciidoc index e9584074d73..d518d29987d 100644 --- a/docs/reference/migration/migrate_7_0/cluster.asciidoc +++ b/docs/reference/migration/migrate_7_0/cluster.asciidoc @@ -1,16 +1,20 @@ +[float] [[breaking_70_cluster_changes]] === Cluster changes +[float] ==== `:` is no longer allowed in cluster name Due to cross-cluster search using `:` to separate a cluster and index name, cluster names may no longer contain `:`. +[float] ==== New default for `wait_for_active_shards` parameter of the open index command The default value for the `wait_for_active_shards` parameter of the open index API is changed from 0 to 1, which means that the command will now by default wait for all primary shards of the opened index to be allocated. +[float] ==== Shard preferences `_primary`, `_primary_first`, `_replica`, and `_replica_first` are removed These shard preferences are removed in favour of the `_prefer_nodes` and `_only_nodes` preferences. 
diff --git a/docs/reference/migration/migrate_7_0/indices.asciidoc b/docs/reference/migration/migrate_7_0/indices.asciidoc index a47cc6f4324..d040343d32e 100644 --- a/docs/reference/migration/migrate_7_0/indices.asciidoc +++ b/docs/reference/migration/migrate_7_0/indices.asciidoc @@ -1,17 +1,20 @@ +[float] [[breaking_70_indices_changes]] === Indices changes +[float] ==== `:` is no longer allowed in index name Due to cross-cluster search using `:` to separate a cluster and index name, index names may no longer contain `:`. +[float] ==== `index.unassigned.node_left.delayed_timeout` may no longer be negative Negative values were interpreted as zero in earlier versions but are no longer accepted. - +[float] ==== `_flush` and `_force_merge` will no longer refresh In previous versions issuing a `_flush` or `_force_merge` (with `flush=true`) @@ -20,7 +23,7 @@ visible to searches and non-realtime GET operations. From now on these operation don't have this side-effect anymore. To make documents visible an explicit `_refresh` call is needed unless the index is refreshed by the internal scheduler. - +[float] ==== Limit to the difference between max_size and min_size in NGramTokenFilter and NGramTokenizer To safeguard against creating too many index terms, the difference between `max_ngram` and @@ -29,7 +32,7 @@ limit can be changed with the index setting `index.max_ngram_diff`. Note that if exceeded a error is thrown only for new indices. For existing pre-7.0 indices, a deprecation warning is logged. - +[float] ==== Limit to the difference between max_size and min_size in ShingleTokenFilter To safeguard against creating too many tokens, the difference between `max_shingle_size` and @@ -38,6 +41,7 @@ limit can be changed with the index setting `index.max_shingle_diff`. Note that exceeded a error is thrown only for new indices. For existing pre-7.0 indices, a deprecation warning is logged. 
+[float] ==== Document distribution changes Indices created with version `7.0.0` onwards will have an automatic `index.number_of_routing_shards` @@ -46,6 +50,7 @@ shards the index has. In order to maintain the exact same distribution as a pre `index.number_of_routing_shards` must be set to the `index.number_of_shards` at index creation time. Note: if the number of routing shards equals the number of shards `_split` operations are not supported. +[float] ==== Skipped background refresh on search idle shards Shards belonging to an index that does not have an explicit @@ -56,6 +61,7 @@ that access a search idle shard will be "parked" until the next refresh happens. Indexing requests with `wait_for_refresh` will also trigger a background refresh. +[float] ==== Remove deprecated url parameters for Clear Indices Cache API The following previously deprecated url parameter have been removed: @@ -65,12 +71,14 @@ The following previously deprecated url parameter have been removed: * `request_cache` - use `request` instead * `field_data` - use `fielddata` instead +[float] ==== `network.breaker.inflight_requests.overhead` increased to 2 Previously the in flight requests circuit breaker considered only the raw byte representation. By bumping the value of `network.breaker.inflight_requests.overhead` from 1 to 2, this circuit breaker considers now also the memory overhead of representing the request as a structured object. +[float] ==== Parent circuit breaker changes The parent circuit breaker defines a new setting `indices.breaker.total.use_real_memory` which is @@ -79,6 +87,7 @@ heap memory instead of only considering the reserved memory by child circuit bre setting is `true`, the default parent breaker limit also changes from 70% to 95% of the JVM heap size. The previous behavior can be restored by setting `indices.breaker.total.use_real_memory` to `false`. 
+[float] ==== `fix` value for `index.shard.check_on_startup` is removed Deprecated option value `fix` for setting `index.shard.check_on_startup` is not supported. \ No newline at end of file diff --git a/docs/reference/migration/migrate_7_0/java.asciidoc b/docs/reference/migration/migrate_7_0/java.asciidoc index dde61259a21..7d68ff2fb57 100644 --- a/docs/reference/migration/migrate_7_0/java.asciidoc +++ b/docs/reference/migration/migrate_7_0/java.asciidoc @@ -1,23 +1,28 @@ +[float] [[breaking_70_java_changes]] === Java API changes +[float] ==== `isShardsAcked` deprecated in `6.2` has been removed `isShardsAcked` has been replaced by `isShardsAcknowledged` in `CreateIndexResponse`, `RolloverResponse` and `CreateIndexClusterStateUpdateResponse`. +[float] ==== `prepareExecute` removed from the client api The `prepareExecute` method which created a request builder has been removed from the client api. Instead, construct a builder for the appropriate request directly. +[float] ==== Some Aggregation classes have moved packages * All classes present in `org.elasticsearch.search.aggregations.metrics.*` packages were moved to a single `org.elasticsearch.search.aggregations.metrics` package. 
+[float] ==== `Retry.withBackoff` methods with `Settings` removed The variants of `Retry.withBackoff` that included `Settings` have been removed diff --git a/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc b/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc index 77f5266763f..0820c7f01cc 100644 --- a/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc +++ b/docs/reference/migration/migrate_7_0/low_level_restclient.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_low_level_restclient_changes]] === Low-level REST client changes +[float] ==== Deprecated flavors of performRequest have been removed We deprecated the flavors of `performRequest` and `performRequestAsync` that @@ -8,6 +10,7 @@ do not take `Request` objects in 6.4.0 in favor of the flavors that take `Request` objects because those methods can be extended without breaking backwards compatibility. +[float] ==== Removed setHosts We deprecated `setHosts` in 6.4.0 in favor of `setNodes` because it supports diff --git a/docs/reference/migration/migrate_7_0/mappings.asciidoc b/docs/reference/migration/migrate_7_0/mappings.asciidoc index 4983cb2da57..5ee1615796c 100644 --- a/docs/reference/migration/migrate_7_0/mappings.asciidoc +++ b/docs/reference/migration/migrate_7_0/mappings.asciidoc @@ -1,36 +1,44 @@ +[float] [[breaking_70_mappings_changes]] === Mapping changes +[float] ==== The `_all` meta field is removed The `_all` field deprecated in 6 have now been removed. +[float] ==== The `_uid` meta field is removed This field used to index a composite key formed of the `_type` and the `_id`. Now that indices cannot have multiple types, this has been removed in favour of `_id`. +[float] ==== The `_default_` mapping is no longer allowed The `_default_` mapping has been deprecated in 6.0 and is now no longer allowed in 7.0. Trying to configure a `_default_` mapping on 7.x indices will result in an error. 
+[float] ==== `index_options` for numeric fields has been removed The `index_options` field for numeric fields has been deprecated in 6 and has now been removed. +[float] ==== Limiting the number of `nested` json objects To safeguard against out of memory errors, the number of nested json objects within a single document across all fields has been limited to 10000. This default limit can be changed with the index setting `index.mapping.nested_objects.limit`. +[float] ==== The `update_all_types` option has been removed This option is useless now that all indices have at most one type. +[float] ==== The `classic` similarity has been removed The `classic` similarity relied on coordination factors for scoring to be good @@ -39,6 +47,7 @@ Lucene, which means that the `classic` similarity now produces scores of lower quality. It is advised to switch to `BM25` instead, which is widely accepted as a better alternative. +[float] ==== Similarities fail when unsupported options are provided An error will now be thrown when unknown configuration options are provided diff --git a/docs/reference/migration/migrate_7_0/packaging.asciidoc b/docs/reference/migration/migrate_7_0/packaging.asciidoc index 934522db716..e2380613d8f 100644 --- a/docs/reference/migration/migrate_7_0/packaging.asciidoc +++ b/docs/reference/migration/migrate_7_0/packaging.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_packaging_changes]] === Packaging changes +[float] [[systemd-service-file-config]] ==== systemd service file is no longer configuration @@ -9,6 +11,7 @@ was previously marked as a configuration file in rpm and deb packages. Overrides to the systemd elasticsearch service should be made in `/etc/systemd/system/elasticsearch.service.d/override.conf`. 
+[float] ==== tar package no longer includes windows specific files The tar package previously included files in the `bin` directory meant only diff --git a/docs/reference/migration/migrate_7_0/plugins.asciidoc b/docs/reference/migration/migrate_7_0/plugins.asciidoc index 462823a61fd..5fcd2bb9526 100644 --- a/docs/reference/migration/migrate_7_0/plugins.asciidoc +++ b/docs/reference/migration/migrate_7_0/plugins.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_plugins_changes]] === Plugins changes +[float] ==== Azure Repository plugin * The legacy azure settings which where starting with `cloud.azure.storage.` prefix have been removed. @@ -12,6 +14,7 @@ You must set it per azure client instead. Like `azure.client.default.timeout: 10 See {plugins}/repository-azure-repository-settings.html#repository-azure-repository-settings[Azure Repository settings]. +[float] ==== Google Cloud Storage Repository plugin * The repository settings `application_name`, `connect_timeout` and `read_timeout` have been removed and @@ -19,11 +22,13 @@ must now be specified in the client settings instead. See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Storage Client Settings]. 
+[float] ==== Analysis Plugin changes * The misspelled helper method `requriesAnalysisSettings(AnalyzerProvider provider)` has been renamed to `requiresAnalysisSettings` +[float] ==== File-based discovery plugin * This plugin has been removed since its functionality is now part of diff --git a/docs/reference/migration/migrate_7_0/restclient.asciidoc b/docs/reference/migration/migrate_7_0/restclient.asciidoc index 470996cfeff..3c0237db6e7 100644 --- a/docs/reference/migration/migrate_7_0/restclient.asciidoc +++ b/docs/reference/migration/migrate_7_0/restclient.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_restclient_changes]] === High-level REST client changes +[float] ==== API methods accepting `Header` argument have been removed All API methods accepting headers as a `Header` varargs argument, deprecated @@ -12,6 +14,7 @@ In case you are specifying headers e.g. `client.index(indexRequest, new Header("name" "value"))` becomes `client.index(indexRequest, RequestOptions.DEFAULT.toBuilder().addHeader("name", "value").build());` +[float] ==== Cluster Health API default to `cluster` level The Cluster Health API used to default to `shards` level to ease migration diff --git a/docs/reference/migration/migrate_7_0/scripting.asciidoc b/docs/reference/migration/migrate_7_0/scripting.asciidoc index 79380f84204..de312c1c723 100644 --- a/docs/reference/migration/migrate_7_0/scripting.asciidoc +++ b/docs/reference/migration/migrate_7_0/scripting.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_scripting_changes]] === Scripting changes +[float] ==== getDate() and getDates() removed Fields of type `long` and `date` had `getDate()` and `getDates()` methods @@ -12,6 +14,7 @@ now been removed. Instead, use `.value` on `date` fields, or explicitly parse `long` fields into a date object using `Instance.ofEpochMillis(doc["myfield"].value)`. 
+[float] ==== Script errors will return as `400` error codes Malformed scripts, either in search templates, ingest pipelines or search diff --git a/docs/reference/migration/migrate_7_0/search.asciidoc b/docs/reference/migration/migrate_7_0/search.asciidoc index a7d32896e97..b7aa15861af 100644 --- a/docs/reference/migration/migrate_7_0/search.asciidoc +++ b/docs/reference/migration/migrate_7_0/search.asciidoc @@ -1,6 +1,8 @@ +[float] [[breaking_70_search_changes]] === Search and Query DSL changes +[float] ==== Changes to queries * The default value for `transpositions` parameter of `fuzzy` query has been changed to `true`. @@ -18,6 +20,7 @@ * Attempts to generate multi-term phrase queries against non-text fields with a custom analyzer will now throw an exception +[float] ==== Adaptive replica selection enabled by default Adaptive replica selection has been enabled by default. If you wish to return to @@ -35,6 +38,7 @@ PUT /_cluster/settings -------------------------------------------------- // CONSOLE +[float] ==== Search API returns `400` for invalid requests The Search API returns `400 - Bad request` while it would previously return @@ -48,12 +52,14 @@ The Search API returns `400 - Bad request` while it would previously return * number of filters in the adjacency matrix aggregation is too large * script compilation errors +[float] ==== Scroll queries cannot use the `request_cache` anymore Setting `request_cache:true` on a query that creates a scroll (`scroll=1m`) has been deprecated in 6 and will now return a `400 - Bad request`. Scroll queries are not meant to be cached. +[float] ==== Scroll queries cannot use `rescore` anymore Including a rescore clause on a query that creates a scroll (`scroll=1m`) has been deprecated in 6.5 and will now return a `400 - Bad request`. Allowing @@ -61,6 +67,7 @@ rescore on scroll queries would break the scroll sort. In the 6.x line, the rescore clause was silently ignored (for scroll queries), and it was allowed in the 5.x line. 
+[float] ==== Term Suggesters supported distance algorithms The following string distance algorithms were given additional names in 6.2 and @@ -70,7 +77,7 @@ removed. * `levenstein` - replaced by `levenshtein` * `jarowinkler` - replaced by `jaro_winkler` - +[float] ==== Limiting the number of terms that can be used in a Terms Query request Executing a Terms Query with a lot of terms may degrade the cluster performance, @@ -79,7 +86,7 @@ To safeguard against this, the maximum number of terms that can be used in a Terms Query request has been limited to 65536. This default maximum can be changed for a particular index with the index setting `index.max_terms_count`. - +[float] ==== Limiting the length of regex that can be used in a Regexp Query request Executing a Regexp Query with a long regex string may degrade search performance. @@ -87,11 +94,13 @@ To safeguard against this, the maximum length of regex that can be used in a Regexp Query request has been limited to 1000. This default maximum can be changed for a particular index with the index setting `index.max_regex_length`. +[float] ==== Invalid `_search` request body Search requests with extra content after the main object will no longer be accepted by the `_search` endpoint. A parsing exception will be thrown instead. +[float] ==== Context Completion Suggester The ability to query and index context enabled suggestions without context, @@ -102,12 +111,14 @@ considerably. For geo context the value of the `path` parameter is now validated against the mapping, and the context is only accepted if `path` points to a field with `geo_point` type. +[float] ==== Semantics changed for `max_concurrent_shard_requests` `max_concurrent_shard_requests` used to limit the total number of concurrent shard requests a single high level search request can execute. In 7.0 this changed to be the max number of concurrent shard requests per node. The default is now `5`. 
+[float] ==== `max_score` set to `null` when scores are not tracked `max_score` used to be set to `0` whenever scores are not tracked. `null` is now used diff --git a/docs/reference/migration/migrate_7_0/settings.asciidoc b/docs/reference/migration/migrate_7_0/settings.asciidoc index e4b132b38d6..85648da4f0d 100644 --- a/docs/reference/migration/migrate_7_0/settings.asciidoc +++ b/docs/reference/migration/migrate_7_0/settings.asciidoc @@ -1,18 +1,21 @@ +[float] [[breaking_70_settings_changes]] - === Settings changes +[float] ==== The default for `node.name` is now the hostname `node.name` now defaults to the hostname at the time when Elasticsearch is started. Previously the default node name was the first eight characters of the node id. It can still be configured explicitly in `elasticsearch.yml`. +[float] ==== Percolator * The deprecated `index.percolator.map_unmapped_fields_as_string` setting has been removed in favour of the `index.percolator.map_unmapped_fields_as_text` setting. +[float] ==== Index thread pool * Internally, single-document index/delete/update requests are executed as bulk @@ -21,6 +24,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`. longer needed and has been removed. As such, the settings `thread_pool.index.size` and `thread_pool.index.queue_size` have been removed. +[float] [[write-thread-pool-fallback]] ==== Write thread pool fallback @@ -32,6 +36,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`. available to keep the display output in APIs as `bulk` instead of `write`. These fallback settings and this system property have been removed. +[float] [[remove-http-enabled]] ==== Http enabled setting removed @@ -39,6 +44,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`. use of the transport client. This setting has been removed, as the transport client will be removed in the future, thus requiring HTTP to always be enabled. 
+[float] [[remove-http-pipelining-setting]] ==== Http pipelining setting removed @@ -47,6 +53,7 @@ This setting has been removed, as disabling http pipelining support on the serve provided little value. The setting `http.pipelining.max_events` can still be used to limit the number of pipelined requests in-flight. +[float] ==== Cross-cluster search settings renamed The cross-cluster search remote cluster connection infrastructure is also used diff --git a/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc b/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc index 6dbd24b13a1..2098eb3574c 100644 --- a/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc +++ b/docs/reference/migration/migrate_7_0/snapshotstats.asciidoc @@ -1,3 +1,4 @@ +[float] [[breaking_70_snapshotstats_changes]] === Snapshot stats changes @@ -7,6 +8,7 @@ Snapshot stats details are provided in a new structured way: * `incremental` section for those files that actually needed to be copied over as part of the incremental snapshotting. * In case of a snapshot that's still in progress, there's also a `processed` section for files that are in the process of being copied. +[float] ==== Deprecated `number_of_files`, `processed_files`, `total_size_in_bytes` and `processed_size_in_bytes` snapshot stats properties have been removed * Properties `number_of_files` and `total_size_in_bytes` are removed and should be replaced by values of nested object `total`. diff --git a/docs/reference/ml/apis/find-file-structure.asciidoc b/docs/reference/ml/apis/find-file-structure.asciidoc index 61987221a76..0da485a3f0a 100644 --- a/docs/reference/ml/apis/find-file-structure.asciidoc +++ b/docs/reference/ml/apis/find-file-structure.asciidoc @@ -159,7 +159,7 @@ format corresponds to the primary timestamp, but you do not want to specify the full `grok_pattern`. 
If this parameter is not specified, the structure finder chooses the best format from -the formats it knows, which are: +the formats it knows, which are these Joda formats and their Java time equivalents: * `dd/MMM/YYYY:HH:mm:ss Z` * `EEE MMM dd HH:mm zzz YYYY` @@ -178,7 +178,7 @@ the formats it knows, which are: * `MMM dd HH:mm:ss` * `MMM dd HH:mm:ss,SSS` * `MMM dd YYYY HH:mm:ss` -* `MMM dd, YYYY K:mm:ss a` +* `MMM dd, YYYY h:mm:ss a` * `TAI64N` * `UNIX` * `UNIX_MS` @@ -487,3 +487,791 @@ If the request does not encounter errors, you receive the following result: may provide clues that the data needs to be cleaned or transformed prior to use by other {ml} functionality. +The next example shows how it's possible to find the structure of some New York +City yellow cab trip data. The first `curl` command downloads the data, the +first 20000 lines of which are then piped into the `find_file_structure` +endpoint. The `lines_to_sample` query parameter of the endpoint is set to 20000 +to match what is specified in the `head` command. + +[source,js] +---- +curl -s "s3.amazonaws.com/nyc-tlc/trip+data/yellow_tripdata_2018-06.csv" | head -20000 | curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty&lines_to_sample=20000" -T - +---- +// NOTCONSOLE +// Not converting to console because this shows how curl can be used + +-- +NOTE: The `Content-Type: application/json` header must be set even though in +this case the data is not JSON. (Alternatively the `Content-Type` can be set +to any other supported by Elasticsearch, but it must be set.) 
+-- + +If the request does not encounter errors, you receive the following result: +[source,js] +---- +{ + "num_lines_analyzed" : 20000, + "num_messages_analyzed" : 19998, <1> + "sample_start" : "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID,store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount,improvement_surcharge,total_amount\n\n1,2018-06-01 00:15:40,2018-06-01 00:16:46,1,.00,1,N,145,145,2,3,0.5,0.5,0,0,0.3,4.3\n", + "charset" : "UTF-8", + "has_byte_order_marker" : false, + "format" : "delimited", <2> + "multiline_start_pattern" : "^.*?,\"?\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", + "exclude_lines_pattern" : "^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?,\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?,\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?", + "column_names" : [ <3> + "VendorID", + "tpep_pickup_datetime", + "tpep_dropoff_datetime", + "passenger_count", + "trip_distance", + "RatecodeID", + "store_and_fwd_flag", + "PULocationID", + "DOLocationID", + "payment_type", + "fare_amount", + "extra", + "mta_tax", + "tip_amount", + "tolls_amount", + "improvement_surcharge", + "total_amount" + ], + "has_header_row" : true, <4> + "delimiter" : ",", <5> + "quote" : "\"", <6> + "timestamp_field" : "tpep_pickup_datetime", <7> + "joda_timestamp_formats" : [ <8> + "YYYY-MM-dd HH:mm:ss" + ], + "java_timestamp_formats" : [ <9> + "yyyy-MM-dd HH:mm:ss" + ], + "need_client_timezone" : true, <10> + "mappings" : { + "@timestamp" : { + "type" : "date" + }, + "DOLocationID" : { + "type" : "long" + }, + "PULocationID" : { + "type" : "long" + }, + "RatecodeID" : { + "type" : "long" + }, + "VendorID" : { + "type" : "long" + }, + "extra" : { + "type" : "double" + }, + "fare_amount" : { + 
"type" : "double" + }, + "improvement_surcharge" : { + "type" : "double" + }, + "mta_tax" : { + "type" : "double" + }, + "passenger_count" : { + "type" : "long" + }, + "payment_type" : { + "type" : "long" + }, + "store_and_fwd_flag" : { + "type" : "keyword" + }, + "tip_amount" : { + "type" : "double" + }, + "tolls_amount" : { + "type" : "double" + }, + "total_amount" : { + "type" : "double" + }, + "tpep_dropoff_datetime" : { + "type" : "date", + "format" : "YYYY-MM-dd HH:mm:ss" + }, + "tpep_pickup_datetime" : { + "type" : "date", + "format" : "YYYY-MM-dd HH:mm:ss" + }, + "trip_distance" : { + "type" : "double" + } + }, + "field_stats" : { + "DOLocationID" : { + "count" : 19998, + "cardinality" : 240, + "min_value" : 1, + "max_value" : 265, + "mean_value" : 150.26532653265312, + "median_value" : 148, + "top_hits" : [ + { + "value" : 79, + "count" : 760 + }, + { + "value" : 48, + "count" : 683 + }, + { + "value" : 68, + "count" : 529 + }, + { + "value" : 170, + "count" : 506 + }, + { + "value" : 107, + "count" : 468 + }, + { + "value" : 249, + "count" : 457 + }, + { + "value" : 230, + "count" : 441 + }, + { + "value" : 186, + "count" : 432 + }, + { + "value" : 141, + "count" : 409 + }, + { + "value" : 263, + "count" : 386 + } + ] + }, + "PULocationID" : { + "count" : 19998, + "cardinality" : 154, + "min_value" : 1, + "max_value" : 265, + "mean_value" : 153.4042404240424, + "median_value" : 148, + "top_hits" : [ + { + "value" : 79, + "count" : 1067 + }, + { + "value" : 230, + "count" : 949 + }, + { + "value" : 148, + "count" : 940 + }, + { + "value" : 132, + "count" : 897 + }, + { + "value" : 48, + "count" : 853 + }, + { + "value" : 161, + "count" : 820 + }, + { + "value" : 234, + "count" : 750 + }, + { + "value" : 249, + "count" : 722 + }, + { + "value" : 164, + "count" : 663 + }, + { + "value" : 114, + "count" : 646 + } + ] + }, + "RatecodeID" : { + "count" : 19998, + "cardinality" : 5, + "min_value" : 1, + "max_value" : 5, + "mean_value" : 1.0656565656565653, + 
"median_value" : 1, + "top_hits" : [ + { + "value" : 1, + "count" : 19311 + }, + { + "value" : 2, + "count" : 468 + }, + { + "value" : 5, + "count" : 195 + }, + { + "value" : 4, + "count" : 17 + }, + { + "value" : 3, + "count" : 7 + } + ] + }, + "VendorID" : { + "count" : 19998, + "cardinality" : 2, + "min_value" : 1, + "max_value" : 2, + "mean_value" : 1.59005900590059, + "median_value" : 2, + "top_hits" : [ + { + "value" : 2, + "count" : 11800 + }, + { + "value" : 1, + "count" : 8198 + } + ] + }, + "extra" : { + "count" : 19998, + "cardinality" : 3, + "min_value" : -0.5, + "max_value" : 0.5, + "mean_value" : 0.4815981598159816, + "median_value" : 0.5, + "top_hits" : [ + { + "value" : 0.5, + "count" : 19281 + }, + { + "value" : 0, + "count" : 698 + }, + { + "value" : -0.5, + "count" : 19 + } + ] + }, + "fare_amount" : { + "count" : 19998, + "cardinality" : 208, + "min_value" : -100, + "max_value" : 300, + "mean_value" : 13.937719771977209, + "median_value" : 9.5, + "top_hits" : [ + { + "value" : 6, + "count" : 1004 + }, + { + "value" : 6.5, + "count" : 935 + }, + { + "value" : 5.5, + "count" : 909 + }, + { + "value" : 7, + "count" : 903 + }, + { + "value" : 5, + "count" : 889 + }, + { + "value" : 7.5, + "count" : 854 + }, + { + "value" : 4.5, + "count" : 802 + }, + { + "value" : 8.5, + "count" : 790 + }, + { + "value" : 8, + "count" : 789 + }, + { + "value" : 9, + "count" : 711 + } + ] + }, + "improvement_surcharge" : { + "count" : 19998, + "cardinality" : 3, + "min_value" : -0.3, + "max_value" : 0.3, + "mean_value" : 0.29915991599159913, + "median_value" : 0.3, + "top_hits" : [ + { + "value" : 0.3, + "count" : 19964 + }, + { + "value" : -0.3, + "count" : 22 + }, + { + "value" : 0, + "count" : 12 + } + ] + }, + "mta_tax" : { + "count" : 19998, + "cardinality" : 3, + "min_value" : -0.5, + "max_value" : 0.5, + "mean_value" : 0.4962246224622462, + "median_value" : 0.5, + "top_hits" : [ + { + "value" : 0.5, + "count" : 19868 + }, + { + "value" : 0, + "count" : 109 + 
}, + { + "value" : -0.5, + "count" : 21 + } + ] + }, + "passenger_count" : { + "count" : 19998, + "cardinality" : 7, + "min_value" : 0, + "max_value" : 6, + "mean_value" : 1.6201620162016201, + "median_value" : 1, + "top_hits" : [ + { + "value" : 1, + "count" : 14219 + }, + { + "value" : 2, + "count" : 2886 + }, + { + "value" : 5, + "count" : 1047 + }, + { + "value" : 3, + "count" : 804 + }, + { + "value" : 6, + "count" : 523 + }, + { + "value" : 4, + "count" : 406 + }, + { + "value" : 0, + "count" : 113 + } + ] + }, + "payment_type" : { + "count" : 19998, + "cardinality" : 4, + "min_value" : 1, + "max_value" : 4, + "mean_value" : 1.315631563156316, + "median_value" : 1, + "top_hits" : [ + { + "value" : 1, + "count" : 13936 + }, + { + "value" : 2, + "count" : 5857 + }, + { + "value" : 3, + "count" : 160 + }, + { + "value" : 4, + "count" : 45 + } + ] + }, + "store_and_fwd_flag" : { + "count" : 19998, + "cardinality" : 2, + "top_hits" : [ + { + "value" : "N", + "count" : 19910 + }, + { + "value" : "Y", + "count" : 88 + } + ] + }, + "tip_amount" : { + "count" : 19998, + "cardinality" : 717, + "min_value" : 0, + "max_value" : 128, + "mean_value" : 2.010959095909593, + "median_value" : 1.45, + "top_hits" : [ + { + "value" : 0, + "count" : 6917 + }, + { + "value" : 1, + "count" : 1178 + }, + { + "value" : 2, + "count" : 624 + }, + { + "value" : 3, + "count" : 248 + }, + { + "value" : 1.56, + "count" : 206 + }, + { + "value" : 1.46, + "count" : 205 + }, + { + "value" : 1.76, + "count" : 196 + }, + { + "value" : 1.45, + "count" : 195 + }, + { + "value" : 1.36, + "count" : 191 + }, + { + "value" : 1.5, + "count" : 187 + } + ] + }, + "tolls_amount" : { + "count" : 19998, + "cardinality" : 26, + "min_value" : 0, + "max_value" : 35, + "mean_value" : 0.2729697969796978, + "median_value" : 0, + "top_hits" : [ + { + "value" : 0, + "count" : 19107 + }, + { + "value" : 5.76, + "count" : 791 + }, + { + "value" : 10.5, + "count" : 36 + }, + { + "value" : 2.64, + "count" : 21 + }, + { 
+ "value" : 11.52, + "count" : 8 + }, + { + "value" : 5.54, + "count" : 4 + }, + { + "value" : 8.5, + "count" : 4 + }, + { + "value" : 17.28, + "count" : 4 + }, + { + "value" : 2, + "count" : 2 + }, + { + "value" : 2.16, + "count" : 2 + } + ] + }, + "total_amount" : { + "count" : 19998, + "cardinality" : 1267, + "min_value" : -100.3, + "max_value" : 389.12, + "mean_value" : 17.499898989898995, + "median_value" : 12.35, + "top_hits" : [ + { + "value" : 7.3, + "count" : 478 + }, + { + "value" : 8.3, + "count" : 443 + }, + { + "value" : 8.8, + "count" : 420 + }, + { + "value" : 6.8, + "count" : 406 + }, + { + "value" : 7.8, + "count" : 405 + }, + { + "value" : 6.3, + "count" : 371 + }, + { + "value" : 9.8, + "count" : 368 + }, + { + "value" : 5.8, + "count" : 362 + }, + { + "value" : 9.3, + "count" : 332 + }, + { + "value" : 10.3, + "count" : 332 + } + ] + }, + "tpep_dropoff_datetime" : { + "count" : 19998, + "cardinality" : 9066, + "top_hits" : [ + { + "value" : "2018-06-01 01:12:12", + "count" : 10 + }, + { + "value" : "2018-06-01 00:32:15", + "count" : 9 + }, + { + "value" : "2018-06-01 00:44:27", + "count" : 9 + }, + { + "value" : "2018-06-01 00:46:42", + "count" : 9 + }, + { + "value" : "2018-06-01 01:03:22", + "count" : 9 + }, + { + "value" : "2018-06-01 01:05:13", + "count" : 9 + }, + { + "value" : "2018-06-01 00:11:20", + "count" : 8 + }, + { + "value" : "2018-06-01 00:16:03", + "count" : 8 + }, + { + "value" : "2018-06-01 00:19:47", + "count" : 8 + }, + { + "value" : "2018-06-01 00:25:17", + "count" : 8 + } + ] + }, + "tpep_pickup_datetime" : { + "count" : 19998, + "cardinality" : 8760, + "top_hits" : [ + { + "value" : "2018-06-01 00:01:23", + "count" : 12 + }, + { + "value" : "2018-06-01 00:04:31", + "count" : 10 + }, + { + "value" : "2018-06-01 00:05:38", + "count" : 10 + }, + { + "value" : "2018-06-01 00:09:50", + "count" : 10 + }, + { + "value" : "2018-06-01 00:12:01", + "count" : 10 + }, + { + "value" : "2018-06-01 00:14:17", + "count" : 10 + }, + { + 
"value" : "2018-06-01 00:00:34", + "count" : 9 + }, + { + "value" : "2018-06-01 00:00:40", + "count" : 9 + }, + { + "value" : "2018-06-01 00:02:53", + "count" : 9 + }, + { + "value" : "2018-06-01 00:05:40", + "count" : 9 + } + ] + }, + "trip_distance" : { + "count" : 19998, + "cardinality" : 1687, + "min_value" : 0, + "max_value" : 64.63, + "mean_value" : 3.6521062106210715, + "median_value" : 2.16, + "top_hits" : [ + { + "value" : 0.9, + "count" : 335 + }, + { + "value" : 0.8, + "count" : 320 + }, + { + "value" : 1.1, + "count" : 316 + }, + { + "value" : 0.7, + "count" : 304 + }, + { + "value" : 1.2, + "count" : 303 + }, + { + "value" : 1, + "count" : 296 + }, + { + "value" : 1.3, + "count" : 280 + }, + { + "value" : 1.5, + "count" : 268 + }, + { + "value" : 1.6, + "count" : 268 + }, + { + "value" : 0.6, + "count" : 256 + } + ] + } + } +} +---- +// NOTCONSOLE + +<1> `num_messages_analyzed` is 2 lower than `num_lines_analyzed` because only + data records count as messages. The first line contains the column names + and in this sample the second line is blank. +<2> Unlike the first example, in this case the `format` has been identified as + `delimited`. +<3> Because the `format` is `delimited`, the `column_names` field in the output + lists the column names in the order they appear in the sample. +<4> `has_header_row` indicates that for this sample the column names were in + the first row of the sample. (If they hadn't been then it would have been + a good idea to specify them in the `column_names` query parameter.) +<5> The `delimiter` for this sample is a comma, as it's a CSV file. +<6> The `quote` character is the default double quote. (The structure finder + does not attempt to deduce any other quote character, so if you have a + delimited file that's quoted with some other character you must specify it + using the `quote` query parameter.) +<7> The `timestamp_field` has been chosen to be `tpep_pickup_datetime`. 
+ `tpep_dropoff_datetime` would work just as well, but `tpep_pickup_datetime` + was chosen because it comes first in the column order. If you prefer + `tpep_dropoff_datetime` then force it to be chosen using the + `timestamp_field` query parameter. +<8> `joda_timestamp_formats` are used to tell Logstash and Ingest pipeline how + to parse timestamps. +<9> `java_timestamp_formats` are the Java time formats recognized in the time + fields. In future Ingest pipeline will switch to use this format. +<10> The timestamp format in this sample doesn't specify a timezone, so to + accurately convert them to UTC timestamps to store in Elasticsearch it's + necessary to supply the timezone they relate to. `need_client_timezone` + will be `false` for timestamp formats that include the timezone. + diff --git a/docs/reference/query-dsl/type-query.asciidoc b/docs/reference/query-dsl/type-query.asciidoc index 96edda27eff..9825c68c74f 100644 --- a/docs/reference/query-dsl/type-query.asciidoc +++ b/docs/reference/query-dsl/type-query.asciidoc @@ -1,6 +1,8 @@ [[query-dsl-type-query]] === Type Query +deprecated[7.0.0, Types are being removed, prefer filtering on a field instead. For more information, please see <>.] + Filters documents matching the provided document / mapping type. 
[source,js] diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 490249461e5..ffea569ca21 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -1,5 +1,5 @@ [[es-release-notes]] -= Release Notes += Release notes [partintro] -- diff --git a/docs/reference/release-notes/7.0.0-alpha1.asciidoc b/docs/reference/release-notes/7.0.0-alpha1.asciidoc index c3a03d77f81..eb1924d2452 100644 --- a/docs/reference/release-notes/7.0.0-alpha1.asciidoc +++ b/docs/reference/release-notes/7.0.0-alpha1.asciidoc @@ -1,5 +1,5 @@ [[release-notes-7.0.0-alpha1]] -== 7.0.0-alpha1 Release Notes +== 7.0.0-alpha1 release notes The changes listed below have been released for the first time in Elasticsearch 7.0.0-alpha1. diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 0ab4106c22c..5b20b67061d 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -1,7 +1,7 @@ [[release-highlights]] -= {es} Release Highlights += {es} Release highlights ++++ -Release Highlights +Release highlights ++++ [partintro] diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index b80e8badf5b..c6243ab2598 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -19,7 +19,7 @@ directly to configure and access {xpack} features. 
include::info.asciidoc[] -include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[] +include::{es-repo-dir}/graph/explore.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] include::{es-repo-dir}/migration/migration.asciidoc[] include::{es-repo-dir}/ml/apis/ml-api.asciidoc[] diff --git a/docs/reference/sql/appendix/syntax-reserved.asciidoc b/docs/reference/sql/appendix/syntax-reserved.asciidoc index 7a502d6eea9..9d37b181cdf 100644 --- a/docs/reference/sql/appendix/syntax-reserved.asciidoc +++ b/docs/reference/sql/appendix/syntax-reserved.asciidoc @@ -2,7 +2,7 @@ [testenv="basic"] [appendix] [[sql-syntax-reserved]] -= Reserved Keywords += Reserved keywords Table with reserved keywords that need to be quoted. Also provide an example to make it more obvious. diff --git a/docs/reference/sql/functions/math.asciidoc b/docs/reference/sql/functions/math.asciidoc index 6489f1bec4d..e84a71b1e91 100644 --- a/docs/reference/sql/functions/math.asciidoc +++ b/docs/reference/sql/functions/math.asciidoc @@ -8,68 +8,276 @@ to be numeric. ==== Generic -* `ABS` +[[sql-functions-math-abs]] +===== `ABS` -https://en.wikipedia.org/wiki/Absolute_value[Absolute value], returns \[same type as input] - -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[abs] +ABS(numeric_exp<1>) -------------------------------------------------- -* `CBRT` +*Input*: -https://en.wikipedia.org/wiki/Cube_root[Cube root], returns `double` +<1> numeric expression -// TODO make the example in the tests presentable +*Output*: numeric -* `CEIL` +.Description: -https://en.wikipedia.org/wiki/Floor_and_ceiling_functions[Ceiling], returns `double` +Returns the https://en.wikipedia.org/wiki/Absolute_value[absolute value] of `numeric_exp`. The return type is the same as the input type. 
-* `CEILING` - -Same as `CEIL` - -// TODO make the example in the tests presentable - -* `E` - -https://en.wikipedia.org/wiki/E_%28mathematical_constant%29[Euler's number], returns `2.7182818284590452354` - -* https://en.wikipedia.org/wiki/Exponential_function[e^x^] (`EXP`) - -["source","sql",subs="attributes,callouts,macros"] +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[exp] +include-tagged::{sql-specs}/docs.csv-spec[abs] -------------------------------------------------- -* https://docs.oracle.com/javase/8/docs/api/java/lang/Math.html#expm1-double-[e^x^ - 1] (`EXPM1`) +[[sql-functions-math-cbrt]] +===== `CBRT` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[expm1] +CBRT(numeric_exp<1>) -------------------------------------------------- -* https://en.wikipedia.org/wiki/Floor_and_ceiling_functions[Floor] (`FLOOR`) +*Input*: -// TODO make the example in the tests presentable +<1> numeric expression -* https://en.wikipedia.org/wiki/Natural_logarithm[Natural logarithm] (`LOG`) +*Output*: double numeric value -["source","sql",subs="attributes,callouts,macros"] +.Description: + +Returns the https://en.wikipedia.org/wiki/Cube_root[cube root] of `numeric_exp`. 
+ +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[log] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineCbrtWithNegativeValue] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Logarithm[Logarithm] base 10 (`LOG10`) +[[sql-functions-math-ceil]] +===== `CEIL/CEILING` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[log10] +CEIL(numeric_exp<1>) +CEILING(numeric_exp<2>) -------------------------------------------------- -* `ROUND` +*Input*: + +<1> numeric expression +<2> numeric expression + +*Output*: integer or long numeric value + +.Description: + +Returns the smallest integer greater than or equal to `numeric_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineCeiling] +-------------------------------------------------- + +[[sql-functions-math-e]] +===== `E` + +.Synopsis: +[source, sql] +-------------------------------------------------- +E() +-------------------------------------------------- + +*Input*: _none_ + +*Output*: `2.718281828459045` + +.Description: + +Returns https://en.wikipedia.org/wiki/E_%28mathematical_constant%29[Euler's number]. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathEulersNumber] +-------------------------------------------------- + +[[sql-functions-math-exp]] +===== `EXP` + +.Synopsis: +[source, sql] +-------------------------------------------------- +EXP(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> float numeric expression + +*Output*: double numeric value + +.Description: + +Returns https://en.wikipedia.org/wiki/Exponential_function[Euler's number at the power] of `numeric_exp` e^numeric_exp^. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathExpInline] +-------------------------------------------------- + +[[sql-functions-math-expm1]] +===== `EXPM1` + +.Synopsis: +[source, sql] +-------------------------------------------------- +EXPM1(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> float numeric expression + +*Output*: double numeric value + +.Description: + +Returns https://docs.oracle.com/javase/8/docs/api/java/lang/Math.html#expm1-double-[Euler's number at the power] of `numeric_exp` minus 1 (e^numeric_exp^ - 1). + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathExpm1Inline] +-------------------------------------------------- + +[[sql-functions-math-floor]] +===== `FLOOR` + +.Synopsis: +[source, sql] +-------------------------------------------------- +FLOOR(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: integer or long numeric value + +.Description: + +Returns the largest integer less than or equal to `numeric_exp`. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineFloor] +-------------------------------------------------- + +[[sql-functions-math-log]] +===== `LOG` + +.Synopsis: +[source, sql] +-------------------------------------------------- +LOG(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Natural_logarithm[natural logarithm] of `numeric_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineLog] +-------------------------------------------------- + +[[sql-functions-math-log10]] +===== `LOG10` + +.Synopsis: +[source, sql] +-------------------------------------------------- +LOG10(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Common_logarithm[base 10 logarithm] of `numeric_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineLog10] +-------------------------------------------------- + +[[sql-functions-math-pi]] +===== `PI` + +.Synopsis: +[source, sql] +-------------------------------------------------- +PI() +-------------------------------------------------- + +*Input*: _none_ + +*Output*: `3.141592653589793` + +.Description: + +Returns https://en.wikipedia.org/wiki/Pi[PI number]. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathPINumber] +-------------------------------------------------- + +[[sql-functions-math-power]] +===== `POWER` + +.Synopsis: +[source, sql] +-------------------------------------------------- +POWER(numeric_exp<1>, integer_exp<2>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression +<2> integer expression + +*Output*: double numeric value + +.Description: + +Returns the value of `numeric_exp` to the power of `integer_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlinePowerPositive] +-------------------------------------------------- + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlinePowerNegative] +-------------------------------------------------- + +[[sql-functions-math-round]] +===== `ROUND` .Synopsis: [source, sql] @@ -78,8 +286,8 @@ ROUND(numeric_exp<1>[, integer_exp<2>]) ---- *Input*: - <1> numeric expression - <2> integer expression; optional +<1> numeric expression +<2> integer expression; optional *Output*: numeric @@ -89,24 +297,67 @@ Returns `numeric_exp` rounded to `integer_exp` places right of the decimal point the function will perform as if `integer_exp` would be 0. The returned numeric data type is the same as the data type of `numeric_exp`. 
-["source","sql",subs="attributes,callouts,macros"] +["source","sql",subs="attributes,macros"] -------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[mathRoundWithPositiveParameter] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +["source","sql",subs="attributes,macros"] -------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[mathRoundWithNegativeParameter] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Square_root[Square root] (`SQRT`) +[[sql-functions-math-sign]] +===== `SIGN` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[sqrt] +SIGN(numeric_exp<1>) -------------------------------------------------- -* `TRUNCATE` +*Input*: + +<1> numeric expression + +*Output*: [-1, 0, 1] + +.Description: + +Returns an indicator of the sign of `numeric_exp`. If `numeric_exp` is less than zero, –1 is returned. If `numeric_exp` equals zero, 0 is returned. If `numeric_exp` is greater than zero, 1 is returned. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineSign] +-------------------------------------------------- + + +[[sql-functions-math-sqrt]] +===== `SQRT` + +.Synopsis: +[source, sql] +-------------------------------------------------- +SQRT(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns https://en.wikipedia.org/wiki/Square_root[square root] of `numeric_exp`. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineSqrt] +-------------------------------------------------- + +[[sql-functions-math-truncate]] +===== `TRUNCATE` .Synopsis: [source, sql] @@ -115,8 +366,8 @@ TRUNCATE(numeric_exp<1>[, integer_exp<2>]) ---- *Input*: - <1> numeric expression - <2> integer expression; optional +<1> numeric expression +<2> integer expression; optional *Output*: numeric @@ -126,86 +377,305 @@ Returns `numeric_exp` truncated to `integer_exp` places right of the decimal poi the function will perform as if `integer_exp` would be 0. The returned numeric data type is the same as the data type of `numeric_exp`. -["source","sql",subs="attributes,callouts,macros"] +["source","sql",subs="attributes,macros"] -------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[mathTruncateWithPositiveParameter] -------------------------------------------------- -["source","sql",subs="attributes,callouts,macros"] +["source","sql",subs="attributes,macros"] -------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[mathTruncateWithNegativeParameter] -------------------------------------------------- ==== Trigonometric -* Convert from https://en.wikipedia.org/wiki/Radian[radians] -to https://en.wikipedia.org/wiki/Degree_(angle)[degrees] (`DEGREES`) +[[sql-functions-math-acos]] +===== `ACOS` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[degrees] +ACOS(numeric_exp<1>) -------------------------------------------------- -* Convert from https://en.wikipedia.org/wiki/Degree_(angle)[degrees] -to https://en.wikipedia.org/wiki/Radian[radians] (`RADIANS`) +*Input*: -["source","sql",subs="attributes,callouts,macros"] +<1> numeric expression + +*Output*: double numeric value + 
+.Description: + +Returns the https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arccosine] of `numeric_exp` as an angle, expressed in radians. + +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[degrees] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineAcos] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Trigonometric_functions#sine[Sine] (`SIN`) +[[sql-functions-math-asin]] +===== `ASIN` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[sin] +ASIN(numeric_exp<1>) -------------------------------------------------- -* https://en.wikipedia.org/wiki/Trigonometric_functions#cosine[Cosine] (`COS`) +*Input*: -["source","sql",subs="attributes,callouts,macros"] +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arcsine] of `numeric_exp` as an angle, expressed in radians. 
+
+["source","sql",subs="attributes,macros"]
--------------------------------------------------
-include-tagged::{sql-specs}/math.sql-spec[cos]
+include-tagged::{sql-specs}/docs.csv-spec[mathInlineAsin]
--------------------------------------------------

-* https://en.wikipedia.org/wiki/Trigonometric_functions#tangent[Tangent] (`TAN`)
+[[sql-functions-math-atan]]
+===== `ATAN`

-["source","sql",subs="attributes,callouts,macros"]
+.Synopsis:
+[source, sql]
--------------------------------------------------
-include-tagged::{sql-specs}/math.sql-spec[tan]
+ATAN(numeric_exp<1>)
--------------------------------------------------

-* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc sine] (`ASIN`)
+*Input*:

-["source","sql",subs="attributes,callouts,macros"]
+<1> numeric expression
+
+*Output*: double numeric value
+
+.Description:
+
+Returns the https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arctangent] of `numeric_exp` as an angle, expressed in radians.
+
+["source","sql",subs="attributes,macros"]
--------------------------------------------------
-include-tagged::{sql-specs}/math.sql-spec[asin]
+include-tagged::{sql-specs}/docs.csv-spec[mathInlineAtan]
--------------------------------------------------

-* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc cosine] (`ACOS`)
+[[sql-functions-math-atan2]]
+===== `ATAN2`

-["source","sql",subs="attributes,callouts,macros"]
+.Synopsis:
+[source, sql]
--------------------------------------------------
-include-tagged::{sql-specs}/math.sql-spec[acos]
+ATAN2(ordinate<1>, abscissa<2>)
--------------------------------------------------

-* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc tangent] (`ATAN`)
+*Input*:

-["source","sql",subs="attributes,callouts,macros"]
+<1> numeric expression
+<2> numeric expression
+
+*Output*: double numeric value
+
+.Description:
+
+Returns the https://en.wikipedia.org/wiki/Atan2[arctangent of the `ordinate` and `abscissa` coordinates]
specified as an angle, expressed in radians. + +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[atan] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineAtan2] -------------------------------------------------- -* https://en.wikipedia.org/wiki/Hyperbolic_function[Hyperbolic sine] (`SINH`) +[[sql-functions-math-cos]] +===== `COS` -["source","sql",subs="attributes,callouts,macros"] +.Synopsis: +[source, sql] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[sinh] +COS(numeric_exp<1>) -------------------------------------------------- -* https://en.wikipedia.org/wiki/Hyperbolic_function[Hyperbolic cosine] (`COSH`) +*Input*: -["source","sql",subs="attributes,callouts,macros"] +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#cosine[cosine] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians. + +["source","sql",subs="attributes,macros"] -------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[cosh] +include-tagged::{sql-specs}/docs.csv-spec[mathInlineCosine] +-------------------------------------------------- + +[[sql-functions-math-cosh]] +===== `COSH` + +.Synopsis: +[source, sql] +-------------------------------------------------- +COSH(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Hyperbolic_function[hyperbolic cosine] of `numeric_exp`. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineCosh] +-------------------------------------------------- + +[[sql-functions-math-cot]] +===== `COT` + +.Synopsis: +[source, sql] +-------------------------------------------------- +COT(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#Cosecant,_secant,_and_cotangent[cotangent] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineCotangent] +-------------------------------------------------- + +[[sql-functions-math-degrees]] +===== `DEGREES` + +.Synopsis: +[source, sql] +-------------------------------------------------- +DEGREES(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Convert from https://en.wikipedia.org/wiki/Radian[radians] +to https://en.wikipedia.org/wiki/Degree_(angle)[degrees]. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineDegrees] +-------------------------------------------------- + +[[sql-functions-math-radians]] +===== `RADIANS` + +.Synopsis: +[source, sql] +-------------------------------------------------- +RADIANS(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Convert from https://en.wikipedia.org/wiki/Degree_(angle)[degrees] +to https://en.wikipedia.org/wiki/Radian[radians]. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineRadians] +-------------------------------------------------- + +[[sql-functions-math-sin]] +===== `SIN` + +.Synopsis: +[source, sql] +-------------------------------------------------- +SIN(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#sine[sine] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineSine] +-------------------------------------------------- + +[[sql-functions-math-sinh]] +===== `SINH` + +.Synopsis: +[source, sql] +-------------------------------------------------- +SINH(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Hyperbolic_function[hyperbolic sine] of `numeric_exp`. + +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineSinh] +-------------------------------------------------- + +[[sql-functions-math-tan]] +===== `TAN` + +.Synopsis: +[source, sql] +-------------------------------------------------- +TAN(numeric_exp<1>) +-------------------------------------------------- + +*Input*: + +<1> numeric expression + +*Output*: double numeric value + +.Description: + +Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#tangent[tangent] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians. 
+ +["source","sql",subs="attributes,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/docs.csv-spec[mathInlineTanget] -------------------------------------------------- diff --git a/docs/reference/sql/functions/string.asciidoc b/docs/reference/sql/functions/string.asciidoc index ccc11938028..873ac18d812 100644 --- a/docs/reference/sql/functions/string.asciidoc +++ b/docs/reference/sql/functions/string.asciidoc @@ -8,233 +8,456 @@ Functions for performing string manipulation. [[sql-functions-string-ascii]] ==== `ASCII` -*Input*: `string`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +ASCII(string_exp<1>) +-------------------------------------------------- -Returns the ASCII code value of the leftmost character of string_exp as an integer. +*Input*: + +<1> string expression + +*Output*: integer + +.Description: + +Returns the ASCII code value of the leftmost character of `string_exp` as an integer. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringAscii] ----- +-------------------------------------------------- [[sql-functions-string-bit-length]] ==== `BIT_LENGTH` -*Input*: `string`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +BIT_LENGTH(string_exp<1>) +-------------------------------------------------- +*Input*: -Returns the length in bits of the input. +<1> string expression + +*Output*: integer + +.Description: + +Returns the length in bits of the `string_exp` input expression. 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringBitLength] ----- +-------------------------------------------------- [[sql-functions-string-char]] ==== `CHAR` -*Input*: `numeric`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +CHAR(code<1>) +-------------------------------------------------- +*Input*: + +<1> integer expression + +*Output*: string + +.Description: Returns the character that has the ASCII code value specified by the numeric input. The value should be between 0 and 255; otherwise, the return value is data source–dependent. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringChar] ----- +-------------------------------------------------- [[sql-functions-string-char-length]] ==== `CHAR_LENGTH` -*Input*: `string`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +CHAR_LENGTH(string_exp<1>) +-------------------------------------------------- +*Input*: + +<1> string expression + +*Output*: integer + +.Description: Returns the length in characters of the input, if the string expression is of a character data type; otherwise, returns the length in bytes of the string expression (the smallest integer not less than the number of bits divided by 8). 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringCharLength] ----- +-------------------------------------------------- [[sql-functions-string-concat]] ==== `CONCAT` -*Input*: `string1`, `string2`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +CONCAT(string_exp1<1>,string_exp2<2>) +-------------------------------------------------- +*Input*: -turns a character string that is the result of concatenating string1 to string2. If one of the string is `NULL`, -the other string will be returned. +<1> string expression +<2> string expression + +*Output*: string + +.Description: + +Returns a character string that is the result of concatenating `string_exp1` to `string_exp2`. If one of the string is `NULL`, the other string will be returned. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringConcat] ----- +-------------------------------------------------- [[sql-functions-string-insert]] ==== `INSERT` -*Input*: `string1`, `start`, `length`, `string2`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +INSERT(source<1>, start<2>, length<3>, replacement<4>) +-------------------------------------------------- +*Input*: -Returns a string where length characters have been deleted from string1, beginning at start, and where string2 has been inserted into string1, beginning at start. +<1> string expression +<2> integer expression +<3> integer expression +<4> string expression + +*Output*: string + +.Description: + +Returns a string where `length` characters have been deleted from `source`, beginning at `start`, and where `replacement` has been inserted into `source`, beginning at `start`. 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringInsert] ----- +-------------------------------------------------- [[sql-functions-string-lcase]] ==== `LCASE` -*Input*: `string`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +LCASE(string_exp<1>) +-------------------------------------------------- +*Input*: -Returns a string equal to that in string, with all uppercase characters converted to lowercase. +<1> string expression + +*Output*: string + +.Description: + +Returns a string equal to that in `string_exp`, with all uppercase characters converted to lowercase. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLCase] ----- +-------------------------------------------------- [[sql-functions-string-left]] ==== `LEFT` -*Input*: `string`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +LEFT(string_exp<1>, count<2>) +-------------------------------------------------- +*Input*: -Returns the leftmost count characters of string. +<1> string expression +<2> integer expression + +*Output*: string + +.Description: + +Returns the leftmost count characters of `string_exp`. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLeft] ----- +-------------------------------------------------- [[sql-functions-string-length]] ==== `LENGTH` -*Input*: `string`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +LENGTH(string_exp<1>) +-------------------------------------------------- +*Input*: -Returns the number of characters in string, excluding trailing blanks. 
+<1> string expression + +*Output*: integer + +.Description: + +Returns the number of characters in `string_exp`, excluding trailing blanks. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLength] ----- +-------------------------------------------------- [[sql-functions-string-locate]] ==== `LOCATE` -*Input*: `string1`, `string2`[, `start`]`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +LOCATE(pattern<1>, source<2>[, start]<3>) +-------------------------------------------------- +*Input*: -Returns the starting position of the first occurrence of string1 within string2. The search for the first occurrence of string1 begins with the first character position in string2 unless the optional argument, start, is specified. If start is specified, the search begins with the character position indicated by the value of start. The first character position in string2 is indicated by the value 1. If string1 is not found within string2, the value 0 is returned. +<1> string expression +<2> string expression +<3> integer expression; optional + +*Output*: integer + +.Description: + +Returns the starting position of the first occurrence of `pattern` within `source`. The search for the first occurrence of `pattern` begins with the first character position in `source` unless the optional argument, `start`, is specified. If `start` is specified, the search begins with the character position indicated by the value of `start`. The first character position in `source` is indicated by the value 1. If `pattern` is not found within `source`, the value 0 is returned. 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLocateWoStart] ----- +-------------------------------------------------- ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLocateWithStart] ----- +-------------------------------------------------- [[sql-functions-string-ltrim]] ==== `LTRIM` -*Input*: `string`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +LTRIM(string_exp<1>) +-------------------------------------------------- +*Input*: -Returns the characters of string_exp, with leading blanks removed. +<1> string expression + +*Output*: string + +.Description: + +Returns the characters of `string_exp`, with leading blanks removed. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringLTrim] ----- +-------------------------------------------------- [[sql-functions-string-position]] ==== `POSITION` -*Input*: `string1`, `string2`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +POSITION(string_exp1<1>, string_exp2<2>) +-------------------------------------------------- +*Input*: -Returns the position of the string1 in string2. The result is an exact numeric. +<1> string expression +<2> string expression + +*Output*: integer + +.Description: + +Returns the position of the `string_exp1` in `string_exp2`. The result is an exact numeric. 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringPosition] ----- +-------------------------------------------------- [[sql-functions-string-repeat]] ==== `REPEAT` -*Input*: `string`, `count`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +REPEAT(string_exp<1>, count<2>) +-------------------------------------------------- +*Input*: -Returns a character string composed of string1 repeated count times. +<1> string expression +<2> integer expression + +*Output*: string + +.Description: + +Returns a character string composed of `string_exp` repeated `count` times. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringRepeat] ----- +-------------------------------------------------- [[sql-functions-string-replace]] ==== `REPLACE` -*Input*: `string1`, `string2`, `string3`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +REPLACE(source<1>, pattern<2>, replacement<3>) +-------------------------------------------------- +*Input*: -Search string1 for occurrences of string2, and replace with string3. +<1> string expression +<2> string expression +<3> string expression + +*Output*: string + +.Description: + +Search `source` for occurrences of `pattern`, and replace with `replacement`. 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringReplace] ----- +-------------------------------------------------- [[sql-functions-string-right]] ==== `RIGHT` -*Input*: `string`, `count`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +RIGHT(string_exp<1>, count<2>) +-------------------------------------------------- +*Input*: -Returns the rightmost count characters of string. +<1> string expression +<2> integer expression + +*Output*: string + +.Description: + +Returns the rightmost count characters of `string_exp`. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringRight] ----- +-------------------------------------------------- [[sql-functions-string-rtrim]] ==== `RTRIM` -*Input*: `string`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +RTRIM(string_exp<1>) +-------------------------------------------------- +*Input*: -Returns the characters of string with trailing blanks removed. +<1> string expression + +*Output*: string + +.Description: + +Returns the characters of `string_exp` with trailing blanks removed. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringRTrim] ----- +-------------------------------------------------- [[sql-functions-string-space]] ==== `SPACE` -*Input*: `integer`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +SPACE(count<1>) +-------------------------------------------------- +*Input*: -Returns a character string consisting of count spaces. +<1> integer expression + +*Output*: string + +.Description: + +Returns a character string consisting of `count` spaces. 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringSpace] ----- +-------------------------------------------------- [[sql-functions-string-substring]] ==== `SUBSTRING` -*Input*: `string`, `start`, `length`, *Output*: `integer` +.Synopsis: +[source, sql] +-------------------------------------------------- +SUBSTRING(source<1>, start<2>, length<3>) +-------------------------------------------------- +*Input*: -Returns a character string that is derived from the string, beginning at the character position specified by `start` for `length` characters. +<1> string expression +<2> integer expression +<3> integer expression + +*Output*: string + +.Description: + +Returns a character string that is derived from `source`, beginning at the character position specified by `start` for `length` characters. ["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringSubString] ----- +-------------------------------------------------- [[sql-functions-string-ucase]] ==== `UCASE` -*Input*: `string`, *Output*: `string` +.Synopsis: +[source, sql] +-------------------------------------------------- +UCASE(string_exp<1>) +-------------------------------------------------- +*Input*: + +<1> string expression + +*Output*: string + +.Description: Returns a string equal to that of the input, with all lowercase characters converted to uppercase. 
["source","sql",subs="attributes,callouts,macros"] ----- +-------------------------------------------------- include-tagged::{sql-specs}/docs.csv-spec[stringUCase] ----- +-------------------------------------------------- diff --git a/docs/reference/sql/index.asciidoc b/docs/reference/sql/index.asciidoc index aa9eebea7b7..d7022452730 100644 --- a/docs/reference/sql/index.asciidoc +++ b/docs/reference/sql/index.asciidoc @@ -1,7 +1,7 @@ [role="xpack"] [testenv="basic"] [[xpack-sql]] -= SQL Access += SQL access :sql-tests: {xes-repo-dir}/../../qa/sql :sql-specs: {sql-tests}/src/main/resources diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java index 6c68710c6d8..c20737998fe 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/Grok.java @@ -35,12 +35,12 @@ import java.io.InputStreamReader; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; -import java.util.Collections; public final class Grok { @@ -184,6 +184,7 @@ public final class Grok { String namedPatternRef = groupMatch(NAME_GROUP, region, grokPattern); String subName = groupMatch(SUBNAME_GROUP, region, grokPattern); // TODO(tal): Support definitions + @SuppressWarnings("unused") String definition = groupMatch(DEFINITION_GROUP, region, grokPattern); String patternName = groupMatch(PATTERN_GROUP, region, grokPattern); diff --git a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java index d30cf3d6fa2..f9d2055de1b 100644 --- a/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java +++ b/libs/grok/src/test/java/org/elasticsearch/grok/GrokTests.java @@ -262,8 +262,6 @@ public class 
GrokTests extends ESTestCase { } public void testBooleanCaptures() { - Map bank = new HashMap<>(); - String pattern = "%{WORD:name}=%{WORD:status:boolean}"; Grok g = new Grok(basePatterns, pattern); diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java index 0f3078715fd..4f2a320ad58 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.nio; import org.elasticsearch.test.ESTestCase; import org.junit.Before; -import org.mockito.ArgumentCaptor; import java.io.IOException; import java.nio.ByteBuffer; @@ -61,8 +60,6 @@ public class FlushOperationTests extends ESTestCase { ByteBuffer[] buffers = {ByteBuffer.allocate(10), ByteBuffer.allocate(15), ByteBuffer.allocate(3)}; FlushOperation writeOp = new FlushOperation(buffers, listener); - ArgumentCaptor buffersCaptor = ArgumentCaptor.forClass(ByteBuffer[].class); - writeOp.incrementIndex(5); assertFalse(writeOp.isFullyFlushed()); ByteBuffer[] byteBuffers = writeOp.getBuffersToWrite(); diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index eae5e48a557..51a4f86a0d3 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -25,6 +25,8 @@ import java.io.Flushable; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.math.BigDecimal; +import java.math.BigInteger; import java.nio.file.Path; import java.time.ZonedDateTime; import java.util.Arrays; @@ -103,7 +105,8 @@ public final class XContentBuilder implements Closeable, Flushable { writers.put(ZonedDateTime.class, (b, v) -> 
b.value(v.toString())); writers.put(Calendar.class, XContentBuilder::timeValue); writers.put(GregorianCalendar.class, XContentBuilder::timeValue); - + writers.put(BigInteger.class, (b, v) -> b.value((BigInteger) v)); + writers.put(BigDecimal.class, (b, v) -> b.value((BigDecimal) v)); Map, HumanReadableTransformer> humanReadableTransformer = new HashMap<>(); Map, Function> dateTransformers = new HashMap<>(); @@ -546,6 +549,81 @@ public final class XContentBuilder implements Closeable, Flushable { return this; } + //////////////////////////////////////////////////////////////////////////// + // BigInteger + ////////////////////////////////// + + public XContentBuilder field(String name, BigInteger value) throws IOException { + if (value == null) { + return nullField(name); + } + ensureNameNotNull(name); + generator.writeNumberField(name, value); + return this; + } + + public XContentBuilder array(String name, BigInteger[] values) throws IOException { + return field(name).values(values); + } + + private XContentBuilder values(BigInteger[] values) throws IOException { + if (values == null) { + return nullValue(); + } + startArray(); + for (BigInteger b : values) { + value(b); + } + endArray(); + return this; + } + + public XContentBuilder value(BigInteger value) throws IOException { + if (value == null) { + return nullValue(); + } + generator.writeNumber(value); + return this; + } + + + //////////////////////////////////////////////////////////////////////////// + // BigDecimal + ////////////////////////////////// + + public XContentBuilder field(String name, BigDecimal value) throws IOException { + if (value == null) { + return nullField(name); + } + ensureNameNotNull(name); + generator.writeNumberField(name, value); + return this; + } + + public XContentBuilder array(String name, BigDecimal[] values) throws IOException { + return field(name).values(values); + } + + private XContentBuilder values(BigDecimal[] values) throws IOException { + if (values == null) { + 
return nullValue(); + } + startArray(); + for (BigDecimal b : values) { + value(b); + } + endArray(); + return this; + } + + public XContentBuilder value(BigDecimal value) throws IOException { + if (value == null) { + return nullValue(); + } + generator.writeNumber(value); + return this; + } + //////////////////////////////////////////////////////////////////////////// // String ////////////////////////////////// diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java index 142c1e399c7..48a82d91655 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentGenerator.java @@ -23,6 +23,8 @@ import java.io.Closeable; import java.io.Flushable; import java.io.IOException; import java.io.InputStream; +import java.math.BigDecimal; +import java.math.BigInteger; public interface XContentGenerator extends Closeable, Flushable { @@ -70,6 +72,14 @@ public interface XContentGenerator extends Closeable, Flushable { void writeNumber(short value) throws IOException; + void writeNumber(BigInteger value) throws IOException; + + void writeNumberField(String name, BigInteger value) throws IOException; + + void writeNumber(BigDecimal value) throws IOException; + + void writeNumberField(String name, BigDecimal value) throws IOException; + void writeStringField(String name, String value) throws IOException; void writeString(String value) throws IOException; diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java index 6f09174a573..97d25653ad6 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java +++ 
b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/json/JsonXContentGenerator.java @@ -42,6 +42,8 @@ import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.math.BigDecimal; +import java.math.BigInteger; import java.util.Objects; import java.util.Set; @@ -226,6 +228,19 @@ public class JsonXContentGenerator implements XContentGenerator { generator.writeNumberField(name, value); } + @Override + public void writeNumberField(String name, BigInteger value) throws IOException { + // as jackson's JsonGenerator doesn't have this method for BigInteger + // we have to implement it ourselves + generator.writeFieldName(name); + generator.writeNumber(value); + } + + @Override + public void writeNumberField(String name, BigDecimal value) throws IOException { + generator.writeNumberField(name, value); + } + @Override public void writeNumber(int value) throws IOException { generator.writeNumber(value); @@ -246,6 +261,16 @@ public class JsonXContentGenerator implements XContentGenerator { generator.writeNumber(value); } + @Override + public void writeNumber(BigInteger value) throws IOException { + generator.writeNumber(value); + } + + @Override + public void writeNumber(BigDecimal value) throws IOException { + generator.writeNumber(value); + } + @Override public void writeStringField(String name, String value) throws IOException { generator.writeStringField(name, value); diff --git a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index 6aa0a321adf..42d53bf4985 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -222,6 +222,7 @@ public class ObjectParserTests extends ESTestCase { public void testFailOnValueType() throws 
IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"numeric_value\" : false}"); class TestStruct { + @SuppressWarnings("unused") public String test; } ObjectParser objectParser = new ObjectParser<>("foo"); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java index df67f24cc7f..e89219da4d9 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java @@ -54,6 +54,7 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory { private final CharArraySet keepWords; private static final String KEEP_WORDS_KEY = "keep_words"; private static final String KEEP_WORDS_PATH_KEY = KEEP_WORDS_KEY + "_path"; + @SuppressWarnings("unused") private static final String KEEP_WORDS_CASE_KEY = KEEP_WORDS_KEY + "_case"; // for javadoc // unsupported ancient option diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 67895e82e61..829d9746399 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -53,7 +53,6 @@ import org.apache.lucene.analysis.pt.PortugueseStemFilter; import org.apache.lucene.analysis.ru.RussianLightStemFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; import org.apache.lucene.analysis.sv.SwedishLightStemFilter; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.env.Environment; @@ -94,8 +93,6 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { @Override public TokenStream create(TokenStream tokenStream) { - final Version indexVersion = indexSettings.getIndexVersionCreated(); - if ("arabic".equalsIgnoreCase(language)) { return new ArabicStemFilter(tokenStream); } else if ("armenian".equalsIgnoreCase(language)) { diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java index d58a48e70c9..41e96253f28 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.grok.Grok; import org.elasticsearch.grok.ThreadWatchdog; +import org.elasticsearch.ingest.DropProcessor; import org.elasticsearch.ingest.PipelineProcessor; import org.elasticsearch.ingest.Processor; import org.elasticsearch.plugins.ActionPlugin; diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml new file mode 100644 index 00000000000..3be038aca24 --- /dev/null +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml @@ -0,0 +1,59 @@ +--- +teardown: +- do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test Drop Processor": +- do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description" : "pipeline with drop", + "processors" : [ + { + "drop" : { + "if": "ctx.foo == 'bar'" + } + } + ] + } +- match: { acknowledged: true } + +- do: + index: + index: test + type: test + id: 1 + pipeline: 
"my_pipeline" + body: { + foo: "bar" + } + +- do: + index: + index: test + type: test + id: 2 + pipeline: "my_pipeline" + body: { + foo: "blub" + } + +- do: + catch: missing + get: + index: test + type: test + id: 1 +- match: { found: false } + +- do: + get: + index: test + type: test + id: 2 +- match: { _source.foo: "blub" } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index 97dddbdfe52..e6ed475a7be 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -28,11 +28,14 @@ import org.elasticsearch.painless.spi.Whitelist; import org.objectweb.asm.util.Printer; import java.lang.reflect.Constructor; +import java.lang.reflect.Method; import java.net.MalformedURLException; import java.net.URL; import java.security.CodeSource; import java.security.SecureClassLoader; import java.security.cert.Certificate; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; @@ -89,16 +92,11 @@ final class Compiler { */ @Override public Class findClass(String name) throws ClassNotFoundException { - if (scriptClass.getName().equals(name)) { - return scriptClass; + Class found = additionalClasses.get(name); + if (found != null) { + return found; } - if (factoryClass != null && factoryClass.getName().equals(name)) { - return factoryClass; - } - if (statefulFactoryClass != null && statefulFactoryClass.getName().equals(name)) { - return statefulFactoryClass; - } - Class found = painlessLookup.canonicalTypeNameToType(name.replace('$', '.')); + found = painlessLookup.canonicalTypeNameToType(name.replace('$', '.')); return found != null ? 
found : super.findClass(name); } @@ -155,21 +153,16 @@ final class Compiler { */ private final Class scriptClass; - /** - * The class/interface to create the {@code scriptClass} instance. - */ - private final Class factoryClass; - - /** - * An optional class/interface to create the {@code factoryClass} instance. - */ - private final Class statefulFactoryClass; - /** * The whitelist the script will use. */ private final PainlessLookup painlessLookup; + /** + * Classes that do not exist in the lookup, but are needed by the script factories. + */ + private final Map> additionalClasses; + /** * Standard constructor. * @param scriptClass The class/interface the script will implement. @@ -179,9 +172,36 @@ final class Compiler { */ Compiler(Class scriptClass, Class factoryClass, Class statefulFactoryClass, PainlessLookup painlessLookup) { this.scriptClass = scriptClass; - this.factoryClass = factoryClass; - this.statefulFactoryClass = statefulFactoryClass; this.painlessLookup = painlessLookup; + Map> additionalClasses = new HashMap<>(); + additionalClasses.put(scriptClass.getName(), scriptClass); + addFactoryMethod(additionalClasses, factoryClass, "newInstance"); + addFactoryMethod(additionalClasses, statefulFactoryClass, "newFactory"); + addFactoryMethod(additionalClasses, statefulFactoryClass, "newInstance"); + this.additionalClasses = Collections.unmodifiableMap(additionalClasses); + } + + private static void addFactoryMethod(Map> additionalClasses, Class factoryClass, String methodName) { + if (factoryClass == null) { + return; + } + + Method factoryMethod = null; + for (Method method : factoryClass.getMethods()) { + if (methodName.equals(method.getName())) { + factoryMethod = method; + break; + } + } + if (factoryMethod == null) { + return; + } + + additionalClasses.put(factoryClass.getName(), factoryClass); + for (int i = 0; i < factoryMethod.getParameterTypes().length; ++i) { + Class parameterClazz = factoryMethod.getParameterTypes()[i]; + 
additionalClasses.put(parameterClazz.getName(), parameterClazz); + } } /** diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index e07c016ddd0..d819f53bf0b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -295,7 +295,6 @@ public final class Locals { public final Class clazz; public final boolean readonly; private final int slot; - private boolean used; public Variable(Location location, String name, Class clazz, int slot, boolean readonly) { this.location = location; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java index f87f8a134b8..98968465d34 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java @@ -19,10 +19,15 @@ package org.elasticsearch.painless.lookup; +import java.util.Objects; + public class PainlessCast { /** Create a standard cast with no boxing/unboxing. 
*/ public static PainlessCast originalTypetoTargetType(Class originalType, Class targetType, boolean explicitCast) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + return new PainlessCast(originalType, targetType, explicitCast, null, null, null, null); } @@ -30,6 +35,10 @@ public class PainlessCast { public static PainlessCast unboxOriginalType( Class originalType, Class targetType, boolean explicitCast, Class unboxOriginalType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(unboxOriginalType); + return new PainlessCast(originalType, targetType, explicitCast, unboxOriginalType, null, null, null); } @@ -37,6 +46,10 @@ public class PainlessCast { public static PainlessCast unboxTargetType( Class originalType, Class targetType, boolean explicitCast, Class unboxTargetType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(unboxTargetType); + return new PainlessCast(originalType, targetType, explicitCast, null, unboxTargetType, null, null); } @@ -44,6 +57,10 @@ public class PainlessCast { public static PainlessCast boxOriginalType( Class originalType, Class targetType, boolean explicitCast, Class boxOriginalType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(boxOriginalType); + return new PainlessCast(originalType, targetType, explicitCast, null, null, boxOriginalType, null); } @@ -51,6 +68,10 @@ public class PainlessCast { public static PainlessCast boxTargetType( Class originalType, Class targetType, boolean explicitCast, Class boxTargetType) { + Objects.requireNonNull(originalType); + Objects.requireNonNull(targetType); + Objects.requireNonNull(boxTargetType); + return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType); } @@ -73,4 +94,30 @@ public class PainlessCast { this.boxOriginalType = boxOriginalType; this.boxTargetType = 
boxTargetType; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessCast that = (PainlessCast)object; + + return explicitCast == that.explicitCast && + Objects.equals(originalType, that.originalType) && + Objects.equals(targetType, that.targetType) && + Objects.equals(unboxOriginalType, that.unboxOriginalType) && + Objects.equals(unboxTargetType, that.unboxTargetType) && + Objects.equals(boxOriginalType, that.boxOriginalType) && + Objects.equals(boxTargetType, that.boxTargetType); + } + + @Override + public int hashCode() { + return Objects.hash(originalType, targetType, explicitCast, unboxOriginalType, unboxTargetType, boxOriginalType, boxTargetType); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index f5d6c97bb2f..786b4c6a3b9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -22,6 +22,7 @@ package org.elasticsearch.painless.lookup; import java.lang.invoke.MethodHandle; import java.util.Collections; import java.util.Map; +import java.util.Objects; public final class PainlessClass { @@ -57,4 +58,29 @@ public final class PainlessClass { this.functionalInterfaceMethod = functionalInterfaceMethod; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessClass that = (PainlessClass)object; + + return Objects.equals(constructors, that.constructors) && + Objects.equals(staticMethods, that.staticMethods) && + Objects.equals(methods, that.methods) && + Objects.equals(staticFields, that.staticFields) && + 
Objects.equals(fields, that.fields) && + Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod); + } + + @Override + public int hashCode() { + return Objects.hash(constructors, staticMethods, methods, staticFields, fields, functionalInterfaceMethod); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java index 3418b2d8244..0f28830b3d4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBinding.java @@ -22,6 +22,7 @@ package org.elasticsearch.painless.lookup; import java.lang.reflect.Constructor; import java.lang.reflect.Method; import java.util.List; +import java.util.Objects; public class PainlessClassBinding { @@ -38,4 +39,28 @@ public class PainlessClassBinding { this.returnType = returnType; this.typeParameters = typeParameters; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessClassBinding that = (PainlessClassBinding)object; + + return Objects.equals(javaConstructor, that.javaConstructor) && + Objects.equals(javaMethod, that.javaMethod) && + Objects.equals(returnType, that.returnType) && + Objects.equals(typeParameters, that.typeParameters); + } + + @Override + public int hashCode() { + + return Objects.hash(javaConstructor, javaMethod, returnType, typeParameters); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java index 92100d1bda0..fbf9e45bf16 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -22,6 +22,7 @@ package org.elasticsearch.painless.lookup; import java.lang.invoke.MethodHandle; import java.util.HashMap; import java.util.Map; +import java.util.Objects; final class PainlessClassBuilder { @@ -57,4 +58,29 @@ final class PainlessClassBuilder { return new PainlessClass(constructors, staticMethods, methods, staticFields, fields, getterMethodHandles, setterMethodHandles, functionalInterfaceMethod); } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessClassBuilder that = (PainlessClassBuilder)object; + + return Objects.equals(constructors, that.constructors) && + Objects.equals(staticMethods, that.staticMethods) && + Objects.equals(methods, that.methods) && + Objects.equals(staticFields, that.staticFields) && + Objects.equals(fields, that.fields) && + Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod); + } + + @Override + public int hashCode() { + return Objects.hash(constructors, staticMethods, methods, staticFields, fields, functionalInterfaceMethod); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java index a3dc6c8122b..0f890e88b73 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessConstructor.java @@ -23,6 +23,7 @@ import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodType; import java.lang.reflect.Constructor; import java.util.List; +import java.util.Objects; public class 
PainlessConstructor { @@ -37,4 +38,26 @@ public class PainlessConstructor { this.methodHandle = methodHandle; this.methodType = methodType; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessConstructor that = (PainlessConstructor)object; + + return Objects.equals(javaConstructor, that.javaConstructor) && + Objects.equals(typeParameters, that.typeParameters) && + Objects.equals(methodType, that.methodType); + } + + @Override + public int hashCode() { + return Objects.hash(javaConstructor, typeParameters, methodType); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java index 9567e97331c..72a57159b44 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java @@ -21,6 +21,7 @@ package org.elasticsearch.painless.lookup; import java.lang.invoke.MethodHandle; import java.lang.reflect.Field; +import java.util.Objects; public final class PainlessField { @@ -37,4 +38,25 @@ public final class PainlessField { this.getterMethodHandle = getterMethodHandle; this.setterMethodHandle = setterMethodHandle; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessField that = (PainlessField)object; + + return Objects.equals(javaField, that.javaField) && + Objects.equals(typeParameter, that.typeParameter); + } + + @Override + public int hashCode() { + return Objects.hash(javaField, typeParameter); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index ce451f3dca8..b3bc8580b38 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -20,8 +20,8 @@ package org.elasticsearch.painless.lookup; import org.elasticsearch.painless.spi.Whitelist; -import org.elasticsearch.painless.spi.WhitelistClassBinding; import org.elasticsearch.painless.spi.WhitelistClass; +import org.elasticsearch.painless.spi.WhitelistClassBinding; import org.elasticsearch.painless.spi.WhitelistConstructor; import org.elasticsearch.painless.spi.WhitelistField; import org.elasticsearch.painless.spi.WhitelistMethod; @@ -34,7 +34,6 @@ import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -51,155 +50,10 @@ import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typesToCan public final class PainlessLookupBuilder { - private static class PainlessConstructorCacheKey { - - private final Class targetClass; - private final List> typeParameters; - - private PainlessConstructorCacheKey(Class targetClass, List> typeParameters) { - this.targetClass = targetClass; - this.typeParameters = Collections.unmodifiableList(typeParameters); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessConstructorCacheKey that = (PainlessConstructorCacheKey)object; - - return Objects.equals(targetClass, that.targetClass) && - Objects.equals(typeParameters, that.typeParameters); - } - - @Override - public int hashCode() { - return Objects.hash(targetClass, typeParameters); - } - } - - private 
static class PainlessMethodCacheKey { - - private final Class targetClass; - private final String methodName; - private final Class returnType; - private final List> typeParameters; - - private PainlessMethodCacheKey(Class targetClass, String methodName, Class returnType, List> typeParameters) { - this.targetClass = targetClass; - this.methodName = methodName; - this.returnType = returnType; - this.typeParameters = Collections.unmodifiableList(typeParameters); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessMethodCacheKey that = (PainlessMethodCacheKey)object; - - return Objects.equals(targetClass, that.targetClass) && - Objects.equals(methodName, that.methodName) && - Objects.equals(returnType, that.returnType) && - Objects.equals(typeParameters, that.typeParameters); - } - - @Override - public int hashCode() { - return Objects.hash(targetClass, methodName, returnType, typeParameters); - } - } - - private static class PainlessFieldCacheKey { - - private final Class targetClass; - private final String fieldName; - private final Class typeParameter; - - private PainlessFieldCacheKey(Class targetClass, String fieldName, Class typeParameter) { - this.targetClass = targetClass; - this.fieldName = fieldName; - this.typeParameter = typeParameter; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessFieldCacheKey that = (PainlessFieldCacheKey) object; - - return Objects.equals(targetClass, that.targetClass) && - Objects.equals(fieldName, that.fieldName) && - Objects.equals(typeParameter, that.typeParameter); - } - - @Override - public int hashCode() { - return Objects.hash(targetClass, fieldName, typeParameter); - } - } - - private static class PainlessClassBindingCacheKey { - - private 
final Class targetClass; - private final String methodName; - private final Class methodReturnType; - private final List> methodTypeParameters; - - private PainlessClassBindingCacheKey(Class targetClass, - String methodName, Class returnType, List> typeParameters) { - - this.targetClass = targetClass; - this.methodName = methodName; - this.methodReturnType = returnType; - this.methodTypeParameters = Collections.unmodifiableList(typeParameters); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessClassBindingCacheKey that = (PainlessClassBindingCacheKey)object; - - return Objects.equals(targetClass, that.targetClass) && - Objects.equals(methodName, that.methodName) && - Objects.equals(methodReturnType, that.methodReturnType) && - Objects.equals(methodTypeParameters, that.methodTypeParameters); - } - - @Override - public int hashCode() { - return Objects.hash(targetClass, methodName, methodReturnType, methodTypeParameters); - } - } - - private static final Map painlessConstructorCache = new HashMap<>(); - private static final Map painlessMethodCache = new HashMap<>(); - private static final Map painlessFieldCache = new HashMap<>(); - private static final Map painlessClassBindingCache = new HashMap<>(); + private static final Map painlessConstructorCache = new HashMap<>(); + private static final Map painlessMethodCache = new HashMap<>(); + private static final Map painlessFieldCache = new HashMap<>(); + private static final Map painlessClassBindingCache = new HashMap<>(); private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); @@ -335,8 +189,7 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]"); } - - Class 
existingClass = canonicalClassNamesToClasses.get(typeToCanonicalTypeName(clazz)); + Class existingClass = canonicalClassNamesToClasses.get(canonicalClassName); if (existingClass != null && existingClass != clazz) { throw new IllegalArgumentException("class [" + canonicalClassName + "] " + @@ -360,22 +213,22 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("must use no_import parameter on class [" + canonicalClassName + "] with no package"); } } else { - Class importedPainlessClass = canonicalClassNamesToClasses.get(importedCanonicalClassName); + Class importedClass = canonicalClassNamesToClasses.get(importedCanonicalClassName); - if (importedPainlessClass == null) { + if (importedClass == null) { if (importClassName) { if (existingPainlessClassBuilder != null) { throw new IllegalArgumentException( - "inconsistent no_import parameters found for class [" + canonicalClassName + "]"); + "inconsistent no_import parameter found for class [" + canonicalClassName + "]"); } canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz); } - } else if (importedPainlessClass != clazz) { + } else if (importedClass != clazz) { throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " + - "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedPainlessClass) + "]"); + "classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedClass) + "]"); } else if (importClassName == false) { - throw new IllegalArgumentException("inconsistent no_import parameters found for class [" + canonicalClassName + "]"); + throw new IllegalArgumentException("inconsistent no_import parameter found for class [" + canonicalClassName + "]"); } } } @@ -440,36 +293,32 @@ public final class PainlessLookupBuilder { try { javaConstructor = targetClass.getConstructor(javaTypeParameters.toArray(new Class[typeParametersSize])); } catch (NoSuchMethodException nsme) { - throw new 
IllegalArgumentException("constructor reflection object " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); + throw new IllegalArgumentException("reflection object not found for constructor " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", nsme); } + MethodHandle methodHandle; + + try { + methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle not found for constructor " + + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae); + } + + MethodType methodType = methodHandle.type(); + String painlessConstructorKey = buildPainlessConstructorKey(typeParametersSize); - PainlessConstructor painlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey); + PainlessConstructor existingPainlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey); + PainlessConstructor newPainlessConstructor = new PainlessConstructor(javaConstructor, typeParameters, methodHandle, methodType); - if (painlessConstructor == null) { - MethodHandle methodHandle; - - try { - methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("constructor method handle " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); - } - - MethodType methodType = methodHandle.type(); - - painlessConstructor = painlessConstructorCache.computeIfAbsent( - new PainlessConstructorCacheKey(targetClass, typeParameters), - key -> new PainlessConstructor(javaConstructor, typeParameters, methodHandle, methodType) - ); - - painlessClassBuilder.constructors.put(painlessConstructorKey, painlessConstructor); - } else if 
(painlessConstructor.typeParameters.equals(typeParameters) == false){ - throw new IllegalArgumentException("cannot have constructors " + + if (existingPainlessConstructor == null) { + newPainlessConstructor = painlessConstructorCache.computeIfAbsent(newPainlessConstructor, key -> key); + painlessClassBuilder.constructors.put(painlessConstructorKey, newPainlessConstructor); + } else if (newPainlessConstructor.equals(existingPainlessConstructor) == false){ + throw new IllegalArgumentException("cannot add constructors with the same arity but are not equivalent for constructors " + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(painlessConstructor.typeParameters) + "] " + - "with the same arity and different type parameters"); + "[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(existingPainlessConstructor.typeParameters) + "]"); } } @@ -578,8 +427,8 @@ public final class PainlessLookupBuilder { try { javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); + throw new IllegalArgumentException("reflection object not found for method [[" + targetCanonicalClassName + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", nsme); } } else { try { @@ -591,9 +440,9 @@ public final class PainlessLookupBuilder { "[" + typeToCanonicalTypeName(augmentedClass) + "] must be static"); } } catch (NoSuchMethodException nsme) { - throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + - "with augmented 
target class [" + typeToCanonicalTypeName(augmentedClass) + "]", nsme); + throw new IllegalArgumentException("reflection object not found for method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] " + + "with augmented class [" + typeToCanonicalTypeName(augmentedClass) + "]", nsme); } } @@ -604,79 +453,54 @@ public final class PainlessLookupBuilder { typesToCanonicalTypeNames(typeParameters) + "]"); } - String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); + MethodHandle methodHandle; - if (augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) { - PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey); - - if (painlessMethod == null) { - MethodHandle methodHandle; - - try { - methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("static method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); - } - - MethodType methodType = methodHandle.type(); - - painlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters), - key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType)); - - painlessClassBuilder.staticMethods.put(painlessMethodKey, painlessMethod); - } else if (painlessMethod.returnType == returnType && painlessMethod.typeParameters.equals(typeParameters) == false) { - throw new IllegalArgumentException("cannot have static methods " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(returnType) + "], " + - typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + 
typeToCanonicalTypeName(painlessMethod.returnType) + "], " + - typesToCanonicalTypeNames(painlessMethod.typeParameters) + "] " + - "with the same arity and different return type or type parameters"); + if (augmentedClass == null) { + try { + methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle not found for method " + + "[[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]", iae); } } else { - PainlessMethod painlessMethod = painlessClassBuilder.methods.get(painlessMethodKey); - - if (painlessMethod == null) { - MethodHandle methodHandle; - - if (augmentedClass == null) { - try { - methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); - } - } else { - try { - methodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + - "with augmented target class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae); - } - } - - MethodType methodType = methodHandle.type(); - - painlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters), - key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType)); - - painlessClassBuilder.methods.put(painlessMethodKey, painlessMethod); - } else if (painlessMethod.returnType == returnType && 
painlessMethod.typeParameters.equals(typeParameters) == false) { - throw new IllegalArgumentException("cannot have methods " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(returnType) + "], " + - typesToCanonicalTypeNames(typeParameters) + "] and " + - "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(painlessMethod.returnType) + "], " + - typesToCanonicalTypeNames(painlessMethod.typeParameters) + "] " + - "with the same arity and different return type or type parameters"); + try { + methodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("method handle not found for method " + + "[[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]" + + "with augmented class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae); } } + + MethodType methodType = methodHandle.type(); + + boolean isStatic = augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers()); + String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); + PainlessMethod existingPainlessMethod = isStatic ? 
+ painlessClassBuilder.staticMethods.get(painlessMethodKey) : + painlessClassBuilder.methods.get(painlessMethodKey); + PainlessMethod newPainlessMethod = + new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType); + + if (existingPainlessMethod == null) { + newPainlessMethod = painlessMethodCache.computeIfAbsent(newPainlessMethod, key -> key); + + if (isStatic) { + painlessClassBuilder.staticMethods.put(painlessMethodKey, newPainlessMethod); + } else { + painlessClassBuilder.methods.put(painlessMethodKey, newPainlessMethod); + } + } else if (newPainlessMethod.equals(existingPainlessMethod) == false) { + throw new IllegalArgumentException("cannot add methods with the same name and arity but are not equivalent for methods " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(returnType) + "], " + + typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(existingPainlessMethod.returnType) + "], " + + typesToCanonicalTypeNames(existingPainlessMethod.typeParameters) + "]"); + } } public void addPainlessField(String targetCanonicalClassName, String fieldName, String canonicalTypeNameParameter) { @@ -687,7 +511,8 @@ public final class PainlessLookupBuilder { Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); if (targetClass == null) { - throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for field " + + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + canonicalTypeNameParameter + "]]"); } Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); @@ -721,7 +546,8 @@ public final class PainlessLookupBuilder { PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); 
if (painlessClassBuilder == null) { - throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found"); + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for field " + + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "]]"); } if (isValidType(typeParameter) == false) { @@ -735,7 +561,7 @@ public final class PainlessLookupBuilder { javaField = targetClass.getField(fieldName); } catch (NoSuchFieldException nsme) { throw new IllegalArgumentException( - "field reflection object [[" + targetCanonicalClassName + "], [" + fieldName + "] not found", nsme); + "reflection object not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", nsme); } if (javaField.getType() != typeToJavaType(typeParameter)) { @@ -760,20 +586,18 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "], [" + fieldName + "]] must be final"); } - PainlessField painlessField = painlessClassBuilder.staticFields.get(painlessFieldKey); + PainlessField existingPainlessField = painlessClassBuilder.staticFields.get(painlessFieldKey); + PainlessField newPainlessField = new PainlessField(javaField, typeParameter, methodHandleGetter, null); - if (painlessField == null) { - painlessField = painlessFieldCache.computeIfAbsent( - new PainlessFieldCacheKey(targetClass, fieldName, typeParameter), - key -> new PainlessField(javaField, typeParameter, methodHandleGetter, null)); - - painlessClassBuilder.staticFields.put(painlessFieldKey, painlessField); - } else if (painlessField.typeParameter != typeParameter) { - throw new IllegalArgumentException("cannot have static fields " + + if (existingPainlessField == null) { + newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key); + painlessClassBuilder.staticFields.put(painlessFieldKey, newPainlessField); + } else if 
(newPainlessField.equals(existingPainlessField) == false) { + throw new IllegalArgumentException("cannot add fields with the same name but are not equivalent for fields " + "[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "] and " + - "[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " + - typeToCanonicalTypeName(painlessField.typeParameter) + "] " + + "[[" + targetCanonicalClassName + "], [" + existingPainlessField.javaField.getName() + "], " + + typeToCanonicalTypeName(existingPainlessField.typeParameter) + "] " + "with the same name and different type parameters"); } } else { @@ -786,35 +610,41 @@ public final class PainlessLookupBuilder { "setter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]"); } - PainlessField painlessField = painlessClassBuilder.fields.get(painlessFieldKey); + PainlessField existingPainlessField = painlessClassBuilder.fields.get(painlessFieldKey); + PainlessField newPainlessField = new PainlessField(javaField, typeParameter, methodHandleGetter, methodHandleSetter); - if (painlessField == null) { - painlessField = painlessFieldCache.computeIfAbsent( - new PainlessFieldCacheKey(targetClass, painlessFieldKey, typeParameter), - key -> new PainlessField(javaField, typeParameter, methodHandleGetter, methodHandleSetter)); - - painlessClassBuilder.fields.put(fieldName, painlessField); - } else if (painlessField.typeParameter != typeParameter) { - throw new IllegalArgumentException("cannot have fields " + + if (existingPainlessField == null) { + newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key); + painlessClassBuilder.fields.put(painlessFieldKey, newPainlessField); + } else if (newPainlessField.equals(existingPainlessField) == false) { + throw new IllegalArgumentException("cannot add fields with the same name but are not equivalent for fields " + "[[" + targetCanonicalClassName + "], 
[" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "] and " + - "[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " + - typeToCanonicalTypeName(painlessField.typeParameter) + "] " + + "[[" + targetCanonicalClassName + "], [" + existingPainlessField.javaField.getName() + "], " + + typeToCanonicalTypeName(existingPainlessField.typeParameter) + "] " + "with the same name and different type parameters"); } } } - public void addImportedPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, + public void addImportedPainlessMethod(ClassLoader classLoader, String targetJavaClassName, String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters) { Objects.requireNonNull(classLoader); - Objects.requireNonNull(targetCanonicalClassName); + Objects.requireNonNull(targetJavaClassName); Objects.requireNonNull(methodName); Objects.requireNonNull(returnCanonicalTypeName); Objects.requireNonNull(canonicalTypeNameParameters); - Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); + Class targetClass; + + try { + targetClass = Class.forName(targetJavaClassName, true, classLoader); + } catch (ClassNotFoundException cnfe) { + throw new IllegalArgumentException("class [" + targetJavaClassName + "] not found", cnfe); + } + + String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); if (targetClass == null) { throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for imported method " + @@ -913,35 +743,33 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("imported method and class binding cannot have the same name [" + methodName + "]"); } - PainlessMethod importedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey); + MethodHandle methodHandle; - if (importedPainlessMethod == null) { - MethodHandle methodHandle; + try { + methodHandle = 
MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); + } - try { - methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); - } catch (IllegalAccessException iae) { - throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " + - "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); - } + MethodType methodType = methodHandle.type(); - MethodType methodType = methodHandle.type(); + PainlessMethod existingImportedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey); + PainlessMethod newImportedPainlessMethod = + new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType); - importedPainlessMethod = painlessMethodCache.computeIfAbsent( - new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters), - key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType)); - - painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, importedPainlessMethod); - } else if (importedPainlessMethod.returnType == returnType && - importedPainlessMethod.typeParameters.equals(typeParameters) == false) { - throw new IllegalArgumentException("cannot have imported methods " + + if (existingImportedPainlessMethod == null) { + newImportedPainlessMethod = painlessMethodCache.computeIfAbsent(newImportedPainlessMethod, key -> key); + painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, newImportedPainlessMethod); + } else if (newImportedPainlessMethod.equals(existingImportedPainlessMethod) == false) { + throw new IllegalArgumentException("cannot add imported methods with the same 
name and arity " + + "but are not equivalent for methods " + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + "[" + typeToCanonicalTypeName(returnType) + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + - "[" + typeToCanonicalTypeName(importedPainlessMethod.returnType) + "], " + - typesToCanonicalTypeNames(importedPainlessMethod.typeParameters) + "] " + - "with the same arity and different return type or type parameters"); + "[" + typeToCanonicalTypeName(existingImportedPainlessMethod.returnType) + "], " + + typesToCanonicalTypeNames(existingImportedPainlessMethod.typeParameters) + "]"); } } @@ -987,7 +815,6 @@ public final class PainlessLookupBuilder { } public void addPainlessClassBinding(Class targetClass, String methodName, Class returnType, List> typeParameters) { - Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); Objects.requireNonNull(returnType); @@ -1100,31 +927,24 @@ public final class PainlessLookupBuilder { throw new IllegalArgumentException("class binding and imported method cannot have the same name [" + methodName + "]"); } - PainlessClassBinding painlessClassBinding = painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey); + PainlessClassBinding existingPainlessClassBinding = painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey); + PainlessClassBinding newPainlessClassBinding = + new PainlessClassBinding(javaConstructor, javaMethod, returnType, typeParameters); - if (painlessClassBinding == null) { - Constructor finalJavaConstructor = javaConstructor; - Method finalJavaMethod = javaMethod; - - painlessClassBinding = painlessClassBindingCache.computeIfAbsent( - new PainlessClassBindingCacheKey(targetClass, methodName, returnType, typeParameters), - key -> new PainlessClassBinding(finalJavaConstructor, finalJavaMethod, returnType, typeParameters)); - - painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey, 
painlessClassBinding); - } else if (painlessClassBinding.javaConstructor.equals(javaConstructor) == false || - painlessClassBinding.javaMethod.equals(javaMethod) == false || - painlessClassBinding.returnType != returnType || - painlessClassBinding.typeParameters.equals(typeParameters) == false) { - throw new IllegalArgumentException("cannot have class bindings " + + if (existingPainlessClassBinding == null) { + newPainlessClassBinding = painlessClassBindingCache.computeIfAbsent(newPainlessClassBinding, key -> key); + painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey, newPainlessClassBinding); + } else if (newPainlessClassBinding.equals(existingPainlessClassBinding) == false) { + throw new IllegalArgumentException("cannot add class bindings with the same name and arity " + + "but are not equivalent for methods " + "[[" + targetCanonicalClassName + "], " + "[" + methodName + "], " + "[" + typeToCanonicalTypeName(returnType) + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " + "[[" + targetCanonicalClassName + "], " + "[" + methodName + "], " + - "[" + typeToCanonicalTypeName(painlessClassBinding.returnType) + "], " + - typesToCanonicalTypeNames(painlessClassBinding.typeParameters) + "] and " + - "with the same name and arity but different constructors or methods"); + "[" + typeToCanonicalTypeName(existingPainlessClassBinding.returnType) + "], " + + typesToCanonicalTypeNames(existingPainlessClassBinding.typeParameters) + "]"); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java index 89462170ae5..ce10d7a1b89 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java @@ -24,6 +24,7 @@ import java.lang.invoke.MethodType; import java.lang.reflect.Method; import 
java.util.Collections; import java.util.List; +import java.util.Objects; public class PainlessMethod { @@ -44,4 +45,28 @@ public class PainlessMethod { this.methodHandle = methodHandle; this.methodType = methodType; } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessMethod that = (PainlessMethod)object; + + return Objects.equals(javaMethod, that.javaMethod) && + Objects.equals(targetClass, that.targetClass) && + Objects.equals(returnType, that.returnType) && + Objects.equals(typeParameters, that.typeParameters) && + Objects.equals(methodType, that.methodType); + } + + @Override + public int hashCode() { + return Objects.hash(javaMethod, targetClass, returnType, typeParameters, methodType); + } } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java index f7f3b89773b..61f00647f3c 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java @@ -91,7 +91,7 @@ public class ChildrenIT extends ParentChildTestCase { String category = categories[j] = uniqueCategories[catIndex++ % uniqueCategories.length]; Control control = categoryToControl.get(category); if (control == null) { - categoryToControl.put(category, control = new Control(category)); + categoryToControl.put(category, control = new Control()); } control.articleIds.add(id); } @@ -457,14 +457,8 @@ public class ChildrenIT extends ParentChildTestCase { } private static final class Control { - - final String category; final Set articleIds = new HashSet<>(); final Set commentIds = new HashSet<>(); final Map> commenterToCommentId = new HashMap<>(); - - private Control(String category) { - this.category = category; - } } } 
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index dd4fdabdacd..e55dab1c38f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -737,9 +737,6 @@ public abstract class AbstractAsyncBulkByScrollAction params; - private UpdateScript executable; - private Map context; - public ScriptApplier(WorkerBulkByScrollTaskState taskWorker, ScriptService scriptService, Script script, @@ -756,16 +753,8 @@ public abstract class AbstractAsyncBulkByScrollAction(); - } else { - context.clear(); - } + Map context = new HashMap<>(); context.put(IndexFieldMapper.NAME, doc.getIndex()); context.put(TypeFieldMapper.NAME, doc.getType()); context.put(IdFieldMapper.NAME, doc.getId()); @@ -778,7 +767,9 @@ public abstract class AbstractAsyncBulkByScrollAction T applyScript(Consumer> scriptBody) { IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0); - UpdateScript updateScript = new UpdateScript(Collections.emptyMap()) { + UpdateScript.Factory factory = (params, ctx) -> new UpdateScript(Collections.emptyMap(), ctx) { @Override - public void execute(Map ctx) { + public void execute() { scriptBody.accept(ctx); } - }; - UpdateScript.Factory factory = params -> updateScript; + }; ExecutableScript simpleExecutableScript = new SimpleExecutableScript(scriptBody); when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(params -> simpleExecutableScript); when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory); diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle 
index e7c36ff506e..05e3b1807f0 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -34,13 +34,13 @@ compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-tr dependencies { // network stack - compile "io.netty:netty-buffer:4.1.28.Final" - compile "io.netty:netty-codec:4.1.28.Final" - compile "io.netty:netty-codec-http:4.1.28.Final" - compile "io.netty:netty-common:4.1.28.Final" - compile "io.netty:netty-handler:4.1.28.Final" - compile "io.netty:netty-resolver:4.1.28.Final" - compile "io.netty:netty-transport:4.1.28.Final" + compile "io.netty:netty-buffer:4.1.29.Final" + compile "io.netty:netty-codec:4.1.29.Final" + compile "io.netty:netty-codec-http:4.1.29.Final" + compile "io.netty:netty-common:4.1.29.Final" + compile "io.netty:netty-handler:4.1.29.Final" + compile "io.netty:netty-resolver:4.1.29.Final" + compile "io.netty:netty-transport:4.1.29.Final" } dependencyLicenses { diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.28.Final.jar.sha1 deleted file mode 100644 index f8a652d0dd1..00000000000 --- a/modules/transport-netty4/licenses/netty-buffer-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d6c2d13492778009d33f60e05ed90bcb535d1fd1 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..17798a82aa7 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +c3809f72e4b535b343b7dfa3c0c8210dad2fa5ea \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.28.Final.jar.sha1 deleted file mode 100644 index 70799bf1032..00000000000 --- a/modules/transport-netty4/licenses/netty-codec-4.1.28.Final.jar.sha1 
+++ /dev/null @@ -1 +0,0 @@ -a38361d893900947524f8a9da980555950e73d6a \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..f892420795b --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +1651bc2e279216773c234cafe402d68d2a5adc90 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.28.Final.jar.sha1 deleted file mode 100644 index e1d34ebf89b..00000000000 --- a/modules/transport-netty4/licenses/netty-codec-http-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -897100c1022c780b0a436b9349e507e8fa9800dc \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..aa97345bad1 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +454688b88cea27a4d407202d1fc79a6522345b5e \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.28.Final.jar.sha1 deleted file mode 100644 index bc951426832..00000000000 --- a/modules/transport-netty4/licenses/netty-common-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -df69ce8bb9b544a71e7bbee290253cf7c93e6bad \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..47140876e6a --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +a5d6a735ed07d8f197daa48db7f097cfc971ee5e \ No 
newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.28.Final.jar.sha1 deleted file mode 100644 index 80dc8b8f6fe..00000000000 --- a/modules/transport-netty4/licenses/netty-handler-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a035784682da0126bc25f10713dac732b5082a6d \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..7c2d407f75e --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +1acf1d94799296a2517533ec75ce7e155e9c4ea7 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.28.Final.jar.sha1 deleted file mode 100644 index afe004bd716..00000000000 --- a/modules/transport-netty4/licenses/netty-resolver-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f33557dcb31fa20da075ac05e4808115e32ef9b7 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..bac08f57079 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +bbec1dc913732e4773893c14d795b15d6c1e878e \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.28.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.28.Final.jar.sha1 deleted file mode 100644 index af19a16d6ed..00000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d2ef28f49d726737f0ffe84bf66529b3bf6e0c0d \ No newline at end of file diff --git 
a/modules/transport-netty4/licenses/netty-transport-4.1.29.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..0ce64132afb --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +c190b90f70e2ae8a48c068afad709e8728fcaa39 \ No newline at end of file diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 5f2e8b8c871..a73b5057280 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -48,6 +48,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; @@ -68,6 +69,7 @@ import java.net.InetSocketAddress; import java.util.Arrays; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; @@ -241,11 +243,15 @@ public class Netty4HttpServerTransport extends AbstractHttpServerTransport { } else if (origin.equals(ANY_ORIGIN)) { builder = Netty4CorsConfigBuilder.forAnyOrigin(); } else { - Pattern p = RestUtils.checkCorsSettingForRegex(origin); - if (p == null) { - builder = 
Netty4CorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); - } else { - builder = Netty4CorsConfigBuilder.forPattern(p); + try { + Pattern p = RestUtils.checkCorsSettingForRegex(origin); + if (p == null) { + builder = Netty4CorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); + } else { + builder = Netty4CorsConfigBuilder.forPattern(p); + } + } catch (PatternSyntaxException e) { + throw new SettingsException("Bad regex in [" + SETTING_CORS_ALLOW_ORIGIN.getKey() + "]: [" + origin + "]", e); } } if (SETTING_CORS_ALLOW_CREDENTIALS.get(settings)) { diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java index 1c3c71d710d..63e38823acb 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpServerTransportTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -75,6 +76,7 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; import static org.elasticsearch.common.Strings.collectionToDelimitedString; @@ -148,6 +150,17 @@ public class Netty4HttpServerTransportTests extends ESTestCase { assertFalse(corsConfig.isCredentialsAllowed()); } + public 
void testCorsConfigWithBadRegex() { + final Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "/[*/") + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .build(); + SettingsException e = expectThrows(SettingsException.class, () -> Netty4HttpServerTransport.buildCorsConfig(settings)); + assertThat(e.getMessage(), containsString("Bad regex in [http.cors.allow-origin]: [/[*/]")); + assertThat(e.getCause(), instanceOf(PatternSyntaxException.class)); + } + /** * Test that {@link Netty4HttpServerTransport} supports the "Expect: 100-continue" HTTP header * @throws InterruptedException if the client communication with the server is interrupted diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java index 0235e6e8136..2c20d4b4784 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java @@ -23,7 +23,6 @@ import com.ibm.icu.text.Collator; import com.ibm.icu.text.RawCollationKey; import com.ibm.icu.text.RuleBasedCollator; import com.ibm.icu.util.ULocale; - import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; @@ -34,6 +33,7 @@ import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -86,6 +86,7 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper { this.collator = 
ref.collator; } + @Override public CollationFieldType clone() { return new CollationFieldType(this); } @@ -158,18 +159,25 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper { @Override public Query fuzzyQuery(Object value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("[fuzzy] queries are not supported on [" + CONTENT_TYPE + "] fields."); } @Override public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("[prefix] queries are not supported on [" + CONTENT_TYPE + "] fields."); + } + + @Override + public Query wildcardQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { + throw new UnsupportedOperationException("[wildcard] queries are not supported on [" + CONTENT_TYPE + "] fields."); } @Override public Query regexpQuery(String value, int flags, int maxDeterminizedStates, MultiTermQuery.RewriteMethod method, QueryShardContext context) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("[regexp] queries are not supported on [" + CONTENT_TYPE + "] fields."); } public static DocValueFormat COLLATE_FORMAT = new DocValueFormat() { @@ -239,7 +247,6 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper { private boolean numeric = false; private String variableTop = null; private boolean hiraganaQuaternaryMode = false; - private String nullValue = Defaults.NULL_VALUE; public Builder(String name) { super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java index 71d8f25bf9f..a261e8b3b7e 100644 --- 
a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/CollationFieldTypeTests.java @@ -121,6 +121,14 @@ public class CollationFieldTypeTests extends FieldTypeTestCase { () -> ft.prefixQuery("prefix", null, null)); } + public void testWildcardQuery() { + MappedFieldType ft = createDefaultFieldType(); + ft.setName("field"); + ft.setIndexOptions(IndexOptions.DOCS); + expectThrows(UnsupportedOperationException.class, + () -> ft.wildcardQuery("foo*", null, null)); + } + public void testRangeQuery() { MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java index aa08447fd20..8a8477b2294 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java @@ -509,7 +509,7 @@ import com.amazonaws.services.ec2.model.UpdateSecurityGroupRuleDescriptionsIngre import com.amazonaws.services.ec2.model.UpdateSecurityGroupRuleDescriptionsIngressResult; import com.amazonaws.services.ec2.waiters.AmazonEC2Waiters; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.ESLoggerFactory; +import org.apache.logging.log4j.LogManager; import java.util.ArrayList; import java.util.Collection; @@ -521,7 +521,7 @@ import java.util.regex.Pattern; public class AmazonEC2Mock implements AmazonEC2 { - private static final Logger logger = ESLoggerFactory.getLogger(AmazonEC2Mock.class.getName()); + private static final Logger logger = LogManager.getLogger(AmazonEC2Mock.class); public static final String PREFIX_PRIVATE_IP = "10.0.0."; public static final String PREFIX_PUBLIC_IP = "8.8.8."; diff --git 
a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java index dc7f8bf8596..cc9ae0c8daf 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapper.java @@ -19,6 +19,8 @@ package org.elasticsearch.discovery.gce; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import com.google.api.client.auth.oauth2.Credential; import com.google.api.client.googleapis.testing.auth.oauth2.MockGoogleCredential; import com.google.api.client.http.HttpBackOffIOExceptionHandler; @@ -29,19 +31,14 @@ import com.google.api.client.http.HttpResponse; import com.google.api.client.http.HttpUnsuccessfulResponseHandler; import com.google.api.client.util.ExponentialBackOff; import com.google.api.client.util.Sleeper; -import org.apache.logging.log4j.Logger; import org.elasticsearch.cloud.gce.util.Access; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; import java.util.Objects; public class RetryHttpInitializerWrapper implements HttpRequestInitializer { - - private TimeValue maxWait; - - private static final Logger logger = ESLoggerFactory.getLogger(RetryHttpInitializerWrapper.class.getName()); + private static final Logger logger = LogManager.getLogger(RetryHttpInitializerWrapper.class); // Intercepts the request for filling in the "Authorization" // header field, as well as recovering from certain unsuccessful @@ -52,6 +49,8 @@ public class RetryHttpInitializerWrapper implements HttpRequestInitializer { // A sleeper; you can replace it with a mock in your test. 
private final Sleeper sleeper; + private TimeValue maxWait; + public RetryHttpInitializerWrapper(Credential wrappedCredential) { this(wrappedCredential, Sleeper.DEFAULT, TimeValue.timeValueMillis(ExponentialBackOff.DEFAULT_MAX_ELAPSED_TIME_MILLIS)); } diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index cb8916b857c..d6d1793066a 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -29,13 +29,13 @@ dependencies { compile "org.elasticsearch:elasticsearch-nio:${version}" // network stack - compile "io.netty:netty-buffer:4.1.28.Final" - compile "io.netty:netty-codec:4.1.28.Final" - compile "io.netty:netty-codec-http:4.1.28.Final" - compile "io.netty:netty-common:4.1.28.Final" - compile "io.netty:netty-handler:4.1.28.Final" - compile "io.netty:netty-resolver:4.1.28.Final" - compile "io.netty:netty-transport:4.1.28.Final" + compile "io.netty:netty-buffer:4.1.29.Final" + compile "io.netty:netty-codec:4.1.29.Final" + compile "io.netty:netty-codec-http:4.1.29.Final" + compile "io.netty:netty-common:4.1.29.Final" + compile "io.netty:netty-handler:4.1.29.Final" + compile "io.netty:netty-resolver:4.1.29.Final" + compile "io.netty:netty-transport:4.1.29.Final" } dependencyLicenses { diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.28.Final.jar.sha1 deleted file mode 100644 index f8a652d0dd1..00000000000 --- a/plugins/transport-nio/licenses/netty-buffer-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d6c2d13492778009d33f60e05ed90bcb535d1fd1 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..17798a82aa7 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +c3809f72e4b535b343b7dfa3c0c8210dad2fa5ea \ No newline at end 
of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.28.Final.jar.sha1 deleted file mode 100644 index 70799bf1032..00000000000 --- a/plugins/transport-nio/licenses/netty-codec-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a38361d893900947524f8a9da980555950e73d6a \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..f892420795b --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +1651bc2e279216773c234cafe402d68d2a5adc90 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.28.Final.jar.sha1 deleted file mode 100644 index e1d34ebf89b..00000000000 --- a/plugins/transport-nio/licenses/netty-codec-http-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -897100c1022c780b0a436b9349e507e8fa9800dc \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..aa97345bad1 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +454688b88cea27a4d407202d1fc79a6522345b5e \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.28.Final.jar.sha1 deleted file mode 100644 index bc951426832..00000000000 --- a/plugins/transport-nio/licenses/netty-common-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -df69ce8bb9b544a71e7bbee290253cf7c93e6bad \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.29.Final.jar.sha1 
b/plugins/transport-nio/licenses/netty-common-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..47140876e6a --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +a5d6a735ed07d8f197daa48db7f097cfc971ee5e \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.28.Final.jar.sha1 deleted file mode 100644 index 80dc8b8f6fe..00000000000 --- a/plugins/transport-nio/licenses/netty-handler-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a035784682da0126bc25f10713dac732b5082a6d \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..7c2d407f75e --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +1acf1d94799296a2517533ec75ce7e155e9c4ea7 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.28.Final.jar.sha1 deleted file mode 100644 index afe004bd716..00000000000 --- a/plugins/transport-nio/licenses/netty-resolver-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f33557dcb31fa20da075ac05e4808115e32ef9b7 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..bac08f57079 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +bbec1dc913732e4773893c14d795b15d6c1e878e \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.28.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.28.Final.jar.sha1 deleted file mode 100644 index 
af19a16d6ed..00000000000 --- a/plugins/transport-nio/licenses/netty-transport-4.1.28.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d2ef28f49d726737f0ffe84bf66529b3bf6e0c0d \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.29.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.29.Final.jar.sha1 new file mode 100644 index 00000000000..0ce64132afb --- /dev/null +++ b/plugins/transport-nio/licenses/netty-transport-4.1.29.Final.jar.sha1 @@ -0,0 +1 @@ +c190b90f70e2ae8a48c068afad709e8728fcaa39 \ No newline at end of file diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java index 9c672c1caf1..a7f8768bb69 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; @@ -57,6 +58,7 @@ import java.nio.channels.SocketChannel; import java.util.Arrays; import java.util.function.Consumer; import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; import static org.elasticsearch.common.settings.Setting.intSetting; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; @@ -176,11 +178,15 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { } else if (origin.equals(ANY_ORIGIN)) { builder = NioCorsConfigBuilder.forAnyOrigin(); } else { - 
Pattern p = RestUtils.checkCorsSettingForRegex(origin); - if (p == null) { - builder = NioCorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); - } else { - builder = NioCorsConfigBuilder.forPattern(p); + try { + Pattern p = RestUtils.checkCorsSettingForRegex(origin); + if (p == null) { + builder = NioCorsConfigBuilder.forOrigins(RestUtils.corsSettingAsArray(origin)); + } else { + builder = NioCorsConfigBuilder.forPattern(p); + } + } catch (PatternSyntaxException e) { + throw new SettingsException("Bad regex in [" + SETTING_CORS_ALLOW_ORIGIN.getKey() + "]: [" + origin + "]", e); } } if (SETTING_CORS_ALLOW_CREDENTIALS.get(settings)) { diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java index 8acec830f11..13b8e60336e 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.MockBigArrays; @@ -65,6 +66,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; @@ -139,6 +141,17 @@ public class NioHttpServerTransportTests extends ESTestCase { 
assertFalse(corsConfig.isCredentialsAllowed()); } + public void testCorsConfigWithBadRegex() { + final Settings settings = Settings.builder() + .put(SETTING_CORS_ENABLED.getKey(), true) + .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), "/[*/") + .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) + .build(); + SettingsException e = expectThrows(SettingsException.class, () -> NioHttpServerTransport.buildCorsConfig(settings)); + assertThat(e.getMessage(), containsString("Bad regex in [http.cors.allow-origin]: [/[*/]")); + assertThat(e.getCause(), instanceOf(PatternSyntaxException.class)); + } + /** * Test that {@link NioHttpServerTransport} supports the "Expect: 100-continue" HTTP header * @throws InterruptedException if the client communication with the server is interrupted diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java index f53c9d3b1f5..53e55faecf9 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerConfigurationTests.java @@ -58,7 +58,7 @@ public class EvilLoggerConfigurationTests extends ESTestCase { } public void testResolveMultipleConfigs() throws Exception { - final Level level = ESLoggerFactory.getLogger("test").getLevel(); + final Level level = LogManager.getLogger("test").getLevel(); try { final Path configDir = getDataPath("config"); final Settings settings = Settings.builder() @@ -106,7 +106,7 @@ public class EvilLoggerConfigurationTests extends ESTestCase { LogConfigurator.configure(environment); final String loggerName = "test"; - final Logger logger = ESLoggerFactory.getLogger(loggerName); + final Logger logger = LogManager.getLogger(loggerName); assertThat(logger.getLevel().toString(), equalTo(level)); } @@ -122,7 +122,7 @@ public class EvilLoggerConfigurationTests 
extends ESTestCase { // args should overwrite whatever is in the config final String loggerName = "test_resolve_order"; - final Logger logger = ESLoggerFactory.getLogger(loggerName); + final Logger logger = LogManager.getLogger(loggerName); assertTrue(logger.isTraceEnabled()); } @@ -134,14 +134,14 @@ public class EvilLoggerConfigurationTests extends ESTestCase { final Environment environment = new Environment(settings, configDir); LogConfigurator.configure(environment); - assertThat(ESLoggerFactory.getLogger("x").getLevel(), equalTo(Level.TRACE)); - assertThat(ESLoggerFactory.getLogger("x.y").getLevel(), equalTo(Level.DEBUG)); + assertThat(LogManager.getLogger("x").getLevel(), equalTo(Level.TRACE)); + assertThat(LogManager.getLogger("x.y").getLevel(), equalTo(Level.DEBUG)); final Level level = randomFrom(Level.TRACE, Level.DEBUG, Level.INFO, Level.WARN, Level.ERROR); - Loggers.setLevel(ESLoggerFactory.getLogger("x"), level); + Loggers.setLevel(LogManager.getLogger("x"), level); - assertThat(ESLoggerFactory.getLogger("x").getLevel(), equalTo(level)); - assertThat(ESLoggerFactory.getLogger("x.y").getLevel(), equalTo(level)); + assertThat(LogManager.getLogger("x").getLevel(), equalTo(level)); + assertThat(LogManager.getLogger("x.y").getLevel(), equalTo(level)); } public void testMissingConfigFile() { diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index 5013148fb74..bebdb320db4 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -82,7 +82,7 @@ public class EvilLoggerTests extends ESTestCase { public void testLocationInfoTest() throws IOException, UserException { setupLogging("location_info"); - final Logger testLogger = ESLoggerFactory.getLogger("test"); + final Logger testLogger = 
LogManager.getLogger("test"); testLogger.error("This is an error message"); testLogger.warn("This is a warning message"); @@ -108,7 +108,7 @@ public class EvilLoggerTests extends ESTestCase { public void testDeprecationLogger() throws IOException, UserException { setupLogging("deprecation"); - final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger("deprecation")); + final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger("deprecation")); final int deprecatedIterations = randomIntBetween(0, 256); for (int i = 0; i < deprecatedIterations; i++) { @@ -135,7 +135,7 @@ public class EvilLoggerTests extends ESTestCase { public void testConcurrentDeprecationLogger() throws IOException, UserException, BrokenBarrierException, InterruptedException { setupLogging("deprecation"); - final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger("deprecation")); + final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger("deprecation")); final int numberOfThreads = randomIntBetween(2, 4); final CyclicBarrier barrier = new CyclicBarrier(1 + numberOfThreads); @@ -214,7 +214,7 @@ public class EvilLoggerTests extends ESTestCase { public void testDeprecationLoggerMaybeLog() throws IOException, UserException { setupLogging("deprecation"); - final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger("deprecation")); + final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger("deprecation")); final int iterations = randomIntBetween(1, 16); @@ -286,12 +286,12 @@ public class EvilLoggerTests extends ESTestCase { public void testFindAppender() throws IOException, UserException { setupLogging("find_appender"); - final Logger hasConsoleAppender = ESLoggerFactory.getLogger("has_console_appender"); + final Logger hasConsoleAppender = LogManager.getLogger("has_console_appender"); final Appender 
testLoggerConsoleAppender = Loggers.findAppender(hasConsoleAppender, ConsoleAppender.class); assertNotNull(testLoggerConsoleAppender); assertThat(testLoggerConsoleAppender.getName(), equalTo("console")); - final Logger hasCountingNoOpAppender = ESLoggerFactory.getLogger("has_counting_no_op_appender"); + final Logger hasCountingNoOpAppender = LogManager.getLogger("has_counting_no_op_appender"); assertNull(Loggers.findAppender(hasCountingNoOpAppender, ConsoleAppender.class)); final Appender countingNoOpAppender = Loggers.findAppender(hasCountingNoOpAppender, CountingNoOpAppender.class); assertThat(countingNoOpAppender.getName(), equalTo("counting_no_op")); diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 8f322719487..676608f3be8 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -20,11 +20,11 @@ package org.elasticsearch.smoketest; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -69,7 +69,7 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { */ public static final String TESTS_CLUSTER = "tests.cluster"; - protected static final Logger logger = ESLoggerFactory.getLogger(ESSmokeClientTestCase.class.getName()); + protected static final Logger logger = 
LogManager.getLogger(ESSmokeClientTestCase.class); private static final AtomicInteger counter = new AtomicInteger(); private static Client client; diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml index 233ff32b418..dc7242f2875 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml @@ -1,18 +1,22 @@ --- "Array of objects": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: true body: - index: _index: test_index - _type: test_type _id: test_id - f1: v1 f2: 42 - index: _index: test_index - _type: test_type _id: test_id2 - f1: v2 f2: 47 @@ -25,23 +29,26 @@ --- "Empty _id": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: true body: - index: _index: test - _type: type _id: '' - f: 1 - index: _index: test - _type: type _id: id - f: 2 - index: _index: test - _type: type - f: 3 - match: { errors: true } - match: { items.0.index.status: 400 } @@ -59,12 +66,17 @@ --- "empty action": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/ headers: Content-Type: application/json bulk: + include_type_name: false body: | - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} + {"index": {"_index": "test_index", "_id": "test_id"}} {"f1": "v1", "f2": 42} {} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml new file mode 100644 index 00000000000..233ff32b418 --- /dev/null +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/11_basic_with_types.yml @@ -0,0 +1,70 @@ +--- +"Array of objects": + - do: + bulk: + refresh: true + body: + - index: + _index: test_index + _type: test_type + _id: test_id + - f1: v1 + f2: 42 + - index: + _index: test_index + _type: test_type + _id: test_id2 + - f1: v2 + f2: 47 + + - do: + count: + index: test_index + + - match: {count: 2} + +--- +"Empty _id": + - do: + bulk: + refresh: true + body: + - index: + _index: test + _type: type + _id: '' + - f: 1 + - index: + _index: test + _type: type + _id: id + - f: 2 + - index: + _index: test + _type: type + - f: 3 + - match: { errors: true } + - match: { items.0.index.status: 400 } + - match: { items.0.index.error.type: illegal_argument_exception } + - match: { items.0.index.error.reason: if _id is specified it must not be empty } + - match: { items.1.index.result: created } + - match: { items.2.index.result: created } + + - do: + count: + index: test + + - match: { count: 2 } + +--- +"empty action": + + - do: + catch: /Malformed action\/metadata line \[3\], expected FIELD_NAME but found \[END_OBJECT\]/ + headers: + Content-Type: application/json + bulk: + body: | + {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} + {"f1": "v1", "f2": 42} + {} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml index def91f42807..742cf49c38e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml @@ -1,12 +1,18 @@ --- "List of strings": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: true body: - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}}' + - '{"index": {"_index": "test_index", 
"_id": "test_id"}}' - '{"f1": "v1", "f2": 42}' - - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}}' + - '{"index": {"_index": "test_index", "_id": "test_id2"}}' - '{"f1": "v2", "f2": 47}' - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml new file mode 100644 index 00000000000..def91f42807 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/21_list_of_strings_with_types.yml @@ -0,0 +1,17 @@ +--- +"List of strings": + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}}' + - '{"f1": "v1", "f2": 42}' + - '{"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}}' + - '{"f1": "v2", "f2": 47}' + + - do: + count: + index: test_index + + - match: {count: 2} + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml index 1d117253c9b..15a70fa3f37 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml @@ -1,12 +1,18 @@ --- "One big string": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: true body: | - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} + {"index": {"_index": "test_index", "_id": "test_id"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}} + {"index": {"_index": "test_index", "_id": "test_id2"}} {"f1": "v2", "f2": 47} - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml new file mode 100644 index 00000000000..1d117253c9b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/31_big_string_with_types.yml @@ -0,0 +1,17 @@ +--- +"One big string": + - do: + bulk: + refresh: true + body: | + {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} + {"f1": "v1", "f2": 42} + {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}} + {"f1": "v2", "f2": 47} + + - do: + count: + index: test_index + + - match: {count: 2} + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml index c852c376cc0..bf4bd079574 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml @@ -1,37 +1,43 @@ --- "Source filtering": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false refresh: true index: test_index - type: test_type id: test_id_1 body: { "foo": "bar", "bar": "foo" } - do: index: + include_type_name: false refresh: true index: test_index - type: test_type id: test_id_2 body: { "foo": "qux", "bar": "pux" } - do: index: + include_type_name: false refresh: true index: test_index - type: test_type id: test_id_3 body: { "foo": "corge", "bar": "forge" } - do: bulk: + include_type_name: false refresh: true body: | - { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": true } } + { "update": { "_index": "test_index", "_id": "test_id_1", "_source": true } } { "doc": { "foo": "baz" } } - { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } + { "update": { "_index": "test_index", "_id": "test_id_2" } } { "_source": true, "doc": { "foo": "quux" } } - match: { 
items.0.update.get._source.foo: baz } @@ -39,8 +45,8 @@ - do: bulk: + include_type_name: false index: test_index - type: test_type _source: true body: | { "update": { "_id": "test_id_3" } } @@ -50,11 +56,12 @@ - do: bulk: + include_type_name: false refresh: true body: | - { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": {"includes": "bar"} } } + { "update": { "_index": "test_index", "_id": "test_id_1", "_source": {"includes": "bar"} } } { "doc": { "foo": "baz" } } - { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } + { "update": { "_index": "test_index", "_id": "test_id_2" } } { "_source": {"includes": "foo"}, "doc": { "foo": "quux" } } - match: { items.0.update.get._source.bar: foo } @@ -64,8 +71,8 @@ - do: bulk: + include_type_name: false index: test_index - type: test_type _source_include: foo body: | { "update": { "_id": "test_id_3" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/41_source_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/41_source_with_types.yml new file mode 100644 index 00000000000..c852c376cc0 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/41_source_with_types.yml @@ -0,0 +1,76 @@ +--- +"Source filtering": + - do: + index: + refresh: true + index: test_index + type: test_type + id: test_id_1 + body: { "foo": "bar", "bar": "foo" } + + - do: + index: + refresh: true + index: test_index + type: test_type + id: test_id_2 + body: { "foo": "qux", "bar": "pux" } + + - do: + index: + refresh: true + index: test_index + type: test_type + id: test_id_3 + body: { "foo": "corge", "bar": "forge" } + + + - do: + bulk: + refresh: true + body: | + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": true } } + { "doc": { "foo": "baz" } } + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } + { "_source": true, "doc": { "foo": "quux" } 
} + + - match: { items.0.update.get._source.foo: baz } + - match: { items.1.update.get._source.foo: quux } + + - do: + bulk: + index: test_index + type: test_type + _source: true + body: | + { "update": { "_id": "test_id_3" } } + { "doc": { "foo": "garply" } } + + - match: { items.0.update.get._source.foo: garply } + + - do: + bulk: + refresh: true + body: | + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_1", "_source": {"includes": "bar"} } } + { "doc": { "foo": "baz" } } + { "update": { "_index": "test_index", "_type": "test_type", "_id": "test_id_2" } } + { "_source": {"includes": "foo"}, "doc": { "foo": "quux" } } + + - match: { items.0.update.get._source.bar: foo } + - is_false: items.0.update.get._source.foo + - match: { items.1.update.get._source.foo: quux } + - is_false: items.1.update.get._source.bar + + - do: + bulk: + index: test_index + type: test_type + _source_include: foo + body: | + { "update": { "_id": "test_id_3" } } + { "doc": { "foo": "garply" } } + + - match: { items.0.update.get._source.foo: garply } + - is_false: items.0.update.get._source.bar + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml index 6326b9464ca..059794873ad 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yml @@ -1,12 +1,18 @@ --- "refresh=true immediately makes changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: true body: | - {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id1"}} + {"index": {"_index": "bulk_50_refresh_1", "_id": "bulk_50_refresh_id1"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id2"}} + 
{"index": {"_index": "bulk_50_refresh_1", "_id": "bulk_50_refresh_id2"}} {"f1": "v2", "f2": 47} - do: @@ -16,13 +22,19 @@ --- "refresh=empty string immediately makes changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: "" body: | - {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id3"}} + {"index": {"_index": "bulk_50_refresh_2", "_id": "bulk_50_refresh_id3"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id4"}} + {"index": {"_index": "bulk_50_refresh_2", "_id": "bulk_50_refresh_id4"}} {"f1": "v2", "f2": 47} - do: @@ -33,13 +45,19 @@ --- "refresh=wait_for waits until changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: bulk: + include_type_name: false refresh: wait_for body: | - {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id5"}} + {"index": {"_index": "bulk_50_refresh_3", "_id": "bulk_50_refresh_id5"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id6"}} + {"index": {"_index": "bulk_50_refresh_3", "_id": "bulk_50_refresh_id6"}} {"f1": "v2", "f2": 47} - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml new file mode 100644 index 00000000000..6326b9464ca --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/51_refresh_with_types.yml @@ -0,0 +1,48 @@ +--- +"refresh=true immediately makes changes are visible in search": + - do: + bulk: + refresh: true + body: | + {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id1"}} + {"f1": "v1", "f2": 42} + {"index": {"_index": 
"bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id2"}} + {"f1": "v2", "f2": 47} + + - do: + count: + index: bulk_50_refresh_1 + - match: {count: 2} + +--- +"refresh=empty string immediately makes changes are visible in search": + - do: + bulk: + refresh: "" + body: | + {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id3"}} + {"f1": "v1", "f2": 42} + {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id4"}} + {"f1": "v2", "f2": 47} + + - do: + count: + index: bulk_50_refresh_2 + - match: {count: 2} + + +--- +"refresh=wait_for waits until changes are visible in search": + - do: + bulk: + refresh: wait_for + body: | + {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id5"}} + {"f1": "v1", "f2": 42} + {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id6"}} + {"f1": "v2", "f2": 47} + + - do: + count: + index: bulk_50_refresh_3 + - match: {count: 2} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml index a3671d5ac24..7f5d13125c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/10_basic.yml @@ -1,10 +1,14 @@ --- "Basic": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } @@ -12,8 +16,16 @@ - do: delete: + include_type_name: false index: test_1 - type: test id: 1 - match: { _version: 2 } + + - do: + catch: /illegal_argument_exception/ + delete: + include_type_name: false + index: index + type: type + id: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml index d1bb4c0df34..c235b8ebfbf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/11_shard_header.yml @@ -1,8 +1,13 @@ --- "Delete check shard header": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: foobar body: settings: @@ -15,19 +20,19 @@ - do: index: + include_type_name: false index: foobar - type: baz id: 1 body: { foo: bar } - do: delete: + include_type_name: false index: foobar - type: baz id: 1 - match: { _index: foobar } - - match: { _type: baz } + - is_false: "_type" - match: { _id: "1"} - match: { _version: 2} - match: { _shards.total: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml index d01e88be8ad..f6d4fffb68a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/12_result.yml @@ -1,17 +1,21 @@ --- "Delete result field": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - do: delete: + include_type_name: false index: test_1 - type: test id: 1 - match: { result: deleted } @@ -19,8 +23,8 @@ - do: catch: missing delete: + include_type_name: false index: test_1 - type: test id: 1 - match: { result: not_found } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/13_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/13_basic_with_types.yml new file mode 100644 index 00000000000..a3671d5ac24 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/13_basic_with_types.yml 
@@ -0,0 +1,19 @@ +--- +"Basic": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - match: { _version: 1 } + + - do: + delete: + index: test_1 + type: test + id: 1 + + - match: { _version: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/14_shard_header_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/14_shard_header_with_types.yml new file mode 100644 index 00000000000..d1bb4c0df34 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/14_shard_header_with_types.yml @@ -0,0 +1,36 @@ +--- +"Delete check shard header": + + - do: + indices.create: + index: foobar + body: + settings: + number_of_shards: "1" + number_of_replicas: "0" + + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: foobar + type: baz + id: 1 + body: { foo: bar } + + - do: + delete: + index: foobar + type: baz + id: 1 + + - match: { _index: foobar } + - match: { _type: baz } + - match: { _id: "1"} + - match: { _version: 2} + - match: { _shards.total: 1} + - match: { _shards.successful: 1} + - match: { _shards.failed: 0} + - is_false: _shards.pending diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/15_result_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/15_result_with_types.yml new file mode 100644 index 00000000000..d01e88be8ad --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/15_result_with_types.yml @@ -0,0 +1,26 @@ +--- +"Delete result field": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - do: + delete: + index: test_1 + type: test + id: 1 + + - match: { result: deleted } + + - do: + catch: missing + delete: + index: test_1 + type: test + id: 1 + + - match: { result: not_found } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml index 3d9ddb79366..c21617bcac6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/20_internal_version.yml @@ -1,10 +1,14 @@ --- "Internal version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } @@ -13,15 +17,15 @@ - do: catch: conflict delete: + include_type_name: false index: test_1 - type: test id: 1 version: 2 - do: delete: + include_type_name: false index: test_1 - type: test id: 1 version: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_internal_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_internal_version_with_types.yml new file mode 100644 index 00000000000..3d9ddb79366 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/21_internal_version_with_types.yml @@ -0,0 +1,28 @@ +--- +"Internal version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - match: { _version: 1} + + - do: + catch: conflict + delete: + index: test_1 + type: test + id: 1 + version: 2 + + - do: + delete: + index: test_1 + type: test + id: 1 + version: 1 + + - match: { _version: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml index 453d64d85bb..c2cae2b6e1e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/25_external_version.yml @@ -1,10 +1,14 @@ --- "External version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false 
index: test_1 - type: test id: 1 body: { foo: bar } version_type: external @@ -15,16 +19,16 @@ - do: catch: conflict delete: + include_type_name: false index: test_1 - type: test id: 1 version_type: external version: 4 - do: delete: + include_type_name: false index: test_1 - type: test id: 1 version_type: external version: 6 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml index 70f78c17faa..df119a57c12 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/26_external_gte_version.yml @@ -1,10 +1,14 @@ --- "External GTE version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external_gte @@ -15,16 +19,16 @@ - do: catch: conflict delete: + include_type_name: false index: test_1 - type: test id: 1 version_type: external_gte version: 4 - do: delete: + include_type_name: false index: test_1 - type: test id: 1 version_type: external_gte version: 6 @@ -33,8 +37,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external_gte @@ -44,8 +48,8 @@ - do: delete: + include_type_name: false index: test_1 - type: test id: 1 version_type: external_gte version: 6 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/27_external_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/27_external_version_with_types.yml new file mode 100644 index 00000000000..453d64d85bb --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/27_external_version_with_types.yml @@ -0,0 +1,32 @@ +--- +"External version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { 
foo: bar } + version_type: external + version: 5 + + - match: { _version: 5} + + - do: + catch: conflict + delete: + index: test_1 + type: test + id: 1 + version_type: external + version: 4 + + - do: + delete: + index: test_1 + type: test + id: 1 + version_type: external + version: 6 + + - match: { _version: 6} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/28_external_gte_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/28_external_gte_version_with_types.yml new file mode 100644 index 00000000000..70f78c17faa --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/28_external_gte_version_with_types.yml @@ -0,0 +1,53 @@ +--- +"External GTE version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external_gte + version: 5 + + - match: { _version: 5} + + - do: + catch: conflict + delete: + index: test_1 + type: test + id: 1 + version_type: external_gte + version: 4 + + - do: + delete: + index: test_1 + type: test + id: 1 + version_type: external_gte + version: 6 + + - match: { _version: 6} + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external_gte + version: 6 + + - match: { _version: 6} + + - do: + delete: + index: test_1 + type: test + id: 1 + version_type: external_gte + version: 6 + + - match: { _version: 6} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml index 6f67b3a03f4..a0ad089b0fb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml @@ -1,16 +1,21 @@ --- "Routing": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: number_of_shards: 5 - do: index: 
+ include_type_name: false index: test_1 - type: test id: 1 routing: 5 body: { foo: bar } @@ -18,15 +23,15 @@ - do: catch: missing delete: + include_type_name: false index: test_1 - type: test id: 1 routing: 4 - do: delete: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/31_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/31_routing_with_types.yml new file mode 100644 index 00000000000..6f67b3a03f4 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/31_routing_with_types.yml @@ -0,0 +1,32 @@ +--- +"Routing": + + - do: + indices.create: + index: test_1 + body: + settings: + number_of_shards: 5 + - do: + index: + index: test_1 + type: test + id: 1 + routing: 5 + body: { foo: bar } + + - do: + catch: missing + delete: + index: test_1 + type: test + id: 1 + routing: 4 + + - do: + delete: + index: test_1 + type: test + id: 1 + routing: 5 + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml index ad27bb68601..326186bf07b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yml @@ -1,8 +1,13 @@ --- "Refresh": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -16,8 +21,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } refresh: true @@ -27,8 +32,8 @@ # them to be different for this test to pass - do: index: + include_type_name: false index: test_1 - type: test id: 3 body: { foo: bar } refresh: true @@ -36,6 +41,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { terms: { _id: [1,3] }} @@ -44,12 +50,13 @@ - do: 
delete: + include_type_name: false index: test_1 - type: test id: 1 - do: search: + include_type_name: false index: test_1 body: query: { terms: { _id: [1,3] }} @@ -58,8 +65,8 @@ - do: delete: + include_type_name: false index: test_1 - type: test id: 3 refresh: true @@ -69,6 +76,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { terms: { _id: [1,3] }} @@ -77,10 +85,15 @@ --- "When refresh url parameter is an empty string that means \"refresh immediately\"": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } refresh: true @@ -88,6 +101,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 1 }} @@ -95,13 +109,14 @@ - do: delete: + include_type_name: false index: test_1 - type: test id: 1 refresh: "" - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 1 }} @@ -109,10 +124,15 @@ --- "refresh=wait_for waits until changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: delete_50_refresh_1 - type: test id: delete_50_refresh_id1 body: { foo: bar } refresh: true @@ -120,6 +140,7 @@ - do: search: + include_type_name: false index: delete_50_refresh_1 body: query: { term: { _id: delete_50_refresh_id1 }} @@ -127,14 +148,15 @@ - do: delete: + include_type_name: false index: delete_50_refresh_1 - type: test id: delete_50_refresh_id1 refresh: wait_for - is_false: forced_refresh - do: search: + include_type_name: false index: delete_50_refresh_1 body: query: { term: { _id: delete_50_refresh_id1 }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/51_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/51_refresh_with_types.yml new file mode 100644 index 00000000000..ad27bb68601 --- /dev/null +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/51_refresh_with_types.yml @@ -0,0 +1,141 @@ +--- +"Refresh": + + - do: + indices.create: + index: test_1 + body: + settings: + refresh_interval: -1 + number_of_shards: 5 + number_of_routing_shards: 5 + number_of_replicas: 0 + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + refresh: true + +# If you wonder why this document get 3 as an id instead of 2, it is because the +# current routing algorithm would route 1 and 2 to the same shard while we need +# them to be different for this test to pass + - do: + index: + index: test_1 + type: test + id: 3 + body: { foo: bar } + refresh: true + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { terms: { _id: [1,3] }} + + - match: { hits.total: 2 } + + - do: + delete: + index: test_1 + type: test + id: 1 + + - do: + search: + index: test_1 + body: + query: { terms: { _id: [1,3] }} + + - match: { hits.total: 2 } + + - do: + delete: + index: test_1 + type: test + id: 3 + refresh: true + +# If a replica shard where doc 1 is located gets initialized at this point, doc 1 +# won't be found by the following search as the shard gets automatically refreshed +# right before getting started. This is why this test only works with 0 replicas. 
+ + - do: + search: + index: test_1 + body: + query: { terms: { _id: [1,3] }} + + - match: { hits.total: 1 } + +--- +"When refresh url parameter is an empty string that means \"refresh immediately\"": + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + refresh: true + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { term: { _id: 1 }} + - match: { hits.total: 1 } + + - do: + delete: + index: test_1 + type: test + id: 1 + refresh: "" + + - do: + search: + index: test_1 + body: + query: { term: { _id: 1 }} + - match: { hits.total: 0 } + +--- +"refresh=wait_for waits until changes are visible in search": + - do: + index: + index: delete_50_refresh_1 + type: test + id: delete_50_refresh_id1 + body: { foo: bar } + refresh: true + - is_true: forced_refresh + + - do: + search: + index: delete_50_refresh_1 + body: + query: { term: { _id: delete_50_refresh_id1 }} + - match: { hits.total: 1 } + + - do: + delete: + index: delete_50_refresh_1 + type: test + id: delete_50_refresh_id1 + refresh: wait_for + - is_false: forced_refresh + + - do: + search: + index: delete_50_refresh_1 + body: + query: { term: { _id: delete_50_refresh_id1 }} + - match: { hits.total: 0 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml index 9cfdb48ae20..46b238482d7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/60_missing.yml @@ -1,19 +1,27 @@ --- "Missing document with catch": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: catch: missing delete: + include_type_name: false index: test_1 - type: test id: 1 --- "Missing document with ignore": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: delete: + include_type_name: false index: test_1 - 
type: test id: 1 ignore: 404 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/61_missing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/61_missing_with_types.yml new file mode 100644 index 00000000000..9cfdb48ae20 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/61_missing_with_types.yml @@ -0,0 +1,19 @@ +--- +"Missing document with catch": + + - do: + catch: missing + delete: + index: test_1 + type: test + id: 1 + +--- +"Missing document with ignore": + + - do: + delete: + index: test_1 + type: test + id: 1 + ignore: 404 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml index 39320d12136..2c7937aeacc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/field_caps/10_basic.yml @@ -24,6 +24,16 @@ setup: nested2: type: float doc_values: false + level1: + type: nested + properties: + level2: + type: object + properties: + leaf1: + type: text + index: false + - do: indices.create: index: test2 @@ -48,6 +58,15 @@ setup: nested2: type: float doc_values: true + level1: + type: nested + properties: + level2: + type: object + properties: + leaf1: + type: text + index: false - do: indices.create: index: test3 @@ -64,7 +83,7 @@ setup: geo: type: keyword object: - type: object + type: nested properties: nested1 : type : long @@ -72,6 +91,15 @@ setup: nested2: type: keyword doc_values: false + level1: + type: object + properties: + level2: + type: object + properties: + leaf1: + type: text + index: false --- "Get simple field caps": @@ -112,7 +140,7 @@ setup: - is_false: fields.geo.keyword.non_searchable_indices - is_false: fields.geo.keyword.on_aggregatable_indices --- -"Get nested field caps": +"Get leaves field caps": - do: field_caps: @@ -140,6 +168,47 @@ setup: - is_false: 
fields.object\.nested2.keyword.non_aggregatable_indices - is_false: fields.object\.nested2.keyword.non_searchable_indices --- +"Get object and nested field caps": + - skip: + version: " - 6.99.99" + reason: object and nested fields are returned since 7.0 + + - do: + field_caps: + index: 'test1,test2,test3' + fields: object*,level1* + + - match: {fields.object.object.indices: ["test1", "test2"]} + - match: {fields.object.object.searchable: false} + - match: {fields.object.object.aggregatable: false} + - is_false: fields.object.object.non_aggregatable_indices + - is_false: fields.object.object.non_searchable_indices + - match: {fields.object.nested.indices: ["test3"]} + - match: {fields.object.nested.searchable: false} + - match: {fields.object.nested.aggregatable: false} + - is_false: fields.object.nested.non_aggregatable_indices + - is_false: fields.object.nested.non_searchable_indices + - match: {fields.level1.nested.indices: ["test1", "test2"]} + - match: {fields.level1.nested.searchable: false} + - match: {fields.level1.nested.aggregatable: false} + - is_false: fields.level1.nested.non_aggregatable_indices + - is_false: fields.level1.nested.non_searchable_indices + - match: {fields.level1.object.indices: ["test3"]} + - match: {fields.level1.object.searchable: false} + - match: {fields.level1.object.aggregatable: false} + - is_false: fields.level1.object.non_aggregatable_indices + - is_false: fields.level1.object.non_searchable_indices + - match: {fields.level1\.level2.object.searchable: false} + - match: {fields.level1\.level2.object.aggregatable: false} + - is_false: fields.level1\.level2.object.indices + - is_false: fields.level1\.level2.object.non_aggregatable_indices + - is_false: fields.level1\.level2.object.non_searchable_indices + - match: {fields.level1\.level2\.leaf1.text.searchable: false} + - match: {fields.level1\.level2\.leaf1.text.aggregatable: false} + - is_false: fields.level1\.level2\.leaf1.text.indices + - is_false: 
fields.level1\.level2\.leaf1.text.non_aggregatable_indices + - is_false: fields.level1\.level2\.leaf1.text.non_searchable_indices +--- "Get prefix field caps": - do: field_caps: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml index 0689f714d64..71403f0b56f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/10_basic.yml @@ -1,31 +1,32 @@ --- "Basic": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 中文 body: { "foo": "Hello: 中文" } - do: get: + include_type_name: false index: test_1 - type: test id: 中文 - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: 中文 } - match: { _source: { foo: "Hello: 中文" } } - do: + catch: /illegal_argument_exception/ get: - index: test_1 - type: _all - id: 中文 - - - match: { _index: test_1 } - - match: { _type: test } - - match: { _id: 中文 } - - match: { _source: { foo: "Hello: 中文" } } + index: index + type: type + id: 1 + include_type_name: false diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/11_basic_with_types.yml new file mode 100644 index 00000000000..0689f714d64 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/11_basic_with_types.yml @@ -0,0 +1,31 @@ +--- +"Basic": + + - do: + index: + index: test_1 + type: test + id: 中文 + body: { "foo": "Hello: 中文" } + + - do: + get: + index: test_1 + type: test + id: 中文 + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: 中文 } + - match: { _source: { foo: "Hello: 中文" } } + + - do: + get: + index: test_1 + type: _all + id: 中文 + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: 中文 } + - match: {
_source: { foo: "Hello: 中文" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml index 5e08112253e..fbab99fc3c6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml @@ -1,21 +1,25 @@ --- "Default values": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { "foo": "bar" } - do: get: + include_type_name: false index: test_1 - type: _all id: 1 - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: '1' } - match: { _source: { foo: "bar" } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/16_default_values_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/16_default_values_with_types.yml new file mode 100644 index 00000000000..5e08112253e --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/16_default_values_with_types.yml @@ -0,0 +1,21 @@ +--- +"Default values": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { "foo": "bar" } + + - do: + get: + index: test_1 + type: _all + id: 1 + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: '1' } + - match: { _source: { foo: "bar" } } + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml index fbffb9e0ea8..20971728ffd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml @@ -1,43 +1,47 @@ --- "Stored fields": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: 
indices.create: + include_type_name: false index: test_1 body: mappings: - test: - properties: - foo: - type: keyword - store: true - count: - type: integer - store: true + properties: + foo: + type: keyword + store: true + count: + type: integer + store: true - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { "foo": "bar", "count": 1 } - do: get: + include_type_name: false index: test_1 - type: test id: 1 stored_fields: foo - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: '1' } - match: { fields.foo: [bar] } - is_false: _source - do: get: + include_type_name: false index: test_1 - type: test id: 1 stored_fields: [foo, count] @@ -47,8 +51,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 stored_fields: [foo, count, _source] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/21_stored_fields_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/21_stored_fields_with_types.yml new file mode 100644 index 00000000000..fbffb9e0ea8 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/21_stored_fields_with_types.yml @@ -0,0 +1,59 @@ +--- +"Stored fields": + + - do: + indices.create: + index: test_1 + body: + mappings: + test: + properties: + foo: + type: keyword + store: true + count: + type: integer + store: true + + - do: + index: + index: test_1 + type: test + id: 1 + body: { "foo": "bar", "count": 1 } + - do: + get: + index: test_1 + type: test + id: 1 + stored_fields: foo + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: '1' } + - match: { fields.foo: [bar] } + - is_false: _source + + - do: + get: + index: test_1 + type: test + id: 1 + stored_fields: [foo, count] + + - match: { fields.foo: [bar] } + - match: { fields.count: [1] } + - is_false: _source + + - do: + get: + index: test_1 + type: test + id: 1 + stored_fields: [foo, count, _source] + + - match: { fields.foo: [bar] } + - 
match: { fields.count: [1] } + - match: { _source.foo: bar } + + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml index 276346cda4f..94162314225 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml @@ -1,8 +1,13 @@ --- "Routing": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -17,16 +22,16 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 body: { foo: bar } - do: get: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 stored_fields: [_routing] @@ -37,7 +42,7 @@ - do: catch: missing get: + include_type_name: false index: test_1 - type: test id: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/41_routing_with_types.yml new file mode 100644 index 00000000000..276346cda4f --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/41_routing_with_types.yml @@ -0,0 +1,43 @@ +--- +"Routing": + + - do: + indices.create: + index: test_1 + body: + settings: + index: + number_of_shards: 5 + number_of_routing_shards: 5 + number_of_replicas: 0 + + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: test_1 + type: test + id: 1 + routing: 5 + body: { foo: bar } + + - do: + get: + index: test_1 + type: test + id: 1 + routing: 5 + stored_fields: [_routing] + + - match: { _id: "1"} + - match: { _routing: "5"} + + - do: + catch: missing + get: + index: test_1 + type: test + id: 1 + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml 
index b88dbaafc4f..bd26eee1b5d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yml @@ -2,11 +2,13 @@ "REST test with headers": - skip: features: ["headers", "yaml"] + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { "body": "foo" } @@ -14,12 +16,12 @@ headers: Accept: application/yaml get: + include_type_name: false index: test_1 - type: _all id: 1 - match: {_index: "test_1"} - - match: {_type: "test"} + - is_false: "_type" - match: {_id: "1"} - match: {_version: 1} - match: {found: true} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/51_with_headers_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/51_with_headers_with_types.yml new file mode 100644 index 00000000000..b88dbaafc4f --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/51_with_headers_with_types.yml @@ -0,0 +1,26 @@ +--- +"REST test with headers": + - skip: + features: ["headers", "yaml"] + + - do: + index: + index: test_1 + type: test + id: 1 + body: { "body": "foo" } + + - do: + headers: + Accept: application/yaml + get: + index: test_1 + type: _all + id: 1 + + - match: {_index: "test_1"} + - match: {_type: "test"} + - match: {_id: "1"} + - match: {_version: 1} + - match: {found: true} + - match: { _source: { body: foo }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml index 7d02b4667ef..c5955bf4d7a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/60_realtime_refresh.yml @@ -1,8 +1,13 @@ --- "Realtime Refresh": + - skip: + version: " - 6.99.99" + reason: include_type_name was 
introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -16,23 +21,23 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - do: catch: missing get: + include_type_name: false index: test_1 - type: test id: 1 realtime: false - do: get: + include_type_name: false index: test_1 - type: test id: 1 realtime: true @@ -40,8 +45,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 realtime: false refresh: true diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/61_realtime_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/61_realtime_refresh_with_types.yml new file mode 100644 index 00000000000..7d02b4667ef --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/61_realtime_refresh_with_types.yml @@ -0,0 +1,49 @@ +--- +"Realtime Refresh": + + - do: + indices.create: + index: test_1 + body: + settings: + index: + refresh_interval: -1 + number_of_replicas: 0 + + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - do: + catch: missing + get: + index: test_1 + type: test + id: 1 + realtime: false + + - do: + get: + index: test_1 + type: test + id: 1 + realtime: true + + - is_true: found + + - do: + get: + index: test_1 + type: test + id: 1 + realtime: false + refresh: true + + - is_true: found diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml index c858886ca3d..55520000e2f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/70_source_filtering.yml @@ -1,53 +1,57 @@ --- "Source filtering": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: 
+ include_type_name: false index: test_1 body: mappings: - test: - properties: - count: - type: integer - store: true + properties: + count: + type: integer + store: true - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } - do: - get: { index: test_1, type: test, id: 1, _source: false } + get: { include_type_name: false, index: test_1, id: 1, _source: false } - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: "1" } - is_false: _source - do: - get: { index: test_1, type: test, id: 1, _source: true } + get: { include_type_name: false, index: test_1, id: 1, _source: true } - match: { _source.include.field1: v1 } - do: - get: { index: test_1, type: test, id: 1, _source: include.field1 } + get: { include_type_name: false, index: test_1, id: 1, _source: include.field1 } - match: { _source.include.field1: v1 } - is_false: _source.include.field2 - do: - get: { index: test_1, type: test, id: 1, _source_include: include.field1 } + get: { include_type_name: false, index: test_1, id: 1, _source_include: include.field1 } - match: { _source.include.field1: v1 } - is_false: _source.include.field2 - do: - get: { index: test_1, type: test, id: 1, _source_include: "include.field1,include.field2" } + get: { include_type_name: false, index: test_1, id: 1, _source_include: "include.field1,include.field2" } - match: { _source.include.field1: v1 } - match: { _source.include.field2: v2 } - is_false: _source.count - do: - get: { index: test_1, type: test, id: 1, _source_include: include, _source_exclude: "*.field2" } + get: { include_type_name: false, index: test_1, id: 1, _source_include: include, _source_exclude: "*.field2" } - match: { _source.include.field1: v1 } - is_false: _source.include.field2 - is_false: _source.count @@ -55,14 +59,14 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 stored_fields: count _source: true - match: 
{ _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: "1" } - match: { fields.count: [1] } - match: { _source.include.field1: v1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/71_source_filtering_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/71_source_filtering_with_types.yml new file mode 100644 index 00000000000..c858886ca3d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/71_source_filtering_with_types.yml @@ -0,0 +1,68 @@ +--- +"Source filtering": + + - do: + indices.create: + index: test_1 + body: + mappings: + test: + properties: + count: + type: integer + store: true + + - do: + index: + index: test_1 + type: test + id: 1 + body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } + - do: + get: { index: test_1, type: test, id: 1, _source: false } + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: "1" } + - is_false: _source + + - do: + get: { index: test_1, type: test, id: 1, _source: true } + - match: { _source.include.field1: v1 } + + - do: + get: { index: test_1, type: test, id: 1, _source: include.field1 } + - match: { _source.include.field1: v1 } + - is_false: _source.include.field2 + + - do: + get: { index: test_1, type: test, id: 1, _source_include: include.field1 } + - match: { _source.include.field1: v1 } + - is_false: _source.include.field2 + + - do: + get: { index: test_1, type: test, id: 1, _source_include: "include.field1,include.field2" } + - match: { _source.include.field1: v1 } + - match: { _source.include.field2: v2 } + - is_false: _source.count + + - do: + get: { index: test_1, type: test, id: 1, _source_include: include, _source_exclude: "*.field2" } + - match: { _source.include.field1: v1 } + - is_false: _source.include.field2 + - is_false: _source.count + + + - do: + get: + index: test_1 + type: test + id: 1 + stored_fields: count + _source: true + + - match: { _index: test_1 } + - match: { 
_type: test } + - match: { _id: "1" } + - match: { fields.count: [1] } + - match: { _source.include.field1: v1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml index a60d1138856..48a6966b455 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/80_missing.yml @@ -1,19 +1,27 @@ --- "Missing document with catch": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: catch: missing get: + include_type_name: false index: test_1 - type: test id: 1 --- "Missing document with ignore": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: get: + include_type_name: false index: test_1 - type: test id: 1 ignore: 404 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/81_missing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/81_missing_with_types.yml new file mode 100644 index 00000000000..a60d1138856 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/81_missing_with_types.yml @@ -0,0 +1,19 @@ +--- +"Missing document with catch": + + - do: + catch: missing + get: + index: test_1 + type: test + id: 1 + +--- +"Missing document with ignore": + + - do: + get: + index: test_1 + type: test + id: 1 + ignore: 404 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml index c6631b83b18..6975d4f5be5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml @@ -1,26 +1,30 @@ --- "Versions": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: 
test id: 1 body: { foo: bar } - match: { _version: 1} - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - match: { _version: 2} - do: get: + include_type_name: false index: test_1 - type: test id: 1 version: 2 - match: { _id: "1" } @@ -28,15 +32,15 @@ - do: catch: conflict get: + include_type_name: false index: test_1 - type: test id: 1 version: 1 - do: get: + include_type_name: false index: test_1 - type: test id: 1 version: 2 version_type: external @@ -45,8 +49,8 @@ - do: catch: conflict get: + include_type_name: false index: test_1 - type: test id: 1 version: 10 version_type: external @@ -54,16 +58,16 @@ - do: catch: conflict get: + include_type_name: false index: test_1 - type: test id: 1 version: 1 version_type: external - do: get: + include_type_name: false index: test_1 - type: test id: 1 version: 2 version_type: external_gte @@ -72,8 +76,8 @@ - do: catch: conflict get: + include_type_name: false index: test_1 - type: test id: 1 version: 10 version_type: external_gte @@ -81,8 +85,8 @@ - do: catch: conflict get: + include_type_name: false index: test_1 - type: test id: 1 version: 1 version_type: external_gte diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/91_versions_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/91_versions_with_types.yml new file mode 100644 index 00000000000..c6631b83b18 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/91_versions_with_types.yml @@ -0,0 +1,89 @@ +--- +"Versions": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + - match: { _version: 1} + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + - match: { _version: 2} + + - do: + get: + index: test_1 + type: test + id: 1 + version: 2 + - match: { _id: "1" } + + - do: + catch: conflict + get: + index: test_1 + type: test + id: 1 + version: 1 + + - do: + get: + index: test_1 + type: test + id: 1 + version: 2 + 
version_type: external + - match: { _id: "1" } + + - do: + catch: conflict + get: + index: test_1 + type: test + id: 1 + version: 10 + version_type: external + + - do: + catch: conflict + get: + index: test_1 + type: test + id: 1 + version: 1 + version_type: external + + - do: + get: + index: test_1 + type: test + id: 1 + version: 2 + version_type: external_gte + - match: { _id: "1" } + + - do: + catch: conflict + get: + index: test_1 + type: test + id: 1 + version: 10 + version_type: external_gte + + - do: + catch: conflict + get: + index: test_1 + type: test + id: 1 + version: 1 + version_type: external_gte + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml index daac81849fb..0deb7637694 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/10_with_id.yml @@ -1,26 +1,30 @@ --- "Index with ID": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test-weird-index-中文 - type: weird.type id: 1 body: { foo: bar } - match: { _index: test-weird-index-中文 } - - match: { _type: weird.type } + - is_false: "_type" - match: { _id: "1"} - match: { _version: 1} - do: get: + include_type_name: false index: test-weird-index-中文 - type: weird.type id: 1 - match: { _index: test-weird-index-中文 } - - match: { _type: weird.type } + - is_false: "_type" - match: { _id: "1"} - match: { _version: 1} - match: { _source: { foo: bar }} @@ -28,7 +32,16 @@ - do: catch: bad_request index: + include_type_name: false index: idx - type: type id: 
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa body: { foo: bar } + + - do: + catch: /illegal_argument_exception/ + index: + index: index + type: type + id: 1 + include_type_name: false + body: { foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/11_with_id_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/11_with_id_with_types.yml new file mode 100644 index 00000000000..daac81849fb --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/11_with_id_with_types.yml @@ -0,0 +1,34 @@ +--- +"Index with ID": + + - do: + index: + index: test-weird-index-中文 + type: weird.type + id: 1 + body: { foo: bar } + + - match: { _index: test-weird-index-中文 } + - match: { _type: weird.type } + - match: { _id: "1"} + - match: { _version: 1} + + - do: + get: + index: test-weird-index-中文 + type: weird.type + id: 1 + + - match: { _index: test-weird-index-中文 } + - match: { _type: weird.type } + - match: { _id: "1"} + - match: { _version: 1} + - match: { _source: { foo: bar }} + + - do: + catch: bad_request + index: + index: idx + type: type + id: 
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + body: { foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml index 45ebe0bbd3d..7198c694b51 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/12_result.yml @@ -1,10 +1,14 @@ --- "Index result field": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_index - type: test id: 1 body: { foo: bar } @@ -12,8 +16,8 @@ - do: index: + include_type_name: false index: test_index - type: test id: 1 body: { foo: bar } op_type: index diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/13_result_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/13_result_with_types.yml new file mode 100644 index 00000000000..45ebe0bbd3d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/13_result_with_types.yml @@ -0,0 +1,21 @@ +--- +"Index result field": + + - do: + index: + index: test_index + type: test + id: 1 + body: { foo: bar } + + - match: { result: created } + + - do: + index: + index: test_index + type: test + id: 1 + body: { foo: bar } + op_type: index + + 
- match: { result: updated } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml index 3fff0512b96..3bd607c66fa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/15_without_id.yml @@ -1,26 +1,38 @@ --- "Index without ID": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test body: { foo: bar } - is_true: _id - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _version: 1 } - set: { _id: id } - do: get: + include_type_name: false index: test_1 - type: test id: '$id' - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: $id } - match: { _version: 1 } - match: { _source: { foo: bar }} + + - do: + catch: /illegal_argument_exception/ + index: + index: index + type: type + include_type_name: false + body: { foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/16_without_id_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/16_without_id_with_types.yml new file mode 100644 index 00000000000..3fff0512b96 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/16_without_id_with_types.yml @@ -0,0 +1,26 @@ +--- +"Index without ID": + + - do: + index: + index: test_1 + type: test + body: { foo: bar } + + - is_true: _id + - match: { _index: test_1 } + - match: { _type: test } + - match: { _version: 1 } + - set: { _id: id } + + - do: + get: + index: test_1 + type: test + id: '$id' + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: $id } + - match: { _version: 1 } + - match: { _source: { foo: bar }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml index 60ae26d46d0..ddab362b80f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/20_optype.yml @@ -1,10 +1,14 @@ --- "Optype": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 op_type: create body: { foo: bar } @@ -12,16 +16,16 @@ - do: catch: conflict index: + include_type_name: false index: test_1 - type: test id: 1 op_type: create body: { foo: bar } - do: index: + include_type_name: false index: test_1 - type: test id: 1 op_type: index body: { foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/21_optype_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/21_optype_with_types.yml new file mode 100644 index 00000000000..60ae26d46d0 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/21_optype_with_types.yml @@ -0,0 +1,29 @@ +--- +"Optype": + + - do: + index: + index: test_1 + type: test + id: 1 + op_type: create + body: { foo: bar } + + - do: + catch: conflict + index: + index: test_1 + type: test + id: 1 + op_type: create + body: { foo: bar } + + - do: + index: + index: test_1 + type: test + id: 1 + op_type: index + body: { foo: bar } + + - match: { _version: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml index 1767fbebbf9..53351c24feb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/30_internal_version.yml @@ -1,18 +1,22 @@ --- "Internal version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + 
include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - match: { _version: 1} - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - match: { _version: 2} @@ -20,15 +24,15 @@ - do: catch: conflict index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version: 1 - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version: 2 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/31_internal_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/31_internal_version_with_types.yml new file mode 100644 index 00000000000..1767fbebbf9 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/31_internal_version_with_types.yml @@ -0,0 +1,36 @@ +--- +"Internal version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + - match: { _version: 1} + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + - match: { _version: 2} + + - do: + catch: conflict + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version: 1 + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version: 2 + + - match: { _version: 3 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml index f17e6b74931..054f8cad15d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/35_external_version.yml @@ -1,10 +1,14 @@ --- "External version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external @@ -14,8 +18,8 @@ - do: index: + include_type_name: false 
index: test_1 - type: test id: 1 body: { foo: bar } version_type: external @@ -26,8 +30,8 @@ - do: catch: conflict index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external @@ -36,8 +40,8 @@ - do: catch: conflict index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external @@ -45,8 +49,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml index dccbe02ea14..67f534db341 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/36_external_gte_version.yml @@ -1,10 +1,14 @@ --- "External GTE version": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external_gte @@ -14,8 +18,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external_gte @@ -26,8 +30,8 @@ - do: catch: conflict index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } version_type: external_gte @@ -35,8 +39,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar2 } version_type: external_gte @@ -46,8 +50,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar2 } version_type: external_gte diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/37_external_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/37_external_version_with_types.yml new file mode 100644 index 00000000000..f17e6b74931 --- 
/dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/37_external_version_with_types.yml @@ -0,0 +1,55 @@ +--- +"External version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 0 + + - match: { _version: 0 } + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 5 + + - match: { _version: 5 } + + - do: + catch: conflict + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 5 + + - do: + catch: conflict + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 0 + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external + version: 6 + + - match: { _version: 6} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/38_external_gte_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/38_external_gte_version_with_types.yml new file mode 100644 index 00000000000..dccbe02ea14 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/38_external_gte_version_with_types.yml @@ -0,0 +1,56 @@ +--- +"External GTE version": + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external_gte + version: 0 + + - match: { _version: 0} + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external_gte + version: 5 + + - match: { _version: 5} + + - do: + catch: conflict + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + version_type: external_gte + version: 0 + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar2 } + version_type: external_gte + version: 5 + + - match: { _version: 5} + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar2 } + version_type: external_gte + version: 6 + + - match: { _version: 
6} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml index 5b0cf94f423..523cf47f858 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml @@ -1,8 +1,13 @@ --- "Routing": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -17,16 +22,16 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 body: { foo: bar } - do: get: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 stored_fields: [_routing] @@ -37,7 +42,7 @@ - do: catch: missing get: + include_type_name: false index: test_1 - type: test id: 1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/41_routing_with_types.yml new file mode 100644 index 00000000000..5b0cf94f423 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/41_routing_with_types.yml @@ -0,0 +1,43 @@ +--- +"Routing": + + - do: + indices.create: + index: test_1 + body: + settings: + index: + number_of_shards: 5 + number_of_routing_shards: 5 + number_of_replicas: 0 + + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: test_1 + type: test + id: 1 + routing: 5 + body: { foo: bar } + + - do: + get: + index: test_1 + type: test + id: 1 + routing: 5 + stored_fields: [_routing] + + - match: { _id: "1"} + - match: { _routing: "5"} + + - do: + catch: missing + get: + index: test_1 + type: test + id: 1 + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml index cd78a4e4282..346338791d6 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yml @@ -1,8 +1,13 @@ --- "Refresh": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -11,13 +16,14 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: { foo: bar } - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 1 }} @@ -26,8 +32,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 2 refresh: true body: { foo: bar } @@ -35,6 +41,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 2 }} @@ -43,10 +50,15 @@ --- "When refresh url parameter is an empty string that means \"refresh immediately\"": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 refresh: "" body: { foo: bar } @@ -54,6 +66,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 1 }} @@ -62,10 +75,15 @@ --- "refresh=wait_for waits until changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: index_60_refresh_1 - type: test id: index_60_refresh_id1 body: { foo: bar } refresh: wait_for @@ -73,6 +91,7 @@ - do: search: + include_type_name: false index: index_60_refresh_1 body: query: { term: { _id: index_60_refresh_id1 }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/61_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/61_refresh_with_types.yml new file mode 100644 index 00000000000..cd78a4e4282 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/61_refresh_with_types.yml @@ -0,0 +1,79 
@@ +--- +"Refresh": + + - do: + indices.create: + index: test_1 + body: + settings: + index.refresh_interval: -1 + number_of_replicas: 0 + + - do: + index: + index: test_1 + type: test + id: 1 + body: { foo: bar } + + - do: + search: + index: test_1 + body: + query: { term: { _id: 1 }} + + - match: { hits.total: 0 } + + - do: + index: + index: test_1 + type: test + id: 2 + refresh: true + body: { foo: bar } + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { term: { _id: 2 }} + + - match: { hits.total: 1 } + +--- +"When refresh url parameter is an empty string that means \"refresh immediately\"": + - do: + index: + index: test_1 + type: test + id: 1 + refresh: "" + body: { foo: bar } + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { term: { _id: 1 }} + + - match: { hits.total: 1 } + +--- +"refresh=wait_for waits until changes are visible in search": + - do: + index: + index: index_60_refresh_1 + type: test + id: index_60_refresh_id1 + body: { foo: bar } + refresh: wait_for + - is_false: forced_refresh + + - do: + search: + index: index_60_refresh_1 + body: + query: { term: { _id: index_60_refresh_id1 }} + - match: { hits.total: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml index 6f7c5a60093..a96c31e9ce6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -1,24 +1,35 @@ --- "Create index with mappings": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index body: mappings: - type_1: {} + {} - do: indices.get_mapping: + include_type_name: false index: test_index - - is_true: test_index.mappings.type_1 + - is_true: test_index.mappings --- "Create 
index with settings": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index body: settings: @@ -33,8 +44,13 @@ --- "Create index": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index - match: { acknowledged: true } @@ -43,8 +59,13 @@ --- "Create index with wait_for_active_shards set to all": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index wait_for_active_shards: all body: @@ -57,15 +78,19 @@ --- "Create index with aliases": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index body: mappings: - type_1: - properties: - field: - type: text + properties: + field: + type: text aliases: test_alias: {} test_blias: @@ -93,6 +118,7 @@ reason: is_write_index is not implemented in ES <= 6.x - do: indices.create: + include_type_name: false index: test_index body: aliases: @@ -110,25 +136,15 @@ - is_false: test_index.aliases.test_blias.is_write_index - is_true: test_index.aliases.test_clias.is_write_index ---- -"Create index with no type mappings": - - do: - catch: /illegal_argument_exception/ - indices.create: - index: test_index - body: - mappings: - "" : {} - --- "Create index with invalid mappings": - do: catch: /illegal_argument_exception/ indices.create: + include_type_name: false index: test_index body: mappings: - test_type: - properties: - "": - type: keyword + properties: + "": + type: keyword diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml new file mode 100644 index 00000000000..6f7c5a60093 --- /dev/null +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/11_basic_with_types.yml @@ -0,0 +1,134 @@ +--- +"Create index with mappings": + + - do: + indices.create: + index: test_index + body: + mappings: + type_1: {} + + - do: + indices.get_mapping: + index: test_index + + - is_true: test_index.mappings.type_1 + +--- +"Create index with settings": + + - do: + indices.create: + index: test_index + body: + settings: + number_of_replicas: "0" + + - do: + indices.get_settings: + index: test_index + + - match: { test_index.settings.index.number_of_replicas: "0"} + +--- +"Create index": + + - do: + indices.create: + index: test_index + + - match: { acknowledged: true } + - match: { index: "test_index"} + +--- +"Create index with wait_for_active_shards set to all": + + - do: + indices.create: + index: test_index + wait_for_active_shards: all + body: + settings: + number_of_replicas: "0" + + - match: { acknowledged: true } + - match: { shards_acknowledged: true } + +--- +"Create index with aliases": + + - do: + indices.create: + index: test_index + body: + mappings: + type_1: + properties: + field: + type: text + aliases: + test_alias: {} + test_blias: + routing: b + test_clias: + filter: + term: + field : value + + - do: + indices.get_alias: + index: test_index + + - match: {test_index.aliases.test_blias.search_routing: b} + - match: {test_index.aliases.test_blias.index_routing: b} + - is_false: test_index.aliases.test_blias.filter + - match: {test_index.aliases.test_clias.filter.term.field: value} + - is_false: test_index.aliases.test_clias.index_routing + - is_false: test_index.aliases.test_clias.search_routing + +--- +"Create index with write aliases": + - skip: + version: " - 6.99.99" + reason: is_write_index is not implemented in ES <= 6.x + - do: + indices.create: + index: test_index + body: + aliases: + test_alias: {} + test_blias: + is_write_index: false + test_clias: + is_write_index: true + + - do: + indices.get_alias: + index: test_index + + - 
is_false: test_index.aliases.test_alias.is_write_index + - is_false: test_index.aliases.test_blias.is_write_index + - is_true: test_index.aliases.test_clias.is_write_index + +--- +"Create index with no type mappings": + - do: + catch: /illegal_argument_exception/ + indices.create: + index: test_index + body: + mappings: + "" : {} + +--- +"Create index with invalid mappings": + - do: + catch: /illegal_argument_exception/ + indices.create: + index: test_index + body: + mappings: + test_type: + properties: + "": + type: keyword diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml index 90bb2747a7b..36f45dd0e60 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/10_basic.yml @@ -1,26 +1,32 @@ --- setup: + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: - mappings: - doc: {} + mappings: {} - do: indices.create: + include_type_name: false index: test_2 body: - mappings: - doc: {} + mappings: {} --- "Get /{index}/_mapping with empty mappings": - do: indices.create: + include_type_name: false index: t - do: indices.get_mapping: + include_type_name: false index: t - match: { t.mappings: {}} @@ -29,116 +35,65 @@ setup: "Get /_mapping": - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc + - is_true: test_1.mappings + - is_true: test_2.mappings --- "Get /{index}/_mapping": - do: indices.get_mapping: - index: test_1 + include_type_name: false + index: test_1 - - is_true: test_1.mappings.doc + - is_true: test_1.mappings - is_false: test_2 + --- -"Get /{index}/_mapping/_all": +"Get /_all/_mapping": - do: indices.get_mapping: - 
index: test_1 - type: _all + include_type_name: false + index: _all - - is_true: test_1.mappings.doc - - is_false: test_2 + - is_true: test_1.mappings + - is_true: test_2.mappings --- -"Get /{index}/_mapping/*": +"Get /*/_mapping": - do: indices.get_mapping: - index: test_1 - type: '*' + include_type_name: false + index: '*' - - is_true: test_1.mappings.doc - - is_false: test_2 + - is_true: test_1.mappings + - is_true: test_2.mappings --- -"Get /{index}/_mapping/{type}": +"Get /index,index/_mapping": - do: indices.get_mapping: - index: test_1 - type: doc + include_type_name: false + index: test_1,test_2 - - is_true: test_1.mappings.doc - - is_false: test_2 + - is_true: test_1.mappings + - is_true: test_2.mappings --- -"Get /{index}/_mapping/{type*}": +"Get /index*/_mapping/": - do: indices.get_mapping: - index: test_1 - type: 'd*' + include_type_name: false + index: '*2' - - is_true: test_1.mappings.doc - - is_false: test_2 - ---- -"Get /_mapping/{type}": - - - do: - indices.get_mapping: - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /_all/_mapping/{type}": - - - do: - indices.get_mapping: - index: _all - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /*/_mapping/{type}": - - - do: - indices.get_mapping: - index: '*' - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /index,index/_mapping/{type}": - - - do: - indices.get_mapping: - index: test_1,test_2 - type: doc - - - is_true: test_1.mappings.doc - - is_true: test_2.mappings.doc - ---- -"Get /index*/_mapping/{type}": - - - do: - indices.get_mapping: - index: '*2' - type: doc - - - is_true: test_2.mappings.doc + - is_true: test_2.mappings - is_false: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml new file mode 100644 
index 00000000000..90bb2747a7b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/11_basic_with_types.yml @@ -0,0 +1,144 @@ +--- +setup: + - do: + indices.create: + index: test_1 + body: + mappings: + doc: {} + - do: + indices.create: + index: test_2 + body: + mappings: + doc: {} +--- +"Get /{index}/_mapping with empty mappings": + + - do: + indices.create: + index: t + + - do: + indices.get_mapping: + index: t + + - match: { t.mappings: {}} + +--- +"Get /_mapping": + + - do: + indices.get_mapping: {} + + - is_true: test_1.mappings.doc + - is_true: test_2.mappings.doc + +--- +"Get /{index}/_mapping": + + - do: + indices.get_mapping: + index: test_1 + + - is_true: test_1.mappings.doc + - is_false: test_2 + + +--- +"Get /{index}/_mapping/_all": + + - do: + indices.get_mapping: + index: test_1 + type: _all + + - is_true: test_1.mappings.doc + - is_false: test_2 + +--- +"Get /{index}/_mapping/*": + + - do: + indices.get_mapping: + index: test_1 + type: '*' + + - is_true: test_1.mappings.doc + - is_false: test_2 + +--- +"Get /{index}/_mapping/{type}": + + - do: + indices.get_mapping: + index: test_1 + type: doc + + - is_true: test_1.mappings.doc + - is_false: test_2 + +--- +"Get /{index}/_mapping/{type*}": + + - do: + indices.get_mapping: + index: test_1 + type: 'd*' + + - is_true: test_1.mappings.doc + - is_false: test_2 + +--- +"Get /_mapping/{type}": + + - do: + indices.get_mapping: + type: doc + + - is_true: test_1.mappings.doc + - is_true: test_2.mappings.doc + +--- +"Get /_all/_mapping/{type}": + + - do: + indices.get_mapping: + index: _all + type: doc + + - is_true: test_1.mappings.doc + - is_true: test_2.mappings.doc + +--- +"Get /*/_mapping/{type}": + + - do: + indices.get_mapping: + index: '*' + type: doc + + - is_true: test_1.mappings.doc + - is_true: test_2.mappings.doc + +--- +"Get /index,index/_mapping/{type}": + + - do: + indices.get_mapping: + index: test_1,test_2 + type: doc + + - is_true: test_1.mappings.doc + - 
is_true: test_2.mappings.doc + +--- +"Get /index*/_mapping/{type}": + + - do: + indices.get_mapping: + index: '*2' + type: doc + + - is_true: test_2.mappings.doc + - is_false: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml index 1d33f2d31bb..7588c661885 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml @@ -1,69 +1,104 @@ --- "Test Create and update mapping": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index - do: indices.put_mapping: + include_type_name: false index: test_index - type: test_type body: - test_type: - properties: - text1: - type: text - analyzer: whitespace - text2: - type: text - analyzer: whitespace - subfield.text3: - type: text + properties: + text1: + type: text + analyzer: whitespace + text2: + type: text + analyzer: whitespace + subfield.text3: + type: text - do: indices.get_mapping: + include_type_name: false index: test_index - - match: {test_index.mappings.test_type.properties.text1.type: text} - - match: {test_index.mappings.test_type.properties.text1.analyzer: whitespace} - - match: {test_index.mappings.test_type.properties.text2.type: text} - - match: {test_index.mappings.test_type.properties.text2.analyzer: whitespace} + - match: {test_index.mappings.properties.text1.type: text} + - match: {test_index.mappings.properties.text1.analyzer: whitespace} + - match: {test_index.mappings.properties.text2.type: text} + - match: {test_index.mappings.properties.text2.analyzer: whitespace} - do: indices.put_mapping: + include_type_name: false index: test_index - type: test_type body: - test_type: - properties: - text1: - type: text - analyzer: whitespace - fields: 
- text_raw: - type: keyword + properties: + text1: + type: text + analyzer: whitespace + fields: + text_raw: + type: keyword - do: indices.get_mapping: + include_type_name: false index: test_index - - match: {test_index.mappings.test_type.properties.text1.type: text} - - match: {test_index.mappings.test_type.properties.subfield.properties.text3.type: text} - - match: {test_index.mappings.test_type.properties.text1.fields.text_raw.type: keyword} + - match: {test_index.mappings.properties.text1.type: text} + - match: {test_index.mappings.properties.subfield.properties.text3.type: text} + - match: {test_index.mappings.properties.text1.fields.text_raw.type: keyword} --- "Create index with invalid mappings": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index - do: catch: /illegal_argument_exception/ indices.put_mapping: + include_type_name: false index: test_index - type: test_type body: - test_type: - properties: - "": - type: keyword + properties: + "": + type: keyword + +--- +"PUT mapping with a type and include_type_name: false": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + + - do: + indices.create: + index: index + include_type_name: false + + - do: + catch: /illegal_argument_exception/ + indices.put_mapping: + index: index + type: _doc + include_type_name: false + body: + properties: + bar: + type: float + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml new file mode 100644 index 00000000000..1d33f2d31bb --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/11_basic_with_types.yml @@ -0,0 +1,69 @@ +--- +"Test Create and update mapping": + - do: + indices.create: + index: test_index + + - do: + indices.put_mapping: + index: 
test_index + type: test_type + body: + test_type: + properties: + text1: + type: text + analyzer: whitespace + text2: + type: text + analyzer: whitespace + subfield.text3: + type: text + + - do: + indices.get_mapping: + index: test_index + + - match: {test_index.mappings.test_type.properties.text1.type: text} + - match: {test_index.mappings.test_type.properties.text1.analyzer: whitespace} + - match: {test_index.mappings.test_type.properties.text2.type: text} + - match: {test_index.mappings.test_type.properties.text2.analyzer: whitespace} + + - do: + indices.put_mapping: + index: test_index + type: test_type + body: + test_type: + properties: + text1: + type: text + analyzer: whitespace + fields: + text_raw: + type: keyword + + + - do: + indices.get_mapping: + index: test_index + + - match: {test_index.mappings.test_type.properties.text1.type: text} + - match: {test_index.mappings.test_type.properties.subfield.properties.text3.type: text} + - match: {test_index.mappings.test_type.properties.text1.fields.text_raw.type: keyword} + +--- +"Create index with invalid mappings": + - do: + indices.create: + index: test_index + - do: + catch: /illegal_argument_exception/ + indices.put_mapping: + index: test_index + type: test_type + body: + test_type: + properties: + "": + type: keyword diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_no_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_no_types.yml deleted file mode 100644 index aa05deb3260..00000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/20_no_types.yml +++ /dev/null @@ -1,334 +0,0 @@ ---- -"Create indices and manage mappings without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - body: - mappings: - properties: - foo: - type: keyword - - - do: - indices.get_mapping: - index: index - 
include_type_name: false - - - match: { index.mappings.properties.foo.type: "keyword" } - - - do: - indices.put_mapping: - index: index - include_type_name: false - body: - properties: - bar: - type: float - - - do: - indices.get_mapping: - index: index - include_type_name: false - - - match: { index.mappings.properties.foo.type: "keyword" } - - match: { index.mappings.properties.bar.type: "float" } - ---- -"Index explicit IDs without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - include_type_name: false - index: index - id: 1 - body: { foo: bar } - - - match: { "_index": "index" } - - is_false: _type - - - do: - bulk: - index: index - include_type_name: false - body: | - { "index": { "_id": "2" } } - { "doc": { "foo": "baz" } } - - - match: { "items.0.index._index": "index" } - - is_false: items.0.index._type - ---- -"Index implicit IDs without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - index: index - include_type_name: false - body: { foo: bar } - - - match: { "_index": "index" } - - is_false: _type - - - do: - bulk: - index: index - include_type_name: false - body: | - { "index": { } } - { "doc": { "foo": "baz" } } - - - match: { "items.0.index._index": "index" } - - is_false: items.0.index._type - ---- -"Mixing include_type_name=false with explicit types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - index: - index: index - type: type - id: 1 - include_type_name: false - body: { foo: bar } - - - do: - catch: /illegal_argument_exception/ - index: - index: index - type: type - include_type_name: false - body: { foo: bar } - 
- - do: - catch: /illegal_argument_exception/ - get: - index: index - type: type - id: 1 - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - update: - index: index - type: type - id: 1 - include_type_name: false - body: - doc: { foo: baz } - - - do: - catch: /illegal_argument_exception/ - delete: - index: index - type: type - id: 1 - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - search: - index: index - type: type - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - search: - index: index - type: _doc - include_type_name: false - ---- -"Update API without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - index: index - id: 1 - include_type_name: false - body: { "foo": "bar" } - - - do: - update: - index: index - id: 1 - include_type_name: false - body: - doc: { "foo": "baz" } - - - match: { "_index": "index" } - - is_false: _type - ---- -"GET API without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - index: index - id: 1 - include_type_name: false - body: { "foo": "bar" } - - - do: - get: - index: index - id: 1 - include_type_name: false - - - match: { "_index": "index" } - - is_false: _type - ---- -"Delete API without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - index: index - id: 1 - include_type_name: false - body: { "foo": "bar" } - - - do: - delete: - index: index - id: 1 - include_type_name: false - - - match: { "_index": "index" } - - is_false: _type - ---- -"Search without types": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - 
- - do: - indices.create: - index: index - include_type_name: false - - - do: - index: - index: index - id: 1 - include_type_name: false - body: { "foo": "bar" } - - - do: - indices.refresh: - index: index - - - do: - search: - index: index - include_type_name: false - - - match: { "hits.total": 1 } - - match: { "hits.hits.0._index": "index" } - - is_false: hits.hits.0._type - ---- -"PUT mapping with a type and include_type_name: false": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - catch: /illegal_argument_exception/ - indices.put_mapping: - index: index - type: _doc - include_type_name: false - body: - properties: - bar: - type: float - ---- -"GET mappings on empty index with the include_type_name=false option": - - - skip: - version: " - 6.99.99" - reason: include_type_name was introduced in 7.0.0 - - - do: - indices.create: - index: index - include_type_name: false - - - do: - indices.get_mapping: - index: index - include_type_name: false - - - match: { index.mappings: {} } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml index e125ec2a70f..c4133e8d01b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml @@ -1,12 +1,19 @@ setup: + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_index1 - do: indices.create: + include_type_name: false index: test_index2 - do: indices.create: + include_type_name: false index: foo @@ -14,34 +21,33 @@ setup: "put one mapping per index": - do: indices.put_mapping: + include_type_name: false index: test_index1 - type: 
test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: indices.put_mapping: + include_type_name: false index: test_index2 - type: test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index1.mappings.properties.text.type: text} + - match: {test_index1.mappings.properties.text.analyzer: whitespace} - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index2.mappings.properties.text.type: text} + - match: {test_index2.mappings.properties.text.analyzer: whitespace} - match: { foo.mappings: {} } @@ -50,73 +56,73 @@ setup: - do: indices.put_mapping: + include_type_name: false index: _all - type: test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index1.mappings.properties.text.type: text} + - match: {test_index1.mappings.properties.text.analyzer: whitespace} - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index2.mappings.properties.text.type: text} + - match: {test_index2.mappings.properties.text.analyzer: whitespace} - - match: 
{foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} + - match: {foo.mappings.properties.text.type: text} + - match: {foo.mappings.properties.text.analyzer: whitespace} --- "put mapping in * index": - do: indices.put_mapping: + include_type_name: false index: "*" - type: test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index1.mappings.properties.text.type: text} + - match: {test_index1.mappings.properties.text.analyzer: whitespace} - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index2.mappings.properties.text.type: text} + - match: {test_index2.mappings.properties.text.analyzer: whitespace} - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} + - match: {foo.mappings.properties.text.type: text} + - match: {foo.mappings.properties.text.analyzer: whitespace} --- "put mapping in prefix* index": - do: indices.put_mapping: + include_type_name: false index: "test_index*" - type: test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index1.mappings.properties.text.type: text} + - match: 
{test_index1.mappings.properties.text.analyzer: whitespace} - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index2.mappings.properties.text.type: text} + - match: {test_index2.mappings.properties.text.analyzer: whitespace} - match: { foo.mappings: {} } @@ -124,67 +130,34 @@ setup: "put mapping in list of indices": - do: indices.put_mapping: + include_type_name: false index: [test_index1, test_index2] - type: test_type body: - test_type: - properties: - text: - type: text - analyzer: whitespace + properties: + text: + type: text + analyzer: whitespace - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index1.mappings.properties.text.type: text} + - match: {test_index1.mappings.properties.text.analyzer: whitespace} - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + - match: {test_index2.mappings.properties.text.type: text} + - match: {test_index2.mappings.properties.text.analyzer: whitespace} - match: { foo.mappings: {} } ---- -"put mapping with blank index": - - do: - indices.put_mapping: - type: test_type - body: - test_type: - properties: - text: - type: text - analyzer: whitespace - - - do: - indices.get_mapping: {} - - - match: {test_index1.mappings.test_type.properties.text.type: text} - - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {test_index2.mappings.test_type.properties.text.type: text} - - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} - - - match: {foo.mappings.test_type.properties.text.type: text} - - match: {foo.mappings.test_type.properties.text.analyzer: 
whitespace} - ---- -"put mapping with missing type": - - - - do: - catch: param - indices.put_mapping: {} - --- "post a mapping with default analyzer twice": - do: indices.put_mapping: + include_type_name: false index: test_index1 - type: test_type body: - test_type: dynamic: false properties: text: @@ -193,18 +166,18 @@ setup: - do: indices.put_mapping: + include_type_name: false index: test_index1 - type: test_type body: - test_type: - dynamic: false - properties: - text: - analyzer: default - type: text + dynamic: false + properties: + text: + analyzer: default + type: text - do: - indices.get_mapping: {} + indices.get_mapping: + include_type_name: false - - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.properties.text.type: text} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml new file mode 100644 index 00000000000..e125ec2a70f --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options_with_types.yml @@ -0,0 +1,210 @@ +setup: + - do: + indices.create: + index: test_index1 + - do: + indices.create: + index: test_index2 + - do: + indices.create: + index: foo + + +--- +"put one mapping per index": + - do: + indices.put_mapping: + index: test_index1 + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + - do: + indices.put_mapping: + index: test_index2 + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: 
{test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: { foo.mappings: {} } + +--- +"put mapping in _all index": + + - do: + indices.put_mapping: + index: _all + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {foo.mappings.test_type.properties.text.type: text} + - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} + +--- +"put mapping in * index": + - do: + indices.put_mapping: + index: "*" + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {foo.mappings.test_type.properties.text.type: text} + - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} + +--- +"put mapping in prefix* index": + - do: + indices.put_mapping: + index: "test_index*" + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: { foo.mappings: {} } + +--- 
+"put mapping in list of indices": + - do: + indices.put_mapping: + index: [test_index1, test_index2] + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: { foo.mappings: {} } + +--- +"put mapping with blank index": + - do: + indices.put_mapping: + type: test_type + body: + test_type: + properties: + text: + type: text + analyzer: whitespace + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + - match: {test_index1.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {test_index2.mappings.test_type.properties.text.type: text} + - match: {test_index2.mappings.test_type.properties.text.analyzer: whitespace} + + - match: {foo.mappings.test_type.properties.text.type: text} + - match: {foo.mappings.test_type.properties.text.analyzer: whitespace} + +--- +"put mapping with missing type": + + + - do: + catch: param + indices.put_mapping: {} + +--- +"post a mapping with default analyzer twice": + + - do: + indices.put_mapping: + index: test_index1 + type: test_type + body: + test_type: + dynamic: false + properties: + text: + analyzer: default + type: text + + - do: + indices.put_mapping: + index: test_index1 + type: test_type + body: + test_type: + dynamic: false + properties: + text: + analyzer: default + type: text + + - do: + indices.get_mapping: {} + + - match: {test_index1.mappings.test_type.properties.text.type: text} + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml 
index a5f50464794..1b5f9856391 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml @@ -112,6 +112,16 @@ setup: - match: { hits.hits.0._source.bigint: 72057594037927936 } - is_false: hits.hits.0._source.include.field2 + +--- +"_source filtering on bigint": +- do: + search: + body: + _source: ["bigint"] + query: { match_all: {} } +- match: { hits.hits.0._source.bigint: 72057594037927936 } + --- "fields in body": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/220_no_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/220_no_types.yml new file mode 100644 index 00000000000..b2489d2ad01 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/220_no_types.yml @@ -0,0 +1,46 @@ +--- +"No type returned": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + + - do: + index: + include_type_name: false + index: test_1 + id: 1 + body: {} + - do: + indices.refresh: {} + + - do: + search: + include_type_name: false + index: test_1 + + - length: { hits.hits: 1 } + - match: { hits.hits.0._index: "test_1" } + - is_false: "hits.hits.0._type" + - match: { hits.hits.0._id: "1" } + +--- +"Mixing include_type_name=false with explicit types": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + + - do: + catch: /illegal_argument_exception/ + search: + index: index + type: type + include_type_name: false + + - do: + catch: /illegal_argument_exception/ + search: + index: index + type: _doc + include_type_name: false diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml index b4581edd350..dd5ada3b1f1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/update/10_doc.yml @@ -1,10 +1,14 @@ --- "Partial document": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: foo: bar @@ -13,8 +17,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: @@ -23,14 +27,14 @@ one: 3 - match: { _index: test_1 } - - match: { _type: test } + - is_false: "_type" - match: { _id: "1" } - match: { _version: 2 } - do: get: + include_type_name: false index: test_1 - type: test id: 1 - match: { _source.foo: baz } @@ -38,3 +42,12 @@ - match: { _source.nested.one: 3 } - match: { _source.nested.two: 2 } + - do: + catch: /illegal_argument_exception/ + update: + index: index + type: type + id: 1 + include_type_name: false + body: + doc: { foo: baz } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml index eb2e4ff9a91..69f8f7c6427 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/11_shard_header.yml @@ -1,8 +1,13 @@ --- "Update check shard header": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: foobar body: settings: @@ -15,22 +20,22 @@ - do: index: + include_type_name: false index: foobar - type: baz id: 1 body: { foo: bar } - do: update: + include_type_name: false index: foobar - type: baz id: 1 body: doc: foo: baz - match: { _index: foobar } - - match: { _type: baz } + - is_false: "_type" - match: { _id: "1"} - match: { _version: 2} - match: { _shards.total: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml index 
9adada6d54b..db4b56eedd3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/12_result.yml @@ -1,10 +1,14 @@ --- "Update result field": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar } @@ -15,8 +19,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar } @@ -27,8 +31,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar } @@ -40,8 +44,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: baz } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml new file mode 100644 index 00000000000..b4581edd350 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/13_legacy_doc.yml @@ -0,0 +1,40 @@ +--- +"Partial document": + + - do: + index: + index: test_1 + type: test + id: 1 + body: + foo: bar + count: 1 + nested: { one: 1, two: 2 } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: + foo: baz + nested: + one: 3 + + - match: { _index: test_1 } + - match: { _type: test } + - match: { _id: "1" } + - match: { _version: 2 } + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: baz } + - match: { _source.count: 1 } + - match: { _source.nested.one: 3 } + - match: { _source.nested.two: 2 } + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/14_shard_header_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/14_shard_header_with_types.yml new file mode 100644 index 00000000000..eb2e4ff9a91 --- /dev/null +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/update/14_shard_header_with_types.yml @@ -0,0 +1,39 @@ +--- +"Update check shard header": + + - do: + indices.create: + index: foobar + body: + settings: + number_of_shards: "1" + number_of_replicas: "0" + + - do: + cluster.health: + wait_for_status: green + + - do: + index: + index: foobar + type: baz + id: 1 + body: { foo: bar } + + - do: + update: + index: foobar + type: baz + id: 1 + body: + doc: + foo: baz + + - match: { _index: foobar } + - match: { _type: baz } + - match: { _id: "1"} + - match: { _version: 2} + - match: { _shards.total: 1} + - match: { _shards.successful: 1} + - match: { _shards.failed: 0} + - is_false: _shards.pending diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml new file mode 100644 index 00000000000..9adada6d54b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/15_result_with_types.yml @@ -0,0 +1,52 @@ +--- +"Update result field": + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar } + doc_as_upsert: true + + - match: { _version: 1 } + - match: { result: created } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar } + doc_as_upsert: true + + - match: { _version: 1 } + - match: { result: noop } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar } + doc_as_upsert: true + detect_noop: false + + - match: { _version: 2 } + - match: { result: updated } + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: baz } + doc_as_upsert: true + detect_noop: true + + - match: { _version: 3 } + - match: { result: updated } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml index f34e030ff66..1595e9d6f8a 
100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml @@ -1,10 +1,14 @@ --- "Doc upsert": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar, count: 1 } @@ -12,8 +16,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 - match: { _source.foo: baz } @@ -22,8 +26,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar, count: 1 } @@ -31,8 +35,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 - match: { _source.foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml new file mode 100644 index 00000000000..f34e030ff66 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/21_doc_upsert_with_types.yml @@ -0,0 +1,41 @@ +--- +"Doc upsert": + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar, count: 1 } + upsert: { foo: baz } + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: baz } + - is_false: _source.count + + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar, count: 1 } + upsert: { foo: baz } + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: bar } + - match: { _source.count: 1 } + + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml index 7585b9f3e0b..884fa3e16f6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml @@ -1,10 +1,14 @@ --- "Doc as upsert": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: bar, count: 1 } @@ -12,8 +16,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 - match: { _source.foo: bar } @@ -22,8 +26,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { count: 2 } @@ -31,8 +35,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 - match: { _source.foo: bar } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml new file mode 100644 index 00000000000..7585b9f3e0b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/24_doc_as_upsert_with_types.yml @@ -0,0 +1,41 @@ +--- +"Doc as upsert": + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: bar, count: 1 } + doc_as_upsert: true + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: bar } + - match: { _source.count: 1 } + + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { count: 2 } + doc_as_upsert: true + + - do: + get: + index: test_1 + type: test + id: 1 + + - match: { _source.foo: bar } + - match: { _source.count: 2 } + + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml index 17c4806c693..20ff2020932 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/30_internal_version.yml @@ -1,11 +1,15 @@ --- "Internal version": + - skip: + 
version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: catch: missing update: + include_type_name: false index: test_1 - type: test id: 1 version: 1 body: @@ -13,8 +17,8 @@ - do: index: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: baz } @@ -22,8 +26,8 @@ - do: catch: conflict update: + include_type_name: false index: test_1 - type: test id: 1 version: 2 body: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/31_internal_version_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/31_internal_version_with_types.yml new file mode 100644 index 00000000000..17c4806c693 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/31_internal_version_with_types.yml @@ -0,0 +1,30 @@ +--- +"Internal version": + + - do: + catch: missing + update: + index: test_1 + type: test + id: 1 + version: 1 + body: + doc: { foo: baz } + + - do: + index: + index: test_1 + type: test + id: 1 + body: + doc: { foo: baz } + + - do: + catch: conflict + update: + index: test_1 + type: test + id: 1 + version: 2 + body: + doc: { foo: baz } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml index c0ec082b91a..904d3ce4b4f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_other_versions.yml @@ -1,11 +1,15 @@ --- "Not supported versions": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: catch: /Validation|Invalid/ update: + include_type_name: false index: test_1 - type: test id: 1 version: 2 version_type: external @@ -16,8 +20,8 @@ - do: catch: /Validation|Invalid/ update: + include_type_name: false index: test_1 - type: test id: 1 version: 2 version_type: external_gte diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/update/36_other_versions_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/36_other_versions_with_types.yml new file mode 100644 index 00000000000..c0ec082b91a --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/36_other_versions_with_types.yml @@ -0,0 +1,27 @@ +--- +"Not supported versions": + + - do: + catch: /Validation|Invalid/ + update: + index: test_1 + type: test + id: 1 + version: 2 + version_type: external + body: + doc: { foo: baz } + upsert: { foo: bar } + + - do: + catch: /Validation|Invalid/ + update: + index: test_1 + type: test + id: 1 + version: 2 + version_type: external_gte + body: + doc: { foo: baz } + upsert: { foo: bar } + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml index 977db506710..643d79239d0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml @@ -1,8 +1,13 @@ --- "Routing": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -17,8 +22,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 body: @@ -27,8 +32,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 stored_fields: _routing @@ -38,16 +43,16 @@ - do: catch: missing update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: baz } - do: update: + include_type_name: false index: test_1 - type: test id: 1 routing: 5 _source: foo diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml new file mode 100644 index 
00000000000..977db506710 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/41_routing_with_types.yml @@ -0,0 +1,58 @@ +--- +"Routing": + + - do: + indices.create: + index: test_1 + body: + settings: + index: + number_of_shards: 5 + number_of_routing_shards: 5 + number_of_replicas: 0 + + - do: + cluster.health: + wait_for_status: green + + - do: + update: + index: test_1 + type: test + id: 1 + routing: 5 + body: + doc: { foo: baz } + upsert: { foo: bar } + + - do: + get: + index: test_1 + type: test + id: 1 + routing: 5 + stored_fields: _routing + + - match: { _routing: "5"} + + - do: + catch: missing + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: baz } + + - do: + update: + index: test_1 + type: test + id: 1 + routing: 5 + _source: foo + body: + doc: { foo: baz } + + - match: { get._source.foo: baz } + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml index 8ac1568a127..8039edc2e3a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yml @@ -1,8 +1,13 @@ --- "Refresh": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: indices.create: + include_type_name: false index: test_1 body: settings: @@ -11,8 +16,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 body: doc: { foo: baz } @@ -20,6 +25,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 1 }} @@ -28,8 +34,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 2 refresh: true body: @@ -39,6 +45,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { _id: 2 }} @@ -47,10 +54,15 @@ --- "When refresh url parameter is an empty string that means \"refresh immediately\"": + + - skip: + version: " - 
6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: test_1 - type: test id: 1 refresh: true body: { foo: bar } @@ -58,8 +70,8 @@ - do: update: + include_type_name: false index: test_1 - type: test id: 1 refresh: "" body: @@ -68,6 +80,7 @@ - do: search: + include_type_name: false index: test_1 body: query: { term: { cat: dog }} @@ -76,10 +89,15 @@ --- "refresh=wait_for waits until changes are visible in search": + + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: index: + include_type_name: false index: update_60_refresh_1 - type: test id: update_60_refresh_id1 body: { foo: bar } refresh: true @@ -87,6 +105,7 @@ - do: search: + include_type_name: false index: update_60_refresh_1 body: query: { term: { _id: update_60_refresh_id1 }} @@ -94,8 +113,8 @@ - do: update: + include_type_name: false index: update_60_refresh_1 - type: test id: update_60_refresh_id1 refresh: wait_for body: @@ -104,6 +123,7 @@ - do: search: + include_type_name: false index: update_60_refresh_1 body: query: { match: { test: asdf } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/61_refresh_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/61_refresh_with_types.yml new file mode 100644 index 00000000000..8ac1568a127 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/61_refresh_with_types.yml @@ -0,0 +1,110 @@ +--- +"Refresh": + + - do: + indices.create: + index: test_1 + body: + settings: + index.refresh_interval: -1 + number_of_replicas: 0 + + - do: + update: + index: test_1 + type: test + id: 1 + body: + doc: { foo: baz } + upsert: { foo: bar } + + - do: + search: + index: test_1 + body: + query: { term: { _id: 1 }} + + - match: { hits.total: 0 } + + - do: + update: + index: test_1 + type: test + id: 2 + refresh: true + body: + doc: { foo: baz } + upsert: { foo: bar } + - is_true: forced_refresh + + - do: + search: + 
index: test_1 + body: + query: { term: { _id: 2 }} + + - match: { hits.total: 1 } + +--- +"When refresh url parameter is an empty string that means \"refresh immediately\"": + - do: + index: + index: test_1 + type: test + id: 1 + refresh: true + body: { foo: bar } + - is_true: forced_refresh + + - do: + update: + index: test_1 + type: test + id: 1 + refresh: "" + body: + doc: {cat: dog} + - is_true: forced_refresh + + - do: + search: + index: test_1 + body: + query: { term: { cat: dog }} + + - match: { hits.total: 1 } + +--- +"refresh=wait_for waits until changes are visible in search": + - do: + index: + index: update_60_refresh_1 + type: test + id: update_60_refresh_id1 + body: { foo: bar } + refresh: true + - is_true: forced_refresh + + - do: + search: + index: update_60_refresh_1 + body: + query: { term: { _id: update_60_refresh_id1 }} + - match: { hits.total: 1 } + + - do: + update: + index: update_60_refresh_1 + type: test + id: update_60_refresh_id1 + refresh: wait_for + body: + doc: { test: asdf } + - is_false: forced_refresh + + - do: + search: + index: update_60_refresh_1 + body: + query: { match: { test: asdf } } + - match: { hits.total: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml index 4bb22e6b801..c69984e5cde 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/80_source_filtering.yml @@ -1,10 +1,14 @@ --- "Source filtering": + - skip: + version: " - 6.99.99" + reason: include_type_name was introduced in 7.0.0 + - do: update: + include_type_name: false index: test_1 - type: test id: 1 _source: [foo, bar] body: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml new file mode 100644 index 00000000000..4bb22e6b801 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/81_source_filtering_with_types.yml @@ -0,0 +1,19 @@ +--- +"Source filtering": + + - do: + update: + index: test_1 + type: test + id: 1 + _source: [foo, bar] + body: + doc: { foo: baz } + upsert: { foo: bar } + + - match: { get._source.foo: bar } + - is_false: get._source.bar + +# TODO: +# +# - Add _routing diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml index f7791d09863..7838c200851 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml @@ -7,12 +7,13 @@ - do: indices.create: + include_type_name: false index: test_1 - do: update: + include_type_name: false index: test_1 - type: test id: 1 parent: 5 fields: [ _routing ] @@ -24,8 +25,8 @@ - do: get: + include_type_name: false index: test_1 - type: test id: 1 parent: 5 stored_fields: [ _routing ] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml new file mode 100644 index 00000000000..f7791d09863 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/86_fields_meta_with_types.yml @@ -0,0 +1,33 @@ +--- +"Metadata Fields": + + - skip: + version: "all" + reason: "Update doesn't return metadata fields, waiting for #3259" + + - do: + indices.create: + index: test_1 + + - do: + update: + index: test_1 + type: test + id: 1 + parent: 5 + fields: [ _routing ] + body: + doc: { foo: baz } + upsert: { foo: bar } + + - match: { get._routing: "5" } + + - do: + get: + index: test_1 + type: test + id: 1 + 
parent: 5 + stored_fields: [ _routing ] + + diff --git a/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java b/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java new file mode 100644 index 00000000000..55b78739d1d --- /dev/null +++ b/server/src/main/java/org/apache/lucene/queries/SpanMatchNoDocsQuery.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.lucene.queries; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.TermStates; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanWeight; +import org.apache.lucene.search.spans.Spans; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.Set; + +/** + * A {@link SpanQuery} that matches no documents. 
+ */ +public class SpanMatchNoDocsQuery extends SpanQuery { + private final String field; + private final String reason; + + public SpanMatchNoDocsQuery(String field, String reason) { + this.field = field; + this.reason = reason; + } + + @Override + public String getField() { + return field; + } + + @Override + public String toString(String field) { + return "SpanMatchNoDocsQuery(\"" + reason + "\")"; + } + + @Override + public boolean equals(Object o) { + return sameClassAs(o); + } + + @Override + public int hashCode() { + return classHash(); + } + + @Override + public SpanWeight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { + return new SpanWeight(this, searcher, Collections.emptyMap(), boost) { + @Override + public void extractTermStates(Map contexts) {} + + @Override + public Spans getSpans(LeafReaderContext ctx, Postings requiredPostings) { + return null; + } + + @Override + public void extractTerms(Set terms) {} + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return true; + } + }; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java index 18f33ab397f..4ce810be931 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; @@ -86,6 +87,26 @@ public class 
TransportFieldCapabilitiesIndexAction extends TransportSingleShardA if (indicesService.isMetaDataField(field) || fieldPredicate.test(ft.name())) { FieldCapabilities fieldCap = new FieldCapabilities(field, ft.typeName(), ft.isSearchable(), ft.isAggregatable()); responseMap.put(field, fieldCap); + } else { + continue; + } + // add nested and object fields + int dotIndex = ft.name().lastIndexOf('.'); + while (dotIndex > -1) { + String parentField = ft.name().substring(0, dotIndex); + if (responseMap.containsKey(parentField)) { + // we added this path on another field already + break; + } + // checks if the parent field contains sub-fields + if (mapperService.fullName(parentField) == null) { + // no field type, it must be an object field + ObjectMapper mapper = mapperService.getObjectMapper(parentField); + String type = mapper.nested().isNested() ? "nested" : "object"; + FieldCapabilities fieldCap = new FieldCapabilities(parentField, type, false, false); + responseMap.put(parentField, fieldCap); + } + dotIndex = parentField.lastIndexOf('.'); } } } diff --git a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java index d3a54bf7e45..62fb866ee11 100644 --- a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java +++ b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -48,7 +47,6 @@ public class TransportMainAction extends HandledTransportAction listener) { ClusterState clusterState = clusterService.state(); assert Node.NODE_NAME_SETTING.exists(settings); - final boolean available = 
clusterState.getBlocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE) == false; listener.onResponse( new MainResponse(Node.NODE_NAME_SETTING.get(settings), Version.CURRENT, clusterState.getClusterName(), clusterState.metaData().clusterUUID(), Build.CURRENT)); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java index 78bf9f2fc72..0da39a593a2 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationOperation.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ReplicationGroup; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; @@ -114,9 +115,13 @@ public class ReplicationOperation< // of the sampled replication group, and advanced further than what the given replication group would allow it to. // This would entail that some shards could learn about a global checkpoint that would be higher than its local checkpoint. final long globalCheckpoint = primary.globalCheckpoint(); + // we have to capture the max_seq_no_of_updates after this request was completed on the primary to make sure the value of + // max_seq_no_of_updates on replica when this request is executed is at least the value on the primary when it was executed on. 
+ final long maxSeqNoOfUpdatesOrDeletes = primary.maxSeqNoOfUpdatesOrDeletes(); + assert maxSeqNoOfUpdatesOrDeletes != SequenceNumbers.UNASSIGNED_SEQ_NO : "seqno_of_updates still uninitialized"; final ReplicationGroup replicationGroup = primary.getReplicationGroup(); markUnavailableShardsAsStale(replicaRequest, replicationGroup); - performOnReplicas(replicaRequest, globalCheckpoint, replicationGroup); + performOnReplicas(replicaRequest, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes, replicationGroup); } successfulShards.incrementAndGet(); // mark primary as successful @@ -136,7 +141,7 @@ public class ReplicationOperation< } private void performOnReplicas(final ReplicaRequest replicaRequest, final long globalCheckpoint, - final ReplicationGroup replicationGroup) { + final long maxSeqNoOfUpdatesOrDeletes, final ReplicationGroup replicationGroup) { // for total stats, add number of unassigned shards and // number of initializing shards that are not ready yet to receive operations (recovery has not opened engine yet on the target) totalShards.addAndGet(replicationGroup.getSkippedShards().size()); @@ -145,19 +150,20 @@ public class ReplicationOperation< for (final ShardRouting shard : replicationGroup.getReplicationTargets()) { if (shard.isSameAllocation(primaryRouting) == false) { - performOnReplica(shard, replicaRequest, globalCheckpoint); + performOnReplica(shard, replicaRequest, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes); } } } - private void performOnReplica(final ShardRouting shard, final ReplicaRequest replicaRequest, final long globalCheckpoint) { + private void performOnReplica(final ShardRouting shard, final ReplicaRequest replicaRequest, + final long globalCheckpoint, final long maxSeqNoOfUpdatesOrDeletes) { if (logger.isTraceEnabled()) { logger.trace("[{}] sending op [{}] to replica {} for request [{}]", shard.shardId(), opType, shard, replicaRequest); } totalShards.incrementAndGet(); pendingActions.incrementAndGet(); - replicasProxy.performOn(shard, 
replicaRequest, globalCheckpoint, new ActionListener() { + replicasProxy.performOn(shard, replicaRequest, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes, new ActionListener() { @Override public void onResponse(ReplicaResponse response) { successfulShards.incrementAndGet(); @@ -322,6 +328,12 @@ public class ReplicationOperation< */ long globalCheckpoint(); + /** + * Returns the maximum seq_no of updates (index operations overwrite Lucene) or deletes on the primary. + * This value must be captured after the execution of a replication request on the primary is completed. + */ + long maxSeqNoOfUpdatesOrDeletes(); + /** * Returns the current replication group on the primary shard * @@ -338,12 +350,15 @@ public class ReplicationOperation< /** * Performs the specified request on the specified replica. * - * @param replica the shard this request should be executed on - * @param replicaRequest the operation to perform - * @param globalCheckpoint the global checkpoint on the primary - * @param listener callback for handling the response or failure + * @param replica the shard this request should be executed on + * @param replicaRequest the operation to perform + * @param globalCheckpoint the global checkpoint on the primary + * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates (index operations overwriting Lucene) or deletes on primary + * after this replication was executed on it. 
+ * @param listener callback for handling the response or failure */ - void performOn(ShardRouting replica, RequestT replicaRequest, long globalCheckpoint, ActionListener listener); + void performOn(ShardRouting replica, RequestT replicaRequest, long globalCheckpoint, + long maxSeqNoOfUpdatesOrDeletes, ActionListener listener); /** * Fail the specified shard if needed, removing it from the current set diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index dbdd5acae1f..695c9162633 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -200,7 +200,7 @@ public abstract class TransportReplicationAction< /** * Synchronously execute the specified replica operation. This is done under a permit from - * {@link IndexShard#acquireReplicaOperationPermit(long, long, ActionListener, String, Object)}. + * {@link IndexShard#acquireReplicaOperationPermit(long, long, long, ActionListener, String, Object)}. 
* * @param shardRequest the request to the replica shard * @param replica the replica shard to perform the operation on @@ -489,6 +489,7 @@ public abstract class TransportReplicationAction< replicaRequest.getTargetAllocationID(), replicaRequest.getPrimaryTerm(), replicaRequest.getGlobalCheckpoint(), + replicaRequest.getMaxSeqNoOfUpdatesOrDeletes(), channel, (ReplicationTask) task).run(); } @@ -513,6 +514,7 @@ public abstract class TransportReplicationAction< private final String targetAllocationID; private final long primaryTerm; private final long globalCheckpoint; + private final long maxSeqNoOfUpdatesOrDeletes; private final TransportChannel channel; private final IndexShard replica; /** @@ -528,6 +530,7 @@ public abstract class TransportReplicationAction< String targetAllocationID, long primaryTerm, long globalCheckpoint, + long maxSeqNoOfUpdatesOrDeletes, TransportChannel channel, ReplicationTask task) { this.request = request; @@ -536,6 +539,7 @@ public abstract class TransportReplicationAction< this.targetAllocationID = targetAllocationID; this.primaryTerm = primaryTerm; this.globalCheckpoint = globalCheckpoint; + this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes; final ShardId shardId = request.shardId(); assert shardId != null : "request shardId must be set"; this.replica = getIndexShard(shardId); @@ -575,7 +579,8 @@ public abstract class TransportReplicationAction< new TransportChannelResponseHandler<>(logger, channel, extraMessage, () -> TransportResponse.Empty.INSTANCE); transportService.sendRequest(clusterService.localNode(), transportReplicaAction, - new ConcreteReplicaRequest<>(request, targetAllocationID, primaryTerm, globalCheckpoint), + new ConcreteReplicaRequest<>(request, targetAllocationID, primaryTerm, + globalCheckpoint, maxSeqNoOfUpdatesOrDeletes), handler); } @@ -613,7 +618,7 @@ public abstract class TransportReplicationAction< throw new ShardNotFoundException(this.replica.shardId(), "expected aID [{}] but found [{}]", 
targetAllocationID, actualAllocationId); } - replica.acquireReplicaOperationPermit(primaryTerm, globalCheckpoint, this, executor, request); + replica.acquireReplicaOperationPermit(primaryTerm, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes, this, executor, request); } /** @@ -1023,6 +1028,11 @@ public abstract class TransportReplicationAction< return indexShard.getGlobalCheckpoint(); } + @Override + public long maxSeqNoOfUpdatesOrDeletes() { + return indexShard.getMaxSeqNoOfUpdatesOrDeletes(); + } + @Override public ReplicationGroup getReplicationGroup() { return indexShard.getReplicationGroup(); @@ -1107,6 +1117,7 @@ public abstract class TransportReplicationAction< final ShardRouting replica, final ReplicaRequest request, final long globalCheckpoint, + final long maxSeqNoOfUpdatesOrDeletes, final ActionListener listener) { String nodeId = replica.currentNodeId(); final DiscoveryNode node = clusterService.state().nodes().get(nodeId); @@ -1114,8 +1125,8 @@ public abstract class TransportReplicationAction< listener.onFailure(new NoNodeAvailableException("unknown node [" + nodeId + "]")); return; } - final ConcreteReplicaRequest replicaRequest = - new ConcreteReplicaRequest<>(request, replica.allocationId().getId(), primaryTerm, globalCheckpoint); + final ConcreteReplicaRequest replicaRequest = new ConcreteReplicaRequest<>( + request, replica.allocationId().getId(), primaryTerm, globalCheckpoint, maxSeqNoOfUpdatesOrDeletes); sendReplicaRequest(replicaRequest, node, listener); } @@ -1263,15 +1274,17 @@ public abstract class TransportReplicationAction< protected static final class ConcreteReplicaRequest extends ConcreteShardRequest { private long globalCheckpoint; + private long maxSeqNoOfUpdatesOrDeletes; public ConcreteReplicaRequest(final Supplier requestSupplier) { super(requestSupplier); } public ConcreteReplicaRequest(final R request, final String targetAllocationID, final long primaryTerm, - final long globalCheckpoint) { + final long globalCheckpoint, final long 
maxSeqNoOfUpdatesOrDeletes) { super(request, targetAllocationID, primaryTerm); this.globalCheckpoint = globalCheckpoint; + this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes; } @Override @@ -1282,6 +1295,13 @@ public abstract class TransportReplicationAction< } else { globalCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO; } + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { + maxSeqNoOfUpdatesOrDeletes = in.readZLong(); + } else { + // UNASSIGNED_SEQ_NO (-2) means uninitialized, and replicas will disable + // optimization using seq_no if its max_seq_no_of_updates is still uninitialized + maxSeqNoOfUpdatesOrDeletes = SequenceNumbers.UNASSIGNED_SEQ_NO; + } } @Override @@ -1290,12 +1310,19 @@ public abstract class TransportReplicationAction< if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha1)) { out.writeZLong(globalCheckpoint); } + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { + out.writeZLong(maxSeqNoOfUpdatesOrDeletes); + } } public long getGlobalCheckpoint() { return globalCheckpoint; } + public long getMaxSeqNoOfUpdatesOrDeletes() { + return maxSeqNoOfUpdatesOrDeletes; + } + @Override public String toString() { return "ConcreteReplicaRequest{" + @@ -1303,6 +1330,7 @@ public abstract class TransportReplicationAction< ", primaryTerm='" + getPrimaryTerm() + '\'' + ", request=" + getRequest() + ", globalCheckpoint=" + globalCheckpoint + + ", maxSeqNoOfUpdatesOrDeletes=" + maxSeqNoOfUpdatesOrDeletes + '}'; } } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java index 88c65381e7a..6dc11877e7c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardRequest.java @@ -44,7 +44,6 @@ public abstract class SingleShardRequest params; - if (CTX_IN_PARAMS) { - params = new 
HashMap<>(script.getParams()); - params.put(ContextFields.CTX, ctx); - deprecationLogger.deprecated("Using `ctx` via `params.ctx` is deprecated. " + - "Use -Des.scripting.update.ctx_in_params=false to enforce non-deprecated usage."); - } else { - params = script.getParams(); - } - UpdateScript executableScript = factory.newInstance(params); - executableScript.execute(ctx); + UpdateScript executableScript = factory.newInstance(script.getParams(), ctx); + executableScript.execute(); } } catch (Exception e) { throw new IllegalArgumentException("failed to execute script", e); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 475e9b94c68..3395d951415 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -307,7 +307,7 @@ final class Bootstrap { final boolean closeStandardStreams = (foreground == false) || quiet; try { if (closeStandardStreams) { - final Logger rootLogger = ESLoggerFactory.getRootLogger(); + final Logger rootLogger = LogManager.getRootLogger(); final Appender maybeConsoleAppender = Loggers.findAppender(rootLogger, ConsoleAppender.class); if (maybeConsoleAppender != null) { Loggers.removeAppender(rootLogger, maybeConsoleAppender); @@ -339,7 +339,7 @@ final class Bootstrap { } } catch (NodeValidationException | RuntimeException e) { // disable console logging, so user does not see the exception twice (jvm will show it already) - final Logger rootLogger = ESLoggerFactory.getRootLogger(); + final Logger rootLogger = LogManager.getRootLogger(); final Appender maybeConsoleAppender = Loggers.findAppender(rootLogger, ConsoleAppender.class); if (foreground && maybeConsoleAppender != null) { Loggers.removeAppender(rootLogger, maybeConsoleAppender); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 1f6a9fe027d..09fde36e1f9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -27,10 +27,11 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -923,8 +924,9 @@ public class IndexNameExpressionResolver extends AbstractComponent { } DateTimeFormatter parser = dateFormatter.withZone(timeZone); FormatDateTimeFormatter formatter = new FormatDateTimeFormatter(dateFormatterPattern, parser, Locale.ROOT); - DateMathParser dateMathParser = new DateMathParser(formatter); - long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, timeZone); + DateMathParser dateMathParser = formatter.toDateMathParser(); + long millis = dateMathParser.parse(mathExpression, context::getStartTime, false, + DateUtils.dateTimeZoneToZoneId(timeZone)); String time = formatter.printer().print(millis); beforePlaceHolderSb.append(time); diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index fbb2fd19f0e..e90d7261c99 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -24,6 +24,7 @@ import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Assertions; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; @@ -32,7 +33,6 @@ import org.elasticsearch.common.geo.parsers.GeoWKTParser; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.locationtech.spatial4j.context.jts.JtsSpatialContext; @@ -54,7 +54,7 @@ import java.util.Objects; */ public abstract class ShapeBuilder> implements NamedWriteable, ToXContentObject { - protected static final Logger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName()); + protected static final Logger LOGGER = LogManager.getLogger(ShapeBuilder.class); private static final boolean DEBUG; static { diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/Join.java b/server/src/main/java/org/elasticsearch/common/inject/internal/Join.java deleted file mode 100644 index e44bed9d88a..00000000000 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/Join.java +++ /dev/null @@ -1,318 +0,0 @@ -/* - * Copyright (C) 2007 Google Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.elasticsearch.common.inject.internal; - -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.util.CollectionUtils; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Iterator; -import java.util.Map; -import java.util.Objects; - -/** - * Utility for joining pieces of text separated by a delimiter. It can handle - * iterators, collections, arrays, and varargs, and can append to any - * {@link Appendable} or just return a {@link String}. For example, - * {@code join(":", "a", "b", "c")} returns {@code "a:b:c"}. - *
- * <p>
- * All methods of this class throw {@link NullPointerException} when a value - * of {@code null} is supplied for any parameter. The elements within the - * collection, iterator, array, or varargs parameter list may be null -- - * these will be represented in the output by the string {@code "null"}. - * - * @author Kevin Bourrillion - */ -public final class Join { - private Join() { - } - - /** - * Returns a string containing the {@code tokens}, converted to strings if - * necessary, separated by {@code delimiter}. If {@code tokens} is empty, it - * returns an empty string. - *
- * <p>
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return a string consisting of the joined elements - */ - public static String join(String delimiter, Iterable tokens) { - return join(delimiter, tokens.iterator()); - } - - /** - * Returns a string containing the {@code tokens}, converted to strings if - * necessary, separated by {@code delimiter}. If {@code tokens} is empty, it - * returns an empty string. - *
- * <p>
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return a string consisting of the joined elements - */ - public static String join(String delimiter, Object[] tokens) { - return join(delimiter, Arrays.asList(tokens)); - } - - /** - * Returns a string containing the {@code tokens}, converted to strings if - * necessary, separated by {@code delimiter}. - *
- * <p>
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param firstToken the first object to append - * @param otherTokens subsequent objects to append - * @return a string consisting of the joined elements - */ - public static String join( - String delimiter, @Nullable Object firstToken, Object... otherTokens) { - Objects.requireNonNull(otherTokens); - return join(delimiter, CollectionUtils.asArrayList(firstToken, otherTokens)); - } - - /** - * Returns a string containing the {@code tokens}, converted to strings if - * necessary, separated by {@code delimiter}. If {@code tokens} is empty, it - * returns an empty string. - *
- * <p>
- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return a string consisting of the joined elements - */ - public static String join(String delimiter, Iterator tokens) { - StringBuilder sb = new StringBuilder(); - join(sb, delimiter, tokens); - return sb.toString(); - } - - /** - * Returns a string containing the contents of {@code map}, with entries - * separated by {@code entryDelimiter}, and keys and values separated with - * {@code keyValueSeparator}. - *

- * Each key and value will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param keyValueSeparator a string to append between every key and its - * associated value - * @param entryDelimiter a string to append between every entry, but not at - * the beginning or end - * @param map the map containing the data to join - * @return a string consisting of the joined entries of the map; empty if the - * map is empty - */ - public static String join( - String keyValueSeparator, String entryDelimiter, Map map) { - return join(new StringBuilder(), keyValueSeparator, entryDelimiter, map) - .toString(); - } - - /** - * Appends each of the {@code tokens} to {@code appendable}, separated by - * {@code delimiter}. - *

- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param appendable the object to append the results to - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return the same {@code Appendable} instance that was passed in - * @throws JoinException if an {@link IOException} occurs - */ - public static T join( - T appendable, String delimiter, Iterable tokens) { - return join(appendable, delimiter, tokens.iterator()); - } - - /** - * Appends each of the {@code tokens} to {@code appendable}, separated by - * {@code delimiter}. - *

- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param appendable the object to append the results to - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return the same {@code Appendable} instance that was passed in - * @throws JoinException if an {@link IOException} occurs - */ - public static T join( - T appendable, String delimiter, Object[] tokens) { - return join(appendable, delimiter, Arrays.asList(tokens)); - } - - /** - * Appends each of the {@code tokens} to {@code appendable}, separated by - * {@code delimiter}. - *

- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param appendable the object to append the results to - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param firstToken the first object to append - * @param otherTokens subsequent objects to append - * @return the same {@code Appendable} instance that was passed in - * @throws JoinException if an {@link IOException} occurs - */ - public static T join(T appendable, String delimiter, - @Nullable Object firstToken, Object... otherTokens) { - Objects.requireNonNull(otherTokens); - return join(appendable, delimiter, CollectionUtils.asArrayList(firstToken, otherTokens)); - } - - /** - * Appends each of the {@code tokens} to {@code appendable}, separated by - * {@code delimiter}. - *

- * Each token will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param appendable the object to append the results to - * @param delimiter a string to append between every element, but not at the - * beginning or end - * @param tokens objects to append - * @return the same {@code Appendable} instance that was passed in - * @throws JoinException if an {@link IOException} occurs - */ - public static T join( - T appendable, String delimiter, Iterator tokens) { - - /* This method is the workhorse of the class */ - - Objects.requireNonNull(appendable); - Objects.requireNonNull(delimiter); - if (tokens.hasNext()) { - try { - appendOneToken(appendable, tokens.next()); - while (tokens.hasNext()) { - appendable.append(delimiter); - appendOneToken(appendable, tokens.next()); - } - } catch (IOException e) { - throw new JoinException(e); - } - } - return appendable; - } - - /** - * Appends the contents of {@code map} to {@code appendable}, with entries - * separated by {@code entryDelimiter}, and keys and values separated with - * {@code keyValueSeparator}. - *

- * Each key and value will be converted to a {@link CharSequence} using - * {@link String#valueOf(Object)}, if it isn't a {@link CharSequence} already. - * Note that this implies that null tokens will be appended as the - * four-character string {@code "null"}. - * - * @param appendable the object to append the results to - * @param keyValueSeparator a string to append between every key and its - * associated value - * @param entryDelimiter a string to append between every entry, but not at - * the beginning or end - * @param map the map containing the data to join - * @return the same {@code Appendable} instance that was passed in - */ - public static T join(T appendable, - String keyValueSeparator, String entryDelimiter, Map map) { - Objects.requireNonNull(appendable); - Objects.requireNonNull(keyValueSeparator); - Objects.requireNonNull(entryDelimiter); - Iterator> entries = map.entrySet().iterator(); - if (entries.hasNext()) { - try { - appendOneEntry(appendable, keyValueSeparator, entries.next()); - while (entries.hasNext()) { - appendable.append(entryDelimiter); - appendOneEntry(appendable, keyValueSeparator, entries.next()); - } - } catch (IOException e) { - throw new JoinException(e); - } - } - return appendable; - } - - private static void appendOneEntry( - Appendable appendable, String keyValueSeparator, Map.Entry entry) - throws IOException { - appendOneToken(appendable, entry.getKey()); - appendable.append(keyValueSeparator); - appendOneToken(appendable, entry.getValue()); - } - - private static void appendOneToken(Appendable appendable, Object token) - throws IOException { - appendable.append(toCharSequence(token)); - } - - private static CharSequence toCharSequence(Object token) { - return (token instanceof CharSequence) - ? (CharSequence) token - : String.valueOf(token); - } - - /** - * Exception thrown in response to an {@link IOException} from the supplied - * {@link Appendable}. 
This is used because most callers won't want to - * worry about catching an IOException. - */ - public static class JoinException extends RuntimeException { - private JoinException(IOException cause) { - super(cause); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java b/server/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java index ed46a5c3c54..7e4d54867fb 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java +++ b/server/src/main/java/org/elasticsearch/common/inject/internal/MoreTypes.java @@ -634,7 +634,6 @@ public class MoreTypes { private final int modifiers; private final boolean synthetic; private final Class memberType; - private final String memberKey; private MemberImpl(Member member) { this.declaringClass = member.getDeclaringClass(); @@ -642,7 +641,6 @@ public class MoreTypes { this.modifiers = member.getModifiers(); this.synthetic = member.isSynthetic(); this.memberType = memberType(member); - this.memberKey = memberKey(member); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java b/server/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java index 72a60e8678c..e953e9563c6 100644 --- a/server/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/joda/FormatDateTimeFormatter.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.joda; +import org.elasticsearch.common.time.DateMathParser; import org.joda.time.format.DateTimeFormatter; import java.util.Locale; @@ -64,4 +65,8 @@ public class FormatDateTimeFormatter { public Locale locale() { return locale; } + + public DateMathParser toDateMathParser() { + return new JodaDateMathParser(this); + } } diff --git a/server/src/main/java/org/elasticsearch/common/joda/DateMathParser.java b/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java 
similarity index 95% rename from server/src/main/java/org/elasticsearch/common/joda/DateMathParser.java rename to server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java index ba5531c813c..0cef1d3e09b 100644 --- a/server/src/main/java/org/elasticsearch/common/joda/DateMathParser.java +++ b/server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java @@ -20,10 +20,13 @@ package org.elasticsearch.common.joda; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; import org.joda.time.format.DateTimeFormatter; +import java.time.ZoneId; import java.util.Objects; import java.util.function.LongSupplier; @@ -34,23 +37,21 @@ import java.util.function.LongSupplier; * is appended to a datetime with the following syntax: * ||[+-/](\d+)?[yMwdhHms]. */ -public class DateMathParser { +public class JodaDateMathParser implements DateMathParser { private final FormatDateTimeFormatter dateTimeFormatter; - public DateMathParser(FormatDateTimeFormatter dateTimeFormatter) { + public JodaDateMathParser(FormatDateTimeFormatter dateTimeFormatter) { Objects.requireNonNull(dateTimeFormatter); this.dateTimeFormatter = dateTimeFormatter; } - public long parse(String text, LongSupplier now) { - return parse(text, now, false, null); - } - // Note: we take a callable here for the timestamp in order to be able to figure out // if it has been used. For instance, the request cache does not cache requests that make // use of `now`. - public long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone timeZone) { + @Override + public long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) { + final DateTimeZone timeZone = tz == null ? 
null : DateUtils.zoneIdToDateTimeZone(tz); long time; String mathString; if (text.startsWith("now")) { diff --git a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 2159014f825..6388853d00b 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/server/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -70,23 +70,4 @@ public final class ESLoggerFactory { public static Logger getLogger(Class clazz) { return getLogger(null, clazz); } - - /** - * Get or build a logger. - * @deprecated Prefer {@link LogManager#getLogger} - */ - @Deprecated - public static Logger getLogger(String name) { - return getLogger(null, name); - } - - /** - * Get the root logger. - * @deprecated Prefer {@link LogManager#getRootLogger} - */ - @Deprecated - public static Logger getRootLogger() { - return LogManager.getRootLogger(); - } - } diff --git a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index 89c9ce229d6..6f8358e113d 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -258,13 +258,13 @@ public class LogConfigurator { private static void configureLoggerLevels(final Settings settings) { if (Loggers.LOG_DEFAULT_LEVEL_SETTING.exists(settings)) { final Level level = Loggers.LOG_DEFAULT_LEVEL_SETTING.get(settings); - Loggers.setLevel(ESLoggerFactory.getRootLogger(), level); + Loggers.setLevel(LogManager.getRootLogger(), level); } Loggers.LOG_LEVEL_SETTING.getAllConcreteSettings(settings) // do not set a log level for a logger named level (from the default log setting) .filter(s -> s.getKey().equals(Loggers.LOG_DEFAULT_LEVEL_SETTING.getKey()) == false).forEach(s -> { final Level level = s.get(settings); - 
Loggers.setLevel(ESLoggerFactory.getLogger(s.getKey().substring("logger.".length())), level); + Loggers.setLevel(LogManager.getLogger(s.getKey().substring("logger.".length())), level); }); } diff --git a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java index 70681ed40e2..a772867c084 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ b/server/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -93,7 +93,7 @@ public class Loggers { */ @Deprecated public static Logger getLogger(String s) { - return ESLoggerFactory.getLogger(s); + return LogManager.getLogger(s); } /** diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index 5d1e4537f65..f931ee2dc31 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -759,11 +759,11 @@ public final class XMoreLikeThis { if (queue.size() < limit) { // there is still space in the queue - queue.add(new ScoreTerm(word, topField, score, idf, docFreq, tf)); + queue.add(new ScoreTerm(word, topField, score)); } else { ScoreTerm term = queue.top(); if (term.score < score) { // update the smallest in the queue in place and update the queue. 
- term.update(word, topField, score, idf, docFreq, tf); + term.update(word, topField, score); queue.updateTop(); } } @@ -1026,30 +1026,20 @@ public final class XMoreLikeThis { } private static class ScoreTerm { - // only really need 1st 3 entries, other ones are for troubleshooting String word; String topField; float score; - float idf; - int docFreq; - int tf; - ScoreTerm(String word, String topField, float score, float idf, int docFreq, int tf) { + ScoreTerm(String word, String topField, float score) { this.word = word; this.topField = topField; this.score = score; - this.idf = idf; - this.docFreq = docFreq; - this.tf = tf; } - void update(String word, String topField, float score, float idf, int docFreq, int tf) { + void update(String word, String topField, float score) { this.word = word; this.topField = topField; this.score = score; - this.idf = idf; - this.docFreq = docFreq; - this.tf = tf; } } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 7e90aa3f442..4b4ebb7414a 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.common.settings; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.admin.indices.close.TransportCloseIndexAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.AutoCreateIndex; @@ -44,7 +45,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationD import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.logging.ESLoggerFactory; import 
org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; @@ -158,12 +158,12 @@ public final class ClusterSettings extends AbstractScopedSettings { if ("_root".equals(component)) { final String rootLevel = value.get(key); if (rootLevel == null) { - Loggers.setLevel(ESLoggerFactory.getRootLogger(), Loggers.LOG_DEFAULT_LEVEL_SETTING.get(settings)); + Loggers.setLevel(LogManager.getRootLogger(), Loggers.LOG_DEFAULT_LEVEL_SETTING.get(settings)); } else { - Loggers.setLevel(ESLoggerFactory.getRootLogger(), rootLevel); + Loggers.setLevel(LogManager.getRootLogger(), rootLevel); } } else { - Loggers.setLevel(ESLoggerFactory.getLogger(component), value.get(key)); + Loggers.setLevel(LogManager.getLogger(component), value.get(key)); } } } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java index 5e5ecc5bafd..b2cb319071f 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateMathParser.java @@ -19,56 +19,31 @@ package org.elasticsearch.common.time; -import org.elasticsearch.ElasticsearchParseException; +import org.joda.time.DateTimeZone; -import java.time.DateTimeException; -import java.time.DayOfWeek; -import java.time.Instant; -import java.time.LocalTime; import java.time.ZoneId; -import java.time.ZoneOffset; -import java.time.ZonedDateTime; -import java.time.temporal.ChronoField; -import java.time.temporal.TemporalAccessor; -import java.time.temporal.TemporalAdjusters; -import java.time.temporal.TemporalField; -import java.time.temporal.TemporalQueries; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; import java.util.function.LongSupplier; /** - * A parser for date/time formatted text with optional date math. 
- * - * The format of the datetime is configurable, and unix timestamps can also be used. Datemath - * is appended to a datetime with the following syntax: - * ||[+-/](\d+)?[yMwdhHms]. + * An abstraction over date math parsing to allow different implementations for joda and java time. */ -public class DateMathParser { +public interface DateMathParser { - - // base fields which should be used for default parsing, when we round up - private static final Map ROUND_UP_BASE_FIELDS = new HashMap<>(6); - { - ROUND_UP_BASE_FIELDS.put(ChronoField.MONTH_OF_YEAR, 1L); - ROUND_UP_BASE_FIELDS.put(ChronoField.DAY_OF_MONTH, 1L); - ROUND_UP_BASE_FIELDS.put(ChronoField.HOUR_OF_DAY, 23L); - ROUND_UP_BASE_FIELDS.put(ChronoField.MINUTE_OF_HOUR, 59L); - ROUND_UP_BASE_FIELDS.put(ChronoField.SECOND_OF_MINUTE, 59L); - ROUND_UP_BASE_FIELDS.put(ChronoField.MILLI_OF_SECOND, 999L); + /** + * Parse a date math expression without timezone info and rounding down. + */ + default long parse(String text, LongSupplier now) { + return parse(text, now, false, (ZoneId) null); } - private final DateFormatter formatter; - private final DateFormatter roundUpFormatter; + // Note: we take a callable here for the timestamp in order to be able to figure out + // if it has been used. For instance, the request cache does not cache requests that make + // use of `now`. - public DateMathParser(DateFormatter formatter) { - Objects.requireNonNull(formatter); - this.formatter = formatter; - this.roundUpFormatter = formatter.parseDefaulting(ROUND_UP_BASE_FIELDS); - } - - public long parse(String text, LongSupplier now) { - return parse(text, now, false, null); + // exists for backcompat, do not use! + @Deprecated + default long parse(String text, LongSupplier now, boolean roundUp, DateTimeZone tz) { + return parse(text, now, roundUp, tz == null ? 
null : ZoneId.of(tz.getID())); } /** @@ -92,176 +67,8 @@ public class DateMathParser { * @param text the input * @param now a supplier to retrieve the current date in milliseconds, if needed for additions * @param roundUp should the result be rounded up - * @param timeZone an optional timezone that should be applied before returning the milliseconds since the epoch + * @param tz an optional timezone that should be applied before returning the milliseconds since the epoch * @return the parsed date in milliseconds since the epoch */ - public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) { - long time; - String mathString; - if (text.startsWith("now")) { - try { - time = now.getAsLong(); - } catch (Exception e) { - throw new ElasticsearchParseException("could not read the current timestamp", e); - } - mathString = text.substring("now".length()); - } else { - int index = text.indexOf("||"); - if (index == -1) { - return parseDateTime(text, timeZone, roundUp); - } - time = parseDateTime(text.substring(0, index), timeZone, false); - mathString = text.substring(index + 2); - } - - return parseMath(mathString, time, roundUp, timeZone); - } - - private long parseMath(final String mathString, final long time, final boolean roundUp, - ZoneId timeZone) throws ElasticsearchParseException { - if (timeZone == null) { - timeZone = ZoneOffset.UTC; - } - ZonedDateTime dateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(time), timeZone); - for (int i = 0; i < mathString.length(); ) { - char c = mathString.charAt(i++); - final boolean round; - final int sign; - if (c == '/') { - round = true; - sign = 1; - } else { - round = false; - if (c == '+') { - sign = 1; - } else if (c == '-') { - sign = -1; - } else { - throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString); - } - } - - if (i >= mathString.length()) { - throw new ElasticsearchParseException("truncated date math [{}]", mathString); - } - - final int num; 
- if (!Character.isDigit(mathString.charAt(i))) { - num = 1; - } else { - int numFrom = i; - while (i < mathString.length() && Character.isDigit(mathString.charAt(i))) { - i++; - } - if (i >= mathString.length()) { - throw new ElasticsearchParseException("truncated date math [{}]", mathString); - } - num = Integer.parseInt(mathString.substring(numFrom, i)); - } - if (round) { - if (num != 1) { - throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [{}]", mathString); - } - } - char unit = mathString.charAt(i++); - switch (unit) { - case 'y': - if (round) { - dateTime = dateTime.withDayOfYear(1).with(LocalTime.MIN); - } else { - dateTime = dateTime.plusYears(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusYears(1); - } - break; - case 'M': - if (round) { - dateTime = dateTime.withDayOfMonth(1).with(LocalTime.MIN); - } else { - dateTime = dateTime.plusMonths(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusMonths(1); - } - break; - case 'w': - if (round) { - dateTime = dateTime.with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY)).with(LocalTime.MIN); - } else { - dateTime = dateTime.plusWeeks(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusWeeks(1); - } - break; - case 'd': - if (round) { - dateTime = dateTime.with(LocalTime.MIN); - } else { - dateTime = dateTime.plusDays(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusDays(1); - } - break; - case 'h': - case 'H': - if (round) { - dateTime = dateTime.withMinute(0).withSecond(0).withNano(0); - } else { - dateTime = dateTime.plusHours(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusHours(1); - } - break; - case 'm': - if (round) { - dateTime = dateTime.withSecond(0).withNano(0); - } else { - dateTime = dateTime.plusMinutes(sign * num); - } - if (roundUp) { - dateTime = dateTime.plusMinutes(1); - } - break; - case 's': - if (round) { - dateTime = dateTime.withNano(0); - } else { - dateTime = dateTime.plusSeconds(sign * 
num); - } - if (roundUp) { - dateTime = dateTime.plusSeconds(1); - } - break; - default: - throw new ElasticsearchParseException("unit [{}] not supported for date math [{}]", unit, mathString); - } - if (roundUp) { - dateTime = dateTime.minus(1, ChronoField.MILLI_OF_SECOND.getBaseUnit()); - } - } - return dateTime.toInstant().toEpochMilli(); - } - - private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) { - DateFormatter formatter = roundUpIfNoTime ? this.roundUpFormatter : this.formatter; - try { - if (timeZone == null) { - return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant().toEpochMilli(); - } else { - TemporalAccessor accessor = formatter.parse(value); - ZoneId zoneId = TemporalQueries.zone().queryFrom(accessor); - if (zoneId != null) { - timeZone = zoneId; - } - - return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant().toEpochMilli(); - } - } catch (IllegalArgumentException | DateTimeException e) { - throw new ElasticsearchParseException("failed to parse date field [{}]: [{}]", e, value, e.getMessage()); - } - } + long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz); } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateUtils.java b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java new file mode 100644 index 00000000000..ed04321ee83 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/DateUtils.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.time; + +import org.apache.logging.log4j.LogManager; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.joda.time.DateTimeZone; + +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public class DateUtils { + public static DateTimeZone zoneIdToDateTimeZone(ZoneId zoneId) { + if (zoneId == null) { + return null; + } + if (zoneId instanceof ZoneOffset) { + // the id for zoneoffset is not ISO compatible, so cannot be read by ZoneId.of + return DateTimeZone.forOffsetMillis(((ZoneOffset)zoneId).getTotalSeconds() * 1000); + } + return DateTimeZone.forID(zoneId.getId()); + } + + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(DateFormatters.class)); + // pkg private for tests + static final Map DEPRECATED_SHORT_TIMEZONES; + static { + Map tzs = new HashMap<>(); + tzs.put("EST", "-05:00"); // eastern time without daylight savings + tzs.put("HST", "-10:00"); + tzs.put("MST", "-07:00"); + tzs.put("ROC", "Asia/Taipei"); + tzs.put("Eire", "Europe/London"); + DEPRECATED_SHORT_TIMEZONES = Collections.unmodifiableMap(tzs); + } + + public static ZoneId dateTimeZoneToZoneId(DateTimeZone timeZone) { + if (timeZone == null) { + return null; + } + + String deprecatedId = DEPRECATED_SHORT_TIMEZONES.get(timeZone.getID()); + if (deprecatedId != null) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("timezone", + "Use of short timezone id " + timeZone.getID() + " is 
deprecated. Use " + deprecatedId + " instead"); + return ZoneId.of(deprecatedId); + } + return ZoneId.of(timeZone.getID()); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java b/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java new file mode 100644 index 00000000000..c3a59f52190 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/JavaDateMathParser.java @@ -0,0 +1,240 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.ElasticsearchParseException; + +import java.time.DateTimeException; +import java.time.DayOfWeek; +import java.time.Instant; +import java.time.LocalTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoField; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalAdjusters; +import java.time.temporal.TemporalField; +import java.time.temporal.TemporalQueries; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.function.LongSupplier; + +/** + * A parser for date/time formatted text with optional date math. 
+ * + * The format of the datetime is configurable, and unix timestamps can also be used. Datemath + * is appended to a datetime with the following syntax: + * ||[+-/](\d+)?[yMwdhHms]. + */ +public class JavaDateMathParser implements DateMathParser { + + // base fields which should be used for default parsing, when we round up + private static final Map ROUND_UP_BASE_FIELDS = new HashMap<>(6); + { + ROUND_UP_BASE_FIELDS.put(ChronoField.MONTH_OF_YEAR, 1L); + ROUND_UP_BASE_FIELDS.put(ChronoField.DAY_OF_MONTH, 1L); + ROUND_UP_BASE_FIELDS.put(ChronoField.HOUR_OF_DAY, 23L); + ROUND_UP_BASE_FIELDS.put(ChronoField.MINUTE_OF_HOUR, 59L); + ROUND_UP_BASE_FIELDS.put(ChronoField.SECOND_OF_MINUTE, 59L); + ROUND_UP_BASE_FIELDS.put(ChronoField.MILLI_OF_SECOND, 999L); + } + + private final DateFormatter formatter; + private final DateFormatter roundUpFormatter; + + public JavaDateMathParser(DateFormatter formatter) { + Objects.requireNonNull(formatter); + this.formatter = formatter; + this.roundUpFormatter = formatter.parseDefaulting(ROUND_UP_BASE_FIELDS); + } + + @Override + public long parse(String text, LongSupplier now, boolean roundUp, ZoneId timeZone) { + long time; + String mathString; + if (text.startsWith("now")) { + try { + time = now.getAsLong(); + } catch (Exception e) { + throw new ElasticsearchParseException("could not read the current timestamp", e); + } + mathString = text.substring("now".length()); + } else { + int index = text.indexOf("||"); + if (index == -1) { + return parseDateTime(text, timeZone, roundUp); + } + time = parseDateTime(text.substring(0, index), timeZone, false); + mathString = text.substring(index + 2); + } + + return parseMath(mathString, time, roundUp, timeZone); + } + + private long parseMath(final String mathString, final long time, final boolean roundUp, + ZoneId timeZone) throws ElasticsearchParseException { + if (timeZone == null) { + timeZone = ZoneOffset.UTC; + } + ZonedDateTime dateTime = 
ZonedDateTime.ofInstant(Instant.ofEpochMilli(time), timeZone); + for (int i = 0; i < mathString.length(); ) { + char c = mathString.charAt(i++); + final boolean round; + final int sign; + if (c == '/') { + round = true; + sign = 1; + } else { + round = false; + if (c == '+') { + sign = 1; + } else if (c == '-') { + sign = -1; + } else { + throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString); + } + } + + if (i >= mathString.length()) { + throw new ElasticsearchParseException("truncated date math [{}]", mathString); + } + + final int num; + if (!Character.isDigit(mathString.charAt(i))) { + num = 1; + } else { + int numFrom = i; + while (i < mathString.length() && Character.isDigit(mathString.charAt(i))) { + i++; + } + if (i >= mathString.length()) { + throw new ElasticsearchParseException("truncated date math [{}]", mathString); + } + num = Integer.parseInt(mathString.substring(numFrom, i)); + } + if (round) { + if (num != 1) { + throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [{}]", mathString); + } + } + char unit = mathString.charAt(i++); + switch (unit) { + case 'y': + if (round) { + dateTime = dateTime.withDayOfYear(1).with(LocalTime.MIN); + } else { + dateTime = dateTime.plusYears(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusYears(1); + } + break; + case 'M': + if (round) { + dateTime = dateTime.withDayOfMonth(1).with(LocalTime.MIN); + } else { + dateTime = dateTime.plusMonths(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusMonths(1); + } + break; + case 'w': + if (round) { + dateTime = dateTime.with(TemporalAdjusters.previousOrSame(DayOfWeek.MONDAY)).with(LocalTime.MIN); + } else { + dateTime = dateTime.plusWeeks(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusWeeks(1); + } + break; + case 'd': + if (round) { + dateTime = dateTime.with(LocalTime.MIN); + } else { + dateTime = dateTime.plusDays(sign * num); + } + if (roundUp) { + dateTime = 
dateTime.plusDays(1); + } + break; + case 'h': + case 'H': + if (round) { + dateTime = dateTime.withMinute(0).withSecond(0).withNano(0); + } else { + dateTime = dateTime.plusHours(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusHours(1); + } + break; + case 'm': + if (round) { + dateTime = dateTime.withSecond(0).withNano(0); + } else { + dateTime = dateTime.plusMinutes(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusMinutes(1); + } + break; + case 's': + if (round) { + dateTime = dateTime.withNano(0); + } else { + dateTime = dateTime.plusSeconds(sign * num); + } + if (roundUp) { + dateTime = dateTime.plusSeconds(1); + } + break; + default: + throw new ElasticsearchParseException("unit [{}] not supported for date math [{}]", unit, mathString); + } + if (roundUp) { + dateTime = dateTime.minus(1, ChronoField.MILLI_OF_SECOND.getBaseUnit()); + } + } + return dateTime.toInstant().toEpochMilli(); + } + + private long parseDateTime(String value, ZoneId timeZone, boolean roundUpIfNoTime) { + DateFormatter formatter = roundUpIfNoTime ? 
this.roundUpFormatter : this.formatter; + try { + if (timeZone == null) { + return DateFormatters.toZonedDateTime(formatter.parse(value)).toInstant().toEpochMilli(); + } else { + TemporalAccessor accessor = formatter.parse(value); + ZoneId zoneId = TemporalQueries.zone().queryFrom(accessor); + if (zoneId != null) { + timeZone = zoneId; + } + + return DateFormatters.toZonedDateTime(accessor).withZoneSameLocal(timeZone).toInstant().toEpochMilli(); + } + } catch (IllegalArgumentException | DateTimeException e) { + throw new ElasticsearchParseException("failed to parse date field [{}]: [{}]", e, value, e.getMessage()); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java b/server/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java index fd47fd0e86d..d3ca7a222e0 100644 --- a/server/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java +++ b/server/src/main/java/org/elasticsearch/discovery/AckClusterStatePublishResponseHandler.java @@ -19,9 +19,9 @@ package org.elasticsearch.discovery; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.logging.ESLoggerFactory; import java.util.Set; @@ -32,7 +32,7 @@ import java.util.Set; */ public class AckClusterStatePublishResponseHandler extends BlockingClusterStatePublishResponseHandler { - private static final Logger logger = ESLoggerFactory.getLogger(AckClusterStatePublishResponseHandler.class.getName()); + private static final Logger logger = LogManager.getLogger(AckClusterStatePublishResponseHandler.class); private final Discovery.AckListener ackListener; diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index f50f88456a5..c45517e9567 
100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -43,6 +43,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; +import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.index.IndexRequest; @@ -110,6 +111,7 @@ public abstract class Engine implements Closeable { public static final String SYNC_COMMIT_ID = "sync_id"; public static final String HISTORY_UUID_KEY = "history_uuid"; public static final String MIN_RETAINED_SEQNO = "min_retained_seq_no"; + public static final String MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID = "max_unsafe_auto_id_timestamp"; protected final ShardId shardId; protected final String allocationId; @@ -662,7 +664,15 @@ public abstract class Engine implements Closeable { } Releasable releasable = store::decRef; try { - EngineSearcher engineSearcher = new EngineSearcher(source, getReferenceManager(scope), store, logger); + ReferenceManager referenceManager = getReferenceManager(scope); + Searcher engineSearcher = new Searcher(source, referenceManager.acquire(), + s -> { + try { + referenceManager.release(s); + } finally { + store.decRef(); + } + }, logger); releasable = null; // success - hand over the reference to the engine searcher return engineSearcher; } catch (AlreadyClosedException ex) { @@ -1166,40 +1176,67 @@ public abstract class Engine implements Closeable { } public static class Searcher implements Releasable { - private final String source; private final IndexSearcher searcher; + private final AtomicBoolean released = new AtomicBoolean(false); + private final Logger logger; + private final IOUtils.IOConsumer onClose; - public Searcher(String source, IndexSearcher searcher) { + public Searcher(String source, IndexSearcher 
searcher, Logger logger) { + this(source, searcher, s -> s.getIndexReader().close(), logger); + } + + public Searcher(String source, IndexSearcher searcher, IOUtils.IOConsumer onClose, Logger logger) { this.source = source; this.searcher = searcher; + this.onClose = onClose; + this.logger = logger; } /** * The source that caused this searcher to be acquired. */ - public String source() { + public final String source() { return source; } - public IndexReader reader() { + public final IndexReader reader() { return searcher.getIndexReader(); } - public DirectoryReader getDirectoryReader() { + public final DirectoryReader getDirectoryReader() { if (reader() instanceof DirectoryReader) { return (DirectoryReader) reader(); } throw new IllegalStateException("Can't use " + reader().getClass() + " as a directory reader"); } - public IndexSearcher searcher() { + public final IndexSearcher searcher() { return searcher; } @Override public void close() { - // Nothing to close here + if (released.compareAndSet(false, true) == false) { + /* In general, searchers should never be released twice or this would break reference counting. There is one rare case + * when it might happen though: when the request and the Reaper thread would both try to release it in a very short amount + * of time, this is why we only log a warning instead of throwing an exception. 
+ */ + logger.warn("Searcher was released twice", new IllegalStateException("Double release")); + return; + } + try { + onClose.accept(searcher()); + } catch (IOException e) { + throw new IllegalStateException("Cannot close", e); + } catch (AlreadyClosedException e) { + // This means there's a bug somewhere: don't suppress it + throw new AssertionError(e); + } + } + + public final Logger getLogger() { + return logger; } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java b/server/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java deleted file mode 100644 index 7fd0fe6cc39..00000000000 --- a/server/src/main/java/org/elasticsearch/index/engine/EngineSearcher.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.engine; - -import org.apache.logging.log4j.Logger; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.ReferenceManager; -import org.apache.lucene.store.AlreadyClosedException; -import org.elasticsearch.index.store.Store; - -import java.io.IOException; -import java.util.concurrent.atomic.AtomicBoolean; - -/** - * Searcher for an Engine - */ -final class EngineSearcher extends Engine.Searcher { - private final AtomicBoolean released = new AtomicBoolean(false); - private final Store store; - private final Logger logger; - private final ReferenceManager referenceManager; - - EngineSearcher(String source, ReferenceManager searcherReferenceManager, Store store, Logger logger) throws IOException { - super(source, searcherReferenceManager.acquire()); - this.store = store; - this.logger = logger; - this.referenceManager = searcherReferenceManager; - } - - @Override - public void close() { - if (!released.compareAndSet(false, true)) { - /* In general, searchers should never be released twice or this would break reference counting. There is one rare case - * when it might happen though: when the request and the Reaper thread would both try to release it in a very short amount - * of time, this is why we only log a warning instead of throwing an exception. 
- */ - logger.warn("Searcher was released twice", new IllegalStateException("Double release")); - return; - } - try { - referenceManager.release(searcher()); - } catch (IOException e) { - throw new IllegalStateException("Cannot close", e); - } catch (AlreadyClosedException e) { - // This means there's a bug somewhere: don't suppress it - throw new AssertionError(e); - } finally { - store.decRef(); - } - } -} diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 2f38562b7af..85e7cebcb88 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -47,6 +47,7 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.InfoStream; import org.elasticsearch.Assertions; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.SuppressForbidden; @@ -138,7 +139,6 @@ public class InternalEngine extends Engine { // incoming indexing ops to a single thread: private final AtomicInteger throttleRequestCount = new AtomicInteger(); private final AtomicBoolean pendingTranslogRecovery = new AtomicBoolean(false); - public static final String MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID = "max_unsafe_auto_id_timestamp"; private final AtomicLong maxUnsafeAutoIdTimestamp = new AtomicLong(-1); private final AtomicLong maxSeenAutoIdTimestamp = new AtomicLong(-1); private final AtomicLong maxSeqNoOfNonAppendOnlyOperations = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); @@ -606,7 +606,7 @@ public class InternalEngine extends Engine { // in the case of a already pruned translog generation we might get null here - yet very unlikely TranslogLeafReader reader = new TranslogLeafReader((Translog.Index) operation, engineConfig 
.getIndexSettings().getIndexVersionCreated()); - return new GetResult(new Searcher("realtime_get", new IndexSearcher(reader)), + return new GetResult(new Searcher("realtime_get", new IndexSearcher(reader), logger), new VersionsAndSeqNoResolver.DocIdAndVersion(0, ((Translog.Index) operation).version(), reader, 0)); } } catch (IOException e) { @@ -976,6 +976,7 @@ public class InternalEngine extends Engine { if (plan.addStaleOpToLucene) { addStaleDocs(index.docs(), indexWriter); } else if (plan.useLuceneUpdateDocument) { + assert assertMaxSeqNoOfUpdatesIsAdvanced(index.uid(), plan.seqNoForIndexing, true, true); updateDocs(index.uid(), index.docs(), indexWriter); } else { // document does not exists, we can optimize for create, but double check if assertions are running @@ -1275,8 +1276,8 @@ public class InternalEngine extends Engine { return plan; } - private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) - throws IOException { + private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) throws IOException { + assert assertMaxSeqNoOfUpdatesIsAdvanced(delete.uid(), plan.seqNoOfDeletion, false, false); try { if (softDeleteEnabled) { final ParsedDocument tombstone = engineConfig.getTombstoneDocSupplier().newDeleteTombstoneDoc(delete.type(), delete.id()); @@ -2084,7 +2085,7 @@ public class InternalEngine extends Engine { if (warmer != null) { try { assert searcher.getIndexReader() instanceof ElasticsearchDirectoryReader : "this class needs an ElasticsearchDirectoryReader but got: " + searcher.getIndexReader().getClass(); - warmer.warm(new Searcher("top_reader_warming", searcher)); + warmer.warm(new Searcher("top_reader_warming", searcher, s -> {}, logger)); } catch (Exception e) { if (isEngineClosed.get() == false) { logger.warn("failed to prepare/warm", e); @@ -2556,6 +2557,29 @@ public class InternalEngine extends Engine { assert maxUnsafeAutoIdTimestamp.get() <= maxSeenAutoIdTimestamp.get(); } + private boolean 
assertMaxSeqNoOfUpdatesIsAdvanced(Term id, long seqNo, boolean allowDeleted, boolean relaxIfGapInSeqNo) { + final long maxSeqNoOfUpdates = getMaxSeqNoOfUpdatesOrDeletes(); + // If the primary is on an old version which does not replicate msu, we need to relax this assertion for that. + if (maxSeqNoOfUpdates == SequenceNumbers.UNASSIGNED_SEQ_NO) { + assert config().getIndexSettings().getIndexVersionCreated().before(Version.V_6_5_0); + return true; + } + // We treat a delete on the tombstones on replicas as a regular document, then use updateDocument (not addDocument). + if (allowDeleted) { + final VersionValue versionValue = versionMap.getVersionForAssert(id.bytes()); + if (versionValue != null && versionValue.isDelete()) { + return true; + } + } + // Operations can be processed on a replica in a different order than on the primary. If the order on the primary is index-1, + // delete-2, index-3, and the order on a replica is index-1, index-3, delete-2, then the msu of index-3 on the replica is 2 + // even though it is an update (overwrites index-1). We should relax this assertion if there is a pending gap in the seq_no. 
+ if (relaxIfGapInSeqNo && getLocalCheckpoint() < maxSeqNoOfUpdates) { + return true; + } + assert seqNo <= maxSeqNoOfUpdates : "id=" + id + " seq_no=" + seqNo + " msu=" + maxSeqNoOfUpdates; + return true; + } @Override public void initializeMaxSeqNoOfUpdatesOrDeletes() { diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java index 7848921b67e..26ef259a1e1 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.engine; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper; import org.apache.lucene.search.IndexSearcher; @@ -95,7 +96,7 @@ public final class ReadOnlyEngine extends Engine { this.lastCommittedSegmentInfos = Lucene.readSegmentInfos(directory); this.translogStats = translogStats == null ? new TranslogStats(0, 0, 0, 0, 0) : translogStats; this.seqNoStats = seqNoStats == null ? 
buildSeqNoStats(lastCommittedSegmentInfos) : seqNoStats; - reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), config.getShardId()); + reader = ElasticsearchDirectoryReader.wrap(open(directory), config.getShardId()); if (config.getIndexSettings().isSoftDeleteEnabled()) { reader = new SoftDeletesDirectoryReaderWrapper(reader, Lucene.SOFT_DELETES_FIELD); } @@ -103,7 +104,7 @@ public final class ReadOnlyEngine extends Engine { this.indexCommit = reader.getIndexCommit(); this.searcherManager = new SearcherManager(reader, new RamAccountingSearcherFactory(engineConfig.getCircuitBreakerService())); - this.docsStats = docsStats(reader); + this.docsStats = docsStats(lastCommittedSegmentInfos); this.indexWriterLock = indexWriterLock; success = true; } finally { @@ -116,6 +117,28 @@ public final class ReadOnlyEngine extends Engine { } } + protected DirectoryReader open(final Directory directory) throws IOException { + return DirectoryReader.open(directory); + } + + private DocsStats docsStats(final SegmentInfos lastCommittedSegmentInfos) { + long numDocs = 0; + long numDeletedDocs = 0; + long sizeInBytes = 0; + if (lastCommittedSegmentInfos != null) { + for (SegmentCommitInfo segmentCommitInfo : lastCommittedSegmentInfos) { + numDocs += segmentCommitInfo.info.maxDoc() - segmentCommitInfo.getDelCount() - segmentCommitInfo.getSoftDelCount(); + numDeletedDocs += segmentCommitInfo.getDelCount() + segmentCommitInfo.getSoftDelCount(); + try { + sizeInBytes += segmentCommitInfo.sizeInBytes(); + } catch (IOException e) { + throw new UncheckedIOException("Failed to get size for [" + segmentCommitInfo.info.name + "]", e); + } + } + } + return new DocsStats(numDocs, numDeletedDocs, sizeInBytes); + } + @Override protected void closeNoLock(String reason, CountDownLatch closedLatch) { if (isClosed.compareAndSet(false, true)) { diff --git a/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java 
b/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java index aa9f5c8c6de..a67485d4f4a 100644 --- a/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java +++ b/server/src/main/java/org/elasticsearch/index/fieldvisitor/SingleFieldsVisitor.java @@ -19,42 +19,79 @@ package org.elasticsearch.index.fieldvisitor; import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.StoredFieldVisitor; import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.TypeFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Uid; +import org.apache.lucene.util.BytesRef; -import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.List; -public class SingleFieldsVisitor extends FieldsVisitor { +/** + * {@linkplain StoredFieldVisitor} that loads a single field value. + */ +public final class SingleFieldsVisitor extends StoredFieldVisitor { + private final MappedFieldType field; + private final List destination; - private String field; - - public SingleFieldsVisitor(String field) { - super(false); + /** + * Build the field visitor; + * @param field the name of the field to load + * @param destination where to put the field's values + */ + public SingleFieldsVisitor(MappedFieldType field, List destination) { this.field = field; + this.destination = destination; } @Override - public Status needsField(FieldInfo fieldInfo) throws IOException { - if (fieldInfo.name.equals(field)) { + public Status needsField(FieldInfo fieldInfo) { + if (fieldInfo.name.equals(field.name())) { return Status.YES; } + /* + * We can't return Status.STOP here because we could be loading + * multi-valued fields. 
+ */ return Status.NO; } - public void reset(String field) { - this.field = field; - super.reset(); + private void addValue(Object value) { + destination.add(field.valueForDisplay(value)); } @Override - public void postProcess(MapperService mapperService) { - super.postProcess(mapperService); - if (id != null) { - addValue(IdFieldMapper.NAME, id); - } - if (type != null) { - addValue(TypeFieldMapper.NAME, type); + public void binaryField(FieldInfo fieldInfo, byte[] value) { + if (IdFieldMapper.NAME.equals(fieldInfo.name)) { + addValue(Uid.decodeId(value)); + } else { + addValue(new BytesRef(value)); } } + + @Override + public void stringField(FieldInfo fieldInfo, byte[] bytes) { + addValue(new String(bytes, StandardCharsets.UTF_8)); + } + + @Override + public void intField(FieldInfo fieldInfo, int value) { + addValue(value); + } + + @Override + public void longField(FieldInfo fieldInfo, long value) { + addValue(value); + } + + @Override + public void floatField(FieldInfo fieldInfo, float value) { + addValue(value); + } + + @Override + public void doubleField(FieldInfo fieldInfo, double value) { + addValue(value); + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index cb44e777f87..9e0b9f62acb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -30,8 +30,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -55,7 +53,6 @@ import static 
org.elasticsearch.index.mapper.TypeParsers.parseField; * A field mapper for boolean fields. */ public class BooleanFieldMapper extends FieldMapper { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(Loggers.getLogger(BooleanFieldMapper.class)); public static final String CONTENT_TYPE = "boolean"; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index c8360e468d7..0de2731ffd1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -36,10 +36,11 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -231,7 +232,7 @@ public class DateFieldMapper extends FieldMapper { public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { checkIfFrozen(); this.dateTimeFormatter = dateTimeFormatter; - this.dateMathParser = new DateMathParser(dateTimeFormatter); + this.dateMathParser = dateTimeFormatter.toDateMathParser(); } protected DateMathParser dateMathParser() { @@ -262,7 +263,7 @@ public class DateFieldMapper extends FieldMapper { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, ShapeRelation relation, - @Nullable DateTimeZone timeZone, 
@Nullable DateMathParser forcedDateParser, QueryShardContext context) { + @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) { failIfNotIndexed(); if (relation == ShapeRelation.DISJOINT) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + @@ -296,8 +297,8 @@ public class DateFieldMapper extends FieldMapper { return query; } - public long parseToMilliseconds(Object value, boolean roundUp, - @Nullable DateTimeZone zone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) { + public long parseToMilliseconds(Object value, boolean roundUp, @Nullable DateTimeZone zone, + @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) { DateMathParser dateParser = dateMathParser(); if (forcedDateParser != null) { dateParser = forcedDateParser; @@ -309,13 +310,13 @@ public class DateFieldMapper extends FieldMapper { } else { strValue = value.toString(); } - return dateParser.parse(strValue, context::nowInMillis, roundUp, zone); + return dateParser.parse(strValue, context::nowInMillis, roundUp, DateUtils.dateTimeZoneToZoneId(zone)); } @Override - public Relation isFieldWithinQuery(IndexReader reader, - Object from, Object to, boolean includeLower, boolean includeUpper, - DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException { + public Relation isFieldWithinQuery(IndexReader reader, Object from, Object to, boolean includeLower, boolean includeUpper, + DateTimeZone timeZone, DateMathParser dateParser, + QueryRewriteContext context) throws IOException { if (dateParser == null) { dateParser = this.dateMathParser; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index 350dc27c615..587e9abd50b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -306,12 +306,11 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper // its an array of other possible values if (token == XContentParser.Token.VALUE_NUMBER) { double lon = context.parser().doubleValue(); - token = context.parser().nextToken(); + context.parser().nextToken(); double lat = context.parser().doubleValue(); token = context.parser().nextToken(); - Double alt = Double.NaN; if (token == XContentParser.Token.VALUE_NUMBER) { - alt = GeoPoint.assertZValue(ignoreZValue.value(), context.parser().doubleValue()); + GeoPoint.assertZValue(ignoreZValue.value(), context.parser().doubleValue()); } else if (token != XContentParser.Token.END_ARRAY) { throw new ElasticsearchParseException("[{}] field type does not accept > 3 dimensions", CONTENT_TYPE); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index bb048ab9afa..456805e6416 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; @@ -150,8 +151,19 @@ public class IndexFieldMapper extends MetadataFieldMapper { + " vs. 
" + values); } + @Override + public Query wildcardQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { + if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) { + return Queries.newMatchAllQuery(); + } else { + return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName() + " vs. " + value); + } + } + private boolean isSameIndex(Object value, String indexName) { - String pattern = value instanceof BytesRef ? pattern = ((BytesRef) value).utf8ToString() : value.toString(); + String pattern = value instanceof BytesRef ? ((BytesRef) value).utf8ToString() : value.toString(); return Regex.simpleMatch(pattern, indexName); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 82a601de05e..45bb5ed395d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -38,7 +38,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -345,6 +345,12 @@ public abstract class MappedFieldType extends FieldType { throw new QueryShardException(context, "Can only use prefix queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } + public Query wildcardQuery(String value, + @Nullable MultiTermQuery.RewriteMethod method, + QueryShardContext context) { + throw new QueryShardException(context, "Can only use wildcard queries on 
keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); + } + public Query regexpQuery(String value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, QueryShardContext context) { throw new QueryShardException(context, "Can only use regexp queries on keyword and text fields - not on [" + name + "] which is of type [" + typeName() + "]"); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java index 4c356c3a559..0deb6e8afa0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -44,11 +44,12 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -60,6 +61,7 @@ import org.joda.time.DateTimeZone; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; +import java.time.ZoneId; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; @@ -257,7 +259,7 @@ public class RangeFieldMapper extends FieldMapper { public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { checkIfFrozen(); this.dateTimeFormatter = dateTimeFormatter; - 
this.dateMathParser = new DateMathParser(dateTimeFormatter); + this.dateMathParser = dateTimeFormatter.toDateMathParser(); } protected DateMathParser dateMathParser() { @@ -587,15 +589,16 @@ public class RangeFieldMapper extends FieldMapper { public Query rangeQuery(String field, boolean hasDocValues, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, ShapeRelation relation, @Nullable DateTimeZone timeZone, @Nullable DateMathParser parser, QueryShardContext context) { - DateTimeZone zone = (timeZone == null) ? DateTimeZone.UTC : timeZone; + DateTimeZone zone = (timeZone == null) ? DateTimeZone.UTC : timeZone; + ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(zone); DateMathParser dateMathParser = (parser == null) ? - new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER) : parser; + DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser() : parser; Long low = lowerTerm == null ? Long.MIN_VALUE : dateMathParser.parse(lowerTerm instanceof BytesRef ? ((BytesRef) lowerTerm).utf8ToString() : lowerTerm.toString(), - context::nowInMillis, false, zone); + context::nowInMillis, false, zoneId); Long high = upperTerm == null ? Long.MAX_VALUE : dateMathParser.parse(upperTerm instanceof BytesRef ? 
((BytesRef) upperTerm).utf8ToString() : upperTerm.toString(), - context::nowInMillis, false, zone); + context::nowInMillis, false, zoneId); return super.rangeQuery(field, hasDocValues, low, high, includeLower, includeUpper, relation, zone, dateMathParser, context); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java index b91be82cd6b..3d3b1607870 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SimpleMappedFieldType.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.search.Query; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.index.query.QueryShardContext; import org.joda.time.DateTimeZone; @@ -40,7 +40,7 @@ public abstract class SimpleMappedFieldType extends MappedFieldType { @Override public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, - ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) { + ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) { if (relation == ShapeRelation.DISJOINT) { throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support DISJOINT ranges"); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 37834b93a1e..cde8e392dab 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -19,20 +19,24 @@ package org.elasticsearch.index.mapper; -import 
java.util.List; - import org.apache.lucene.index.Term; -import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; +import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.support.QueryParsers; + +import java.util.List; /** Base class for {@link MappedFieldType} implementations that use the same * representation for internal index terms as the external representation so @@ -74,6 +78,19 @@ public abstract class StringFieldType extends TermBasedFieldType { return query; } + @Override + public Query wildcardQuery(String value, MultiTermQuery.RewriteMethod method, QueryShardContext context) { + Query termQuery = termQuery(value, context); + if (termQuery instanceof MatchNoDocsQuery || termQuery instanceof MatchAllDocsQuery) { + return termQuery; + } + Term term = MappedFieldType.extractTerm(termQuery); + + WildcardQuery query = new WildcardQuery(term); + QueryParsers.setRewriteMethod(query, method); + return query; + } + @Override public Query regexpQuery(String value, int flags, int maxDeterminizedStates, MultiTermQuery.RewriteMethod method, QueryShardContext context) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index f7bcab21d72..7851bb1655a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.mapper; -import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.AnalyzerWrapper; import org.apache.lucene.analysis.CachingTokenFilter; @@ -42,7 +41,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.Version; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -64,8 +62,6 @@ import static org.elasticsearch.index.mapper.TypeParsers.parseTextField; /** A {@link FieldMapper} for full-text fields. */ public class TextFieldMapper extends FieldMapper { - private static final Logger logger = ESLoggerFactory.getLogger(TextFieldMapper.class); - public static final String CONTENT_TYPE = "text"; private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; @@ -481,6 +477,7 @@ public class TextFieldMapper extends FieldMapper { } } + @Override public TextFieldType clone() { return new TextFieldType(this); } diff --git a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java index 379f583ef60..93528bb9520 100644 --- a/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/FuzzyQueryBuilder.java @@ -181,6 +181,7 @@ public class FuzzyQueryBuilder extends AbstractQueryBuilder i out.writeOptionalString(this.rewrite); } + @Override public String fieldName() { return this.fieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java 
b/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java index be9abfc5e44..ee9fa5b114b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MultiTermQueryBuilder.java @@ -19,5 +19,8 @@ package org.elasticsearch.index.query; public interface MultiTermQueryBuilder extends QueryBuilder { - + /** + * Get the field name for this query. + */ + String fieldName(); } diff --git a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java index c1cd99d712a..eacb2be100c 100644 --- a/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/PrefixQueryBuilder.java @@ -87,6 +87,7 @@ public class PrefixQueryBuilder extends AbstractQueryBuilder out.writeOptionalString(rewrite); } + @Override public String fieldName() { return this.fieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index b297036f2f3..756c6456a9f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -29,10 +29,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.lucene.BytesRefs; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.index.mapper.FieldNamesFieldMapper; @@ -146,6 +146,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i /** * Get the field name for this query. */ + @Override public String fieldName() { return this.fieldName; } @@ -302,7 +303,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i DateMathParser getForceDateParser() { // pkg private for testing if (this.format != null) { - return new DateMathParser(this.format); + return this.format.toDateMathParser(); } return null; } diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index 39d7c1e2cf0..472c1014874 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -104,6 +104,7 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder } /** Returns the field name used in this query. 
*/ + @Override public String fieldName() { return this.fieldName; } diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 6ea068176b4..22fca7d1d0b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -21,9 +21,11 @@ package org.elasticsearch.index.query; import org.apache.lucene.index.Term; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.TermStates; +import org.apache.lucene.queries.SpanMatchNoDocsQuery; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; @@ -190,9 +192,14 @@ public class SpanMultiTermQueryBuilder extends AbstractQueryBuilder clauses = new ArrayList<>(); + int[] articulationPoints = graph.articulationPoints(); + int lastState = 0; + int maxBooleanClause = BooleanQuery.getMaxClauseCount(); + for (int i = 0; i <= articulationPoints.length; i++) { + int start = lastState; + int end = -1; + if (i < articulationPoints.length) { + end = articulationPoints[i]; + } + lastState = end; + final SpanQuery queryPos; + if (graph.hasSidePath(start)) { + List queries = new ArrayList<>(); + Iterator it = graph.getFiniteStrings(start, end); + while (it.hasNext()) { + TokenStream ts = it.next(); + SpanQuery q = createSpanQuery(ts, field); + if (q != null) { + if (queries.size() >= maxBooleanClause) { + throw new BooleanQuery.TooManyClauses(); + } + queries.add(q); + } + } + if (queries.size() > 0) { + queryPos = new SpanOrQuery(queries.toArray(new SpanQuery[0])); + } else { + queryPos = null; + } + } else { + 
Term[] terms = graph.getTerms(field, start); + assert terms.length > 0; + if (terms.length >= maxBooleanClause) { + throw new BooleanQuery.TooManyClauses(); + } + if (terms.length == 1) { + queryPos = new SpanTermQuery(terms[0]); + } else { + SpanTermQuery[] orClauses = new SpanTermQuery[terms.length]; + for (int idx = 0; idx < terms.length; idx++) { + orClauses[idx] = new SpanTermQuery(terms[idx]); + } + + queryPos = new SpanOrQuery(orClauses); + } + } + + if (queryPos != null) { + if (clauses.size() >= maxBooleanClause) { + throw new BooleanQuery.TooManyClauses(); + } + clauses.add(queryPos); + } + } + + if (clauses.isEmpty()) { + return null; + } else if (clauses.size() == 1) { + return clauses.get(0); + } else { + return new SpanNearQuery(clauses.toArray(new SpanQuery[0]), phraseSlop, true); + } + } } /** diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java b/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java index a6949c05597..bc62f4067b9 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java @@ -24,8 +24,8 @@ import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.search.IndexSearcher; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.engine.Engine; import java.io.IOException; @@ -97,21 +97,10 @@ public class IndexSearcherWrapper { if (reader == nonClosingReaderWrapper && indexSearcher == innerIndexSearcher) { return engineSearcher; } else { - return new Engine.Searcher(engineSearcher.source(), indexSearcher) { - @Override - public void close() throws ElasticsearchException { - try { - reader().close(); - // we close 
the reader to make sure wrappers can release resources if needed.... - // our NonClosingReaderWrapper makes sure that our reader is not closed - } catch (IOException e) { - throw new ElasticsearchException("failed to close reader", e); - } finally { - engineSearcher.close(); - } - - } - }; + // we close the reader to make sure wrappers can release resources if needed.... + // our NonClosingReaderWrapper makes sure that our reader is not closed + return new Engine.Searcher(engineSearcher.source(), indexSearcher, s -> IOUtils.close(s.getIndexReader(), engineSearcher), + engineSearcher.getLogger()); } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index f5f8d70925f..11d8f44bef1 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -79,7 +79,6 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.engine.EngineFactory; -import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.engine.RefreshFailedEngineException; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.engine.SegmentsStats; @@ -502,6 +501,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * the reverted operations on this shard by replaying the translog to avoid losing acknowledged writes. */ final Engine engine = getEngine(); + if (getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO) { + // If the old primary was on an old version that did not replicate the msu, + // we need to bootstrap it manually from its local history. 
+ assert indexSettings.getIndexVersionCreated().before(Version.V_6_5_0); + engine.advanceMaxSeqNoOfUpdatesOrDeletes(seqNoStats().getMaxSeqNo()); + } engine.restoreLocalHistoryFromTranslog((resettingEngine, snapshot) -> runTranslogRecovery(resettingEngine, snapshot, Engine.Operation.Origin.LOCAL_RESET, () -> {})); /* Rolling the translog generation is not strictly needed here (as we will never have collisions between @@ -511,12 +516,6 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl */ engine.rollTranslogGeneration(); engine.fillSeqNoGaps(newPrimaryTerm); - if (getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO) { - // TODO: Enable this assertion after we replicate max_seq_no_updates during replication - // assert indexSettings.getIndexVersionCreated().before(Version.V_7_0_0_alpha1) : - // indexSettings.getIndexVersionCreated(); - engine.initializeMaxSeqNoOfUpdatesOrDeletes(); - } replicationTracker.updateLocalCheckpoint(currentRouting.allocationId().getId(), getLocalCheckpoint()); primaryReplicaSyncer.accept(this, new ActionListener() { @Override @@ -1399,8 +1398,8 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl private boolean assertMaxUnsafeAutoIdInCommit() throws IOException { final Map userData = SegmentInfos.readLatestCommit(store.directory()).getUserData(); - assert userData.containsKey(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID) : - "opening index which was created post 5.5.0 but " + InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID + assert userData.containsKey(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID) : + "opening index which was created post 5.5.0 but " + Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID + " is not found in commit"; return true; } @@ -1955,12 +1954,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl getLocalCheckpoint() == 
primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint(); synchronized (mutex) { replicationTracker.activateWithPrimaryContext(primaryContext); // make changes to primaryMode flag only under mutex - // If the old primary was on an old version, this primary (was replica before) - // does not have max_of_updates yet. Thus we need to bootstrap it manually. if (getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO) { - // TODO: Enable this assertion after we replicate max_seq_no_updates during replication - // assert indexSettings.getIndexVersionCreated().before(Version.V_7_0_0_alpha1) : indexSettings.getIndexVersionCreated(); - getEngine().initializeMaxSeqNoOfUpdatesOrDeletes(); + // If the old primary was on an old version that did not replicate the msu, + // we need to bootstrap it manually from its local history. + assert indexSettings.getIndexVersionCreated().before(Version.V_6_5_0); + getEngine().advanceMaxSeqNoOfUpdatesOrDeletes(seqNoStats().getMaxSeqNo()); } } } @@ -2316,15 +2314,17 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * {@link IllegalStateException}. If permit acquisition is delayed, the listener will be invoked on the executor with the specified * name. * - * @param opPrimaryTerm the operation primary term - * @param globalCheckpoint the global checkpoint associated with the request - * @param onPermitAcquired the listener for permit acquisition - * @param executorOnDelay the name of the executor to invoke the listener on if permit acquisition is delayed - * @param debugInfo an extra information that can be useful when tracing an unreleased permit. When assertions are enabled - * the tracing will capture the supplied object's {@link Object#toString()} value. 
Otherwise the object - * isn't used + * @param opPrimaryTerm the operation primary term + * @param globalCheckpoint the global checkpoint associated with the request + * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates (index operations overwrite Lucene) or deletes captured on the primary + * after this replication request was executed on it (see {@link #getMaxSeqNoOfUpdatesOrDeletes()} + * @param onPermitAcquired the listener for permit acquisition + * @param executorOnDelay the name of the executor to invoke the listener on if permit acquisition is delayed + * @param debugInfo an extra information that can be useful when tracing an unreleased permit. When assertions are + * enabled the tracing will capture the supplied object's {@link Object#toString()} value. + * Otherwise the object isn't used */ - public void acquireReplicaOperationPermit(final long opPrimaryTerm, final long globalCheckpoint, + public void acquireReplicaOperationPermit(final long opPrimaryTerm, final long globalCheckpoint, final long maxSeqNoOfUpdatesOrDeletes, final ActionListener onPermitAcquired, final String executorOnDelay, final Object debugInfo) { verifyNotClosed(); @@ -2378,6 +2378,7 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl assert assertReplicationTarget(); try { updateGlobalCheckpointOnReplica(globalCheckpoint, "operation"); + advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNoOfUpdatesOrDeletes); } catch (Exception e) { releasable.close(); onPermitAcquired.onFailure(e); @@ -2729,12 +2730,11 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl newEngine = createNewEngine(newEngineConfig()); active.set(true); } + newEngine.advanceMaxSeqNoOfUpdatesOrDeletes(globalCheckpoint); final Engine.TranslogRecoveryRunner translogRunner = (engine, snapshot) -> runTranslogRecovery( engine, snapshot, Engine.Operation.Origin.LOCAL_RESET, () -> { // TODO: add a dedicate recovery stats for the reset translog }); - // TODO: do 
not use init method here but use advance with the max_seq_no received from the primary - newEngine.initializeMaxSeqNoOfUpdatesOrDeletes(); newEngine.recoverFromTranslog(translogRunner, globalCheckpoint); } @@ -2763,10 +2763,13 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * These transfers guarantee that every index/delete operation when executing on a replica engine will observe this marker a value * which is at least the value of the max_seq_no_of_updates marker on the primary after that operation was executed on the primary. * - * @see #acquireReplicaOperationPermit(long, long, ActionListener, String, Object) - * @see org.elasticsearch.indices.recovery.RecoveryTarget#indexTranslogOperations(List, int, long) + * @see #acquireReplicaOperationPermit(long, long, long, ActionListener, String, Object) + * @see org.elasticsearch.indices.recovery.RecoveryTarget#indexTranslogOperations(List, int, long, long) */ public void advanceMaxSeqNoOfUpdatesOrDeletes(long seqNo) { + assert seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO + || getMaxSeqNoOfUpdatesOrDeletes() == SequenceNumbers.UNASSIGNED_SEQ_NO : + "replica has max_seq_no_of_updates=" + getMaxSeqNoOfUpdatesOrDeletes() + " but primary does not"; getEngine().advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); assert seqNo <= getMaxSeqNoOfUpdatesOrDeletes() : getMaxSeqNoOfUpdatesOrDeletes() + " < " + seqNo; } diff --git a/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java b/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java index 09391c9bc96..cc9ac40c274 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java @@ -28,7 +28,6 @@ import org.apache.lucene.store.NoLockFactory; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.index.Index; import org.elasticsearch.index.engine.Engine; -import 
org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.store.Store; import java.io.Closeable; @@ -66,7 +65,7 @@ final class LocalShardSnapshot implements Closeable { } long maxUnsafeAutoIdTimestamp() { - return Long.parseLong(shard.getEngine().commitStats().getUserData().get(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID)); + return Long.parseLong(shard.getEngine().commitStats().getUserData().get(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID)); } Directory getSnapshotDirectory() { diff --git a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index c4b971e470d..4d8b63fc719 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -40,8 +40,8 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; -import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException; @@ -181,7 +181,7 @@ final class StoreRecovery { final HashMap liveCommitData = new HashMap<>(3); liveCommitData.put(SequenceNumbers.MAX_SEQ_NO, Long.toString(maxSeqNo)); liveCommitData.put(SequenceNumbers.LOCAL_CHECKPOINT_KEY, Long.toString(maxSeqNo)); - liveCommitData.put(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, Long.toString(maxUnsafeAutoIdTimestamp)); + liveCommitData.put(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, Long.toString(maxUnsafeAutoIdTimestamp)); return liveCommitData.entrySet().iterator(); }); writer.commit(); diff --git 
a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index 06a476e64ec..552ef3c4aae 100644 --- a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -206,7 +206,7 @@ public final class SimilarityService extends AbstractIndexComponent { CollectionStatistics collectionStats = new CollectionStatistics("some_field", 1200, 1100, 3000, 2000); TermStatistics termStats = new TermStatistics(new BytesRef("some_value"), 100, 130); SimScorer scorer = similarity.scorer(2f, collectionStats, termStats); - FieldInvertState state = new FieldInvertState(indexCreatedVersion.major, "some_field", + FieldInvertState state = new FieldInvertState(indexCreatedVersion.luceneVersion.major, "some_field", IndexOptions.DOCS_AND_FREQS, 20, 20, 0, 50, 10, 3); // length = 20, no overlap final long norm = similarity.computeNorm(state); for (int freq = 1; freq <= 10; ++freq) { @@ -214,6 +214,7 @@ public final class SimilarityService extends AbstractIndexComponent { if (score < 0) { fail(indexCreatedVersion, "Similarities should not return negative scores:\n" + scorer.explain(Explanation.match(freq, "term freq"), norm)); + break; } } } @@ -222,7 +223,7 @@ public final class SimilarityService extends AbstractIndexComponent { CollectionStatistics collectionStats = new CollectionStatistics("some_field", 1200, 1100, 3000, 2000); TermStatistics termStats = new TermStatistics(new BytesRef("some_value"), 100, 130); SimScorer scorer = similarity.scorer(2f, collectionStats, termStats); - FieldInvertState state = new FieldInvertState(indexCreatedVersion.major, "some_field", + FieldInvertState state = new FieldInvertState(indexCreatedVersion.luceneVersion.major, "some_field", IndexOptions.DOCS_AND_FREQS, 20, 20, 0, 50, 10, 3); // length = 20, no overlap final long norm = 
similarity.computeNorm(state); float previousScore = 0; @@ -232,6 +233,7 @@ public final class SimilarityService extends AbstractIndexComponent { fail(indexCreatedVersion, "Similarity scores should not decrease when term frequency increases:\n" + scorer.explain(Explanation.match(freq - 1, "term freq"), norm) + "\n" + scorer.explain(Explanation.match(freq, "term freq"), norm)); + break; } previousScore = score; } @@ -245,7 +247,7 @@ public final class SimilarityService extends AbstractIndexComponent { long previousNorm = 0; float previousScore = Float.MAX_VALUE; for (int length = 1; length <= 10; ++length) { - FieldInvertState state = new FieldInvertState(indexCreatedVersion.major, "some_field", + FieldInvertState state = new FieldInvertState(indexCreatedVersion.luceneVersion.major, "some_field", IndexOptions.DOCS_AND_FREQS, length, length, 0, 50, 10, 3); // length = 20, no overlap final long norm = similarity.computeNorm(state); if (Long.compareUnsigned(previousNorm, norm) > 0) { @@ -257,6 +259,7 @@ public final class SimilarityService extends AbstractIndexComponent { fail(indexCreatedVersion, "Similarity scores should not increase when norm increases:\n" + scorer.explain(Explanation.match(1, "term freq"), norm - 1) + "\n" + scorer.explain(Explanation.match(1, "term freq"), norm)); + break; } previousScore = score; previousNorm = norm; diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index ddd642a2f0f..13499dfd60c 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -75,7 +75,6 @@ import org.elasticsearch.env.ShardLockObtainFailedException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.CombinedDeletionPolicy; import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.engine.InternalEngine; import 
org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; @@ -1425,7 +1424,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref map.put(Engine.HISTORY_UUID_KEY, UUIDs.randomBase64UUID()); map.put(SequenceNumbers.LOCAL_CHECKPOINT_KEY, Long.toString(SequenceNumbers.NO_OPS_PERFORMED)); map.put(SequenceNumbers.MAX_SEQ_NO, Long.toString(SequenceNumbers.NO_OPS_PERFORMED)); - map.put(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, "-1"); + map.put(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, "-1"); updateCommitData(writer, map); } finally { metadataLock.writeLock().unlock(); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index ba88e30727d..f60994a4bce 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -456,7 +456,7 @@ public class PeerRecoveryTargetService extends AbstractComponent implements Inde final RecoveryTarget recoveryTarget = recoveryRef.target(); try { recoveryTarget.indexTranslogOperations(request.operations(), request.totalTranslogOps(), - request.maxSeenAutoIdTimestampOnPrimary()); + request.maxSeenAutoIdTimestampOnPrimary(), request.maxSeqNoOfUpdatesOrDeletesOnPrimary()); channel.sendResponse(new RecoveryTranslogOperationsResponse(recoveryTarget.indexShard().getLocalCheckpoint())); } catch (MapperException exception) { // in very rare cases a translog replay from primary is processed before a mapping update on this node diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 20e6d857873..46f98275740 
100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -215,10 +215,12 @@ public class RecoverySourceHandler { } final long targetLocalCheckpoint; try (Translog.Snapshot snapshot = shard.getHistoryOperations("peer-recovery", startingSeqNo)) { - // We have to capture the max auto_id_timestamp after taking a snapshot of operations to guarantee - // that the auto_id_timestamp of every operation in the snapshot is at most this timestamp value. + // we have to capture the max_seen_auto_id_timestamp and the max_seq_no_of_updates to make sure that these values + // are at least as high as the corresponding values on the primary when any of these operations were executed on it. final long maxSeenAutoIdTimestamp = shard.getMaxSeenAutoIdTimestamp(); - targetLocalCheckpoint = phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, maxSeenAutoIdTimestamp); + final long maxSeqNoOfUpdatesOrDeletes = shard.getMaxSeqNoOfUpdatesOrDeletes(); + targetLocalCheckpoint = phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, + maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); } catch (Exception e) { throw new RecoveryEngineException(shard.shardId(), 2, "phase2 failed", e); } @@ -445,16 +447,17 @@ public class RecoverySourceHandler { * point-in-time view of the translog). It then sends each translog operation to the target node so it can be replayed into the new * shard. 
* - * @param startingSeqNo the sequence number to start recovery from, or {@link SequenceNumbers#UNASSIGNED_SEQ_NO} if all - * ops should be sent - * @param requiredSeqNoRangeStart the lower sequence number of the required range (ending with endingSeqNo) - * @param endingSeqNo the highest sequence number that should be sent - * @param snapshot a snapshot of the translog - * @param maxSeenAutoIdTimestamp the max auto_id_timestamp of append-only requests on the primary + * @param startingSeqNo the sequence number to start recovery from, or {@link SequenceNumbers#UNASSIGNED_SEQ_NO} if all + * ops should be sent + * @param requiredSeqNoRangeStart the lower sequence number of the required range (ending with endingSeqNo) + * @param endingSeqNo the highest sequence number that should be sent + * @param snapshot a snapshot of the translog + * @param maxSeenAutoIdTimestamp the max auto_id_timestamp of append-only requests on the primary + * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates or deletes on the primary after these operations were executed on it. 
* @return the local checkpoint on the target */ long phase2(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, final Translog.Snapshot snapshot, - final long maxSeenAutoIdTimestamp) + final long maxSeenAutoIdTimestamp, final long maxSeqNoOfUpdatesOrDeletes) throws IOException { if (shard.state() == IndexShardState.CLOSED) { throw new IndexShardClosedException(request.shardId()); @@ -468,7 +471,7 @@ public class RecoverySourceHandler { // send all the snapshot's translog operations to the target final SendSnapshotResult result = sendSnapshot( - startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, maxSeenAutoIdTimestamp); + startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); stopWatch.stop(); logger.trace("recovery [phase2]: took [{}]", stopWatch.totalTime()); @@ -531,16 +534,18 @@ public class RecoverySourceHandler { *

* Operations are bulked into a single request depending on an operation count limit or size-in-bytes limit. * - * @param startingSeqNo the sequence number for which only operations with a sequence number greater than this will be sent - * @param requiredSeqNoRangeStart the lower sequence number of the required range - * @param endingSeqNo the upper bound of the sequence number range to be sent (inclusive) - * @param snapshot the translog snapshot to replay operations from @return the local checkpoint on the target and the - * total number of operations sent - * @param maxSeenAutoIdTimestamp the max auto_id_timestamp of append-only requests on the primary + * @param startingSeqNo the sequence number for which only operations with a sequence number greater than this will be sent + * @param requiredSeqNoRangeStart the lower sequence number of the required range + * @param endingSeqNo the upper bound of the sequence number range to be sent (inclusive) + * @param snapshot the translog snapshot to replay operations from @return the local checkpoint on the target and the + * total number of operations sent + * @param maxSeenAutoIdTimestamp the max auto_id_timestamp of append-only requests on the primary + * @param maxSeqNoOfUpdatesOrDeletes the max seq_no of updates or deletes on the primary after these operations were executed on it. 
* @throws IOException if an I/O exception occurred reading the translog snapshot */ protected SendSnapshotResult sendSnapshot(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, - final Translog.Snapshot snapshot, final long maxSeenAutoIdTimestamp) throws IOException { + final Translog.Snapshot snapshot, final long maxSeenAutoIdTimestamp, + final long maxSeqNoOfUpdatesOrDeletes) throws IOException { assert requiredSeqNoRangeStart <= endingSeqNo + 1: "requiredSeqNoRangeStart " + requiredSeqNoRangeStart + " is larger than endingSeqNo " + endingSeqNo; assert startingSeqNo <= requiredSeqNoRangeStart : @@ -558,8 +563,11 @@ public class RecoverySourceHandler { logger.trace("no translog operations to send"); } - final CancellableThreads.IOInterruptable sendBatch = () -> - targetLocalCheckpoint.set(recoveryTarget.indexTranslogOperations(operations, expectedTotalOps, maxSeenAutoIdTimestamp)); + final CancellableThreads.IOInterruptable sendBatch = () -> { + final long targetCheckpoint = recoveryTarget.indexTranslogOperations( + operations, expectedTotalOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); + targetLocalCheckpoint.set(targetCheckpoint); + }; // send operations in batches Translog.Operation operation; diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java index e2f21fe8edd..3a3a78941b1 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTarget.java @@ -386,8 +386,8 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget } @Override - public long indexTranslogOperations(List operations, int totalTranslogOps, - long maxSeenAutoIdTimestampOnPrimary) throws IOException { + public long indexTranslogOperations(List operations, int totalTranslogOps, long maxSeenAutoIdTimestampOnPrimary, + long 
maxSeqNoOfDeletesOrUpdatesOnPrimary) throws IOException { final RecoveryState.Translog translog = state().getTranslog(); translog.totalOperations(totalTranslogOps); assert indexShard().recoveryState() == state(); @@ -401,6 +401,11 @@ public class RecoveryTarget extends AbstractRefCounted implements RecoveryTarget * replay these operations first (without timestamp), then optimize append-only requests (with timestamp). */ indexShard().updateMaxUnsafeAutoIdTimestamp(maxSeenAutoIdTimestampOnPrimary); + /* + * Bootstrap the max_seq_no_of_updates from the primary to make sure that the max_seq_no_of_updates on this replica when + * replaying any of these operations will be at least the max_seq_no_of_updates on the primary when that operation was executed on. + */ + indexShard().advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNoOfDeletesOrUpdatesOnPrimary); for (Translog.Operation operation : operations) { Engine.Result result = indexShard().applyTranslogOperation(operation, Engine.Operation.Origin.PEER_RECOVERY); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java index 53220c58609..b7c3de97b4e 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTargetHandler.java @@ -59,13 +59,17 @@ public interface RecoveryTargetHandler { /** * Index a set of translog operations on the target - * @param operations operations to index - * @param totalTranslogOps current number of total operations expected to be indexed - * @param maxSeenAutoIdTimestampOnPrimary the maximum auto_id_timestamp of all append-only requests processed by the primary shard + * + * @param operations operations to index + * @param totalTranslogOps current number of total operations expected to be indexed + * 
@param maxSeenAutoIdTimestampOnPrimary the maximum auto_id_timestamp of all append-only requests processed by the primary shard + * @param maxSeqNoOfUpdatesOrDeletesOnPrimary the max seq_no of update operations (index operations overwrite Lucene) or delete ops on + * the primary shard when capturing these operations. This value is at least as high as the + * max_seq_no_of_updates on the primary was when any of these ops were processed on it. * @return the local checkpoint on the target shard */ long indexTranslogOperations(List operations, int totalTranslogOps, - long maxSeenAutoIdTimestampOnPrimary) throws IOException; + long maxSeenAutoIdTimestampOnPrimary, long maxSeqNoOfUpdatesOrDeletesOnPrimary) throws IOException; /** * Notifies the target of the files it is going to receive diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java index ae74673d30d..0ae5d507eb3 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryTranslogOperationsRequest.java @@ -23,6 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.transport.TransportRequest; @@ -37,17 +38,19 @@ public class RecoveryTranslogOperationsRequest extends TransportRequest { private List operations; private int totalTranslogOps = RecoveryState.Translog.UNKNOWN; private long maxSeenAutoIdTimestampOnPrimary; + private long maxSeqNoOfUpdatesOrDeletesOnPrimary; public RecoveryTranslogOperationsRequest() { } - 
RecoveryTranslogOperationsRequest(long recoveryId, ShardId shardId, List operations, - int totalTranslogOps, long maxSeenAutoIdTimestampOnPrimary) { + RecoveryTranslogOperationsRequest(long recoveryId, ShardId shardId, List operations, int totalTranslogOps, + long maxSeenAutoIdTimestampOnPrimary, long maxSeqNoOfUpdatesOrDeletesOnPrimary) { this.recoveryId = recoveryId; this.shardId = shardId; this.operations = operations; this.totalTranslogOps = totalTranslogOps; this.maxSeenAutoIdTimestampOnPrimary = maxSeenAutoIdTimestampOnPrimary; + this.maxSeqNoOfUpdatesOrDeletesOnPrimary = maxSeqNoOfUpdatesOrDeletesOnPrimary; } public long recoveryId() { @@ -70,6 +73,10 @@ public class RecoveryTranslogOperationsRequest extends TransportRequest { return maxSeenAutoIdTimestampOnPrimary; } + public long maxSeqNoOfUpdatesOrDeletesOnPrimary() { + return maxSeqNoOfUpdatesOrDeletesOnPrimary; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -82,6 +89,12 @@ public class RecoveryTranslogOperationsRequest extends TransportRequest { } else { maxSeenAutoIdTimestampOnPrimary = IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP; } + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { + maxSeqNoOfUpdatesOrDeletesOnPrimary = in.readZLong(); + } else { + // UNASSIGNED_SEQ_NO means uninitialized and replica won't enable optimization using seq_no + maxSeqNoOfUpdatesOrDeletesOnPrimary = SequenceNumbers.UNASSIGNED_SEQ_NO; + } } @Override @@ -94,5 +107,8 @@ public class RecoveryTranslogOperationsRequest extends TransportRequest { if (out.getVersion().onOrAfter(Version.V_6_5_0)) { out.writeZLong(maxSeenAutoIdTimestampOnPrimary); } + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { + out.writeZLong(maxSeqNoOfUpdatesOrDeletesOnPrimary); + } } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java index 
3a7f28e8eb7..b37fefee7da 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RemoteRecoveryTargetHandler.java @@ -110,9 +110,10 @@ public class RemoteRecoveryTargetHandler implements RecoveryTargetHandler { } @Override - public long indexTranslogOperations(List operations, int totalTranslogOps, long maxSeenAutoIdTimestampOnPrimary) { - final RecoveryTranslogOperationsRequest translogOperationsRequest = - new RecoveryTranslogOperationsRequest(recoveryId, shardId, operations, totalTranslogOps, maxSeenAutoIdTimestampOnPrimary); + public long indexTranslogOperations(List operations, int totalTranslogOps, + long maxSeenAutoIdTimestampOnPrimary, long maxSeqNoOfDeletesOrUpdatesOnPrimary) { + final RecoveryTranslogOperationsRequest translogOperationsRequest = new RecoveryTranslogOperationsRequest( + recoveryId, shardId, operations, totalTranslogOps, maxSeenAutoIdTimestampOnPrimary, maxSeqNoOfDeletesOrUpdatesOnPrimary); final TransportFuture future = transportService.submitRequest( targetNode, PeerRecoveryTargetService.Actions.TRANSLOG_OPS, diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DropProcessor.java b/server/src/main/java/org/elasticsearch/ingest/DropProcessor.java similarity index 90% rename from modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DropProcessor.java rename to server/src/main/java/org/elasticsearch/ingest/DropProcessor.java index a0eabe38979..d4ccf7ad635 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DropProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/DropProcessor.java @@ -17,12 +17,9 @@ * under the License. 
*/ -package org.elasticsearch.ingest.common; +package org.elasticsearch.ingest; import java.util.Map; -import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; /** * Drop processor only returns {@code null} for the execution result to indicate that any document diff --git a/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java b/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java index f8575b4a012..d6ed68bcafa 100644 --- a/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java +++ b/server/src/main/java/org/elasticsearch/rest/BytesRestResponse.java @@ -20,6 +20,7 @@ package org.elasticsearch.rest; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ElasticsearchException; @@ -27,7 +28,6 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -117,7 +117,7 @@ public class BytesRestResponse extends RestResponse { return this.status; } - private static final Logger SUPPRESSED_ERROR_LOGGER = ESLoggerFactory.getLogger("rest.suppressed"); + private static final Logger SUPPRESSED_ERROR_LOGGER = LogManager.getLogger("rest.suppressed"); private static XContentBuilder build(RestChannel channel, RestStatus status, Exception e) throws IOException { ToXContent.Params params = channel.request(); diff --git a/server/src/main/java/org/elasticsearch/script/UpdateScript.java 
b/server/src/main/java/org/elasticsearch/script/UpdateScript.java index c6a1d5dd9ea..e1eaf14bcb9 100644 --- a/server/src/main/java/org/elasticsearch/script/UpdateScript.java +++ b/server/src/main/java/org/elasticsearch/script/UpdateScript.java @@ -20,6 +20,8 @@ package org.elasticsearch.script; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; /** @@ -27,7 +29,18 @@ import java.util.Map; */ public abstract class UpdateScript { - public static final String[] PARAMETERS = { "ctx" }; + public static final String[] PARAMETERS = { }; + + private static final Map DEPRECATIONS; + static { + Map deprecations = new HashMap<>(); + deprecations.put( + "ctx", + "Accessing variable [ctx] via [params.ctx] from within a update script " + + "is deprecated in favor of directly accessing [ctx]." + ); + DEPRECATIONS = Collections.unmodifiableMap(deprecations); + } /** The context used to compile {@link UpdateScript} factories. */ public static final ScriptContext CONTEXT = new ScriptContext<>("update", Factory.class); @@ -35,8 +48,14 @@ public abstract class UpdateScript { /** The generic runtime parameters for the script. */ private final Map params; - public UpdateScript(Map params) { - this.params = params; + /** The update context for the script. */ + private final Map ctx; + + public UpdateScript(Map params, Map ctx) { + Map paramsWithCtx = new HashMap<>(params); + paramsWithCtx.put("ctx", ctx); + this.params = new ParameterMap(paramsWithCtx, DEPRECATIONS); + this.ctx = ctx; } /** Return the parameters for this script. */ @@ -44,9 +63,14 @@ public abstract class UpdateScript { return params; } - public abstract void execute(Map ctx); + /** Return the update context for this script. 
*/ + public Map getCtx() { + return ctx; + } + + public abstract void execute(); public interface Factory { - UpdateScript newInstance(Map params); + UpdateScript newInstance(Map params, Map ctx); } } diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index 3a3b1c680ab..e5ece1afa33 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -25,11 +25,12 @@ import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.common.time.DateUtils; import org.joda.time.DateTimeZone; import java.io.IOException; @@ -171,13 +172,14 @@ public interface DocValueFormat extends NamedWriteable { public static final String NAME = "date_time"; final FormatDateTimeFormatter formatter; + // TODO: change this to ZoneId, but will require careful change to serialization final DateTimeZone timeZone; private final DateMathParser parser; public DateTime(FormatDateTimeFormatter formatter, DateTimeZone timeZone) { this.formatter = Objects.requireNonNull(formatter); this.timeZone = Objects.requireNonNull(timeZone); - this.parser = new DateMathParser(formatter); + this.parser = formatter.toDateMathParser(); } public DateTime(StreamInput in) throws IOException { @@ -212,7 +214,7 @@ public interface DocValueFormat extends NamedWriteable { @Override public long parseLong(String value, boolean roundUp, LongSupplier 
now) { - return parser.parse(value, now, roundUp, timeZone); + return parser.parse(value, now, roundUp, DateUtils.dateTimeZoneToZoneId(timeZone)); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index de4f0aab676..9198582411f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -159,10 +159,9 @@ public class AggregatorFactories { } } - public static final AggregatorFactories EMPTY = new AggregatorFactories(null, new AggregatorFactory[0], + public static final AggregatorFactories EMPTY = new AggregatorFactories(new AggregatorFactory[0], new ArrayList()); - private AggregatorFactory parent; private AggregatorFactory[] factories; private List pipelineAggregatorFactories; @@ -170,9 +169,7 @@ public class AggregatorFactories { return new Builder(); } - private AggregatorFactories(AggregatorFactory parent, AggregatorFactory[] factories, - List pipelineAggregators) { - this.parent = parent; + private AggregatorFactories(AggregatorFactory[] factories, List pipelineAggregators) { this.factories = factories; this.pipelineAggregatorFactories = pipelineAggregators; } @@ -328,7 +325,7 @@ public class AggregatorFactories { for (int i = 0; i < aggregationBuilders.size(); i++) { aggFactories[i] = aggregationBuilders.get(i).build(context, parent); } - return new AggregatorFactories(parent, aggFactories, orderedpipelineAggregators); + return new AggregatorFactories(aggFactories, orderedpipelineAggregators); } private List resolvePipelineAggregatorOrder( diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java index da328edd7aa..eafdbe10977 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java @@ -143,6 +143,14 @@ public abstract class InternalAggregation implements Aggregation, NamedWriteable public abstract InternalAggregation doReduce(List aggregations, ReduceContext reduceContext); + /** + * Return true if this aggregation is mapped, and can lead a reduction. If this agg returns + * false, it should return itself if asked to lead a reduction + */ + public boolean isMapped() { + return true; + } + /** * Get the value of specified path in the aggregation. * diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java index 57170e2f8ab..95140b50d2b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregations.java @@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext; import java.io.IOException; import java.util.ArrayList; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -37,6 +38,15 @@ import static java.util.Collections.emptyMap; public final class InternalAggregations extends Aggregations implements Streamable { public static final InternalAggregations EMPTY = new InternalAggregations(); + private static final Comparator INTERNAL_AGG_COMPARATOR = (agg1, agg2) -> { + if (agg1.isMapped() == agg2.isMapped()) { + return 0; + } else if (agg1.isMapped() && agg2.isMapped() == false) { + return -1; + } else { + return 1; + } + }; private InternalAggregations() { } @@ -73,6 +83,9 @@ public final class InternalAggregations extends Aggregations implements Streamab List reducedAggregations = new ArrayList<>(); for (Map.Entry> entry : 
aggByName.entrySet()) { List aggregations = entry.getValue(); + // Sort aggregations so that unmapped aggs come last in the list + // If all aggs are unmapped, the agg that leads the reduction will just return itself + aggregations.sort(INTERNAL_AGG_COMPARATOR); InternalAggregation first = aggregations.get(0); // the list can't be empty as it's created on demand reducedAggregations.add(first.reduce(aggregations, context)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java index 482b8be1c45..251dc7e4283 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java @@ -144,6 +144,13 @@ public class CompositeAggregationBuilder extends AbstractAggregationBuilder doBuild(SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subfactoriesBuilder) throws IOException { @@ -193,11 +200,7 @@ public class CompositeAggregationBuilder extends AbstractAggregationBuilder source: sources) { - builder.startObject(); - builder.startObject(source.name()); - source.toXContent(builder, params); - builder.endObject(); - builder.endObject(); + CompositeValuesSourceParserHelper.toXContent(source, builder, params); } builder.endArray(); if (after != null) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java index 713ecfc6628..d773a09d645 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java @@ -25,7 +25,9 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.AbstractObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.support.ValueType; @@ -33,7 +35,7 @@ import java.io.IOException; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -class CompositeValuesSourceParserHelper { +public class CompositeValuesSourceParserHelper { static , T> void declareValuesSourceFields(AbstractObjectParser objectParser, ValueType targetValueType) { objectParser.declareField(VB::field, XContentParser::text, @@ -57,7 +59,7 @@ class CompositeValuesSourceParserHelper { objectParser.declareField(VB::order, XContentParser::text, new ParseField("order"), ObjectParser.ValueType.STRING); } - static void writeTo(CompositeValuesSourceBuilder builder, StreamOutput out) throws IOException { + public static void writeTo(CompositeValuesSourceBuilder builder, StreamOutput out) throws IOException { final byte code; if (builder.getClass() == TermsValuesSourceBuilder.class) { code = 0; @@ -72,7 +74,7 @@ class CompositeValuesSourceParserHelper { builder.writeTo(out); } - static CompositeValuesSourceBuilder readFrom(StreamInput in) throws IOException { + public static CompositeValuesSourceBuilder readFrom(StreamInput in) throws IOException { int code = in.readByte(); switch(code) { case 0: @@ -86,7 +88,7 @@ class CompositeValuesSourceParserHelper { } } - static CompositeValuesSourceBuilder fromXContent(XContentParser parser) throws IOException { + public static 
CompositeValuesSourceBuilder fromXContent(XContentParser parser) throws IOException { XContentParser.Token token = parser.currentToken(); ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); token = parser.nextToken(); @@ -117,4 +119,14 @@ class CompositeValuesSourceParserHelper { parser.nextToken(); return builder; } + + public static XContentBuilder toXContent(CompositeValuesSourceBuilder source, XContentBuilder builder, Params params) + throws IOException { + builder.startObject(); + builder.startObject(source.name()); + source.toXContent(builder, params); + builder.endObject(); + builder.endObject(); + return builder; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java index bb785efde48..dba7fbb34fb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregationBuilder.java @@ -25,10 +25,10 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.rounding.DateTimeUnit; import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -72,7 +72,7 @@ import static java.util.Collections.unmodifiableMap; public class DateHistogramAggregationBuilder extends ValuesSourceAggregationBuilder 
implements MultiBucketAggregationBuilder { public static final String NAME = "date_histogram"; - private static DateMathParser EPOCH_MILLIS_PARSER = new DateMathParser(Joda.forPattern("epoch_millis", Locale.ROOT)); + private static DateMathParser EPOCH_MILLIS_PARSER = Joda.forPattern("epoch_millis", Locale.ROOT).toDateMathParser(); public static final Map DATE_FIELD_UNITS; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/UnmappedSampler.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/UnmappedSampler.java index 3459e110d7e..5f5f557ffd5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/UnmappedSampler.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/UnmappedSampler.java @@ -50,12 +50,12 @@ public class UnmappedSampler extends InternalSampler { @Override public InternalAggregation doReduce(List aggregations, ReduceContext reduceContext) { - for (InternalAggregation agg : aggregations) { - if (!(agg instanceof UnmappedSampler)) { - return agg.reduce(aggregations, reduceContext); - } - } - return this; + return new UnmappedSampler(name, pipelineAggregators(), metaData); + } + + @Override + public boolean isMapped() { + return false; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java index 66fc171bbe3..f2c9f8b29ad 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/UnmappedSignificantTerms.java @@ -51,13 +51,13 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms { private Bucket(BytesRef term, long subsetDf, long subsetSize, long supersetDf, long supersetSize, 
InternalAggregations aggregations, - DocValueFormat format) { + DocValueFormat format) { super(subsetDf, subsetSize, supersetDf, supersetSize, aggregations, format); } } public UnmappedSignificantTerms(String name, int requiredSize, long minDocCount, List pipelineAggregators, - Map metaData) { + Map metaData) { super(name, requiredSize, minDocCount, pipelineAggregators, metaData); } @@ -100,12 +100,12 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms aggregations, ReduceContext reduceContext) { - for (InternalAggregation aggregation : aggregations) { - if (!(aggregation instanceof UnmappedSignificantTerms)) { - return aggregation.reduce(aggregations, reduceContext); - } - } - return this; + return new UnmappedSignificantTerms(name, requiredSize, minDocCount, pipelineAggregators(), metaData); + } + + @Override + public boolean isMapped() { + return false; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java index 595991dac06..17a3e603b6f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java @@ -22,10 +22,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.BucketOrder; import java.io.IOException; import java.util.Collections; @@ -95,12 +95,12 @@ public class UnmappedTerms 
extends InternalTerms aggregations, ReduceContext reduceContext) { - for (InternalAggregation agg : aggregations) { - if (!(agg instanceof UnmappedTerms)) { - return agg.reduce(aggregations, reduceContext); - } - } - return this; + return new UnmappedTerms(name, order, requiredSize, minDocCount, pipelineAggregators(), metaData); + } + + @Override + public boolean isMapped() { + return false; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index 38b783e6b95..c2add245058 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -721,7 +721,6 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder fieldDataFields = new ArrayList<>(); while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { FieldAndFormat ff = FieldAndFormat.fromXContent(parser); factory.docValueField(ff.field, ff.format); diff --git a/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java index 9ec20001adc..d98a8585ecf 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/LeafFieldsLookup.java @@ -22,10 +22,13 @@ import org.apache.lucene.index.LeafReader; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.TypeFieldMapper; import 
java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; @@ -48,13 +51,10 @@ public class LeafFieldsLookup implements Map { private final Map cachedFieldData = new HashMap<>(); - private final SingleFieldsVisitor fieldVisitor; - LeafFieldsLookup(MapperService mapperService, @Nullable String[] types, LeafReader reader) { this.mapperService = mapperService; this.types = types; this.reader = reader; - this.fieldVisitor = new SingleFieldsVisitor(null); } public void setDocument(int docId) { @@ -142,16 +142,23 @@ public class LeafFieldsLookup implements Map { cachedFieldData.put(name, data); } if (data.fields() == null) { - String fieldName = data.fieldType().name(); - fieldVisitor.reset(fieldName); - try { - reader.document(docId, fieldVisitor); - fieldVisitor.postProcess(mapperService); - List storedFields = fieldVisitor.fields().get(data.fieldType().name()); - data.fields(singletonMap(fieldName, storedFields)); - } catch (IOException e) { - throw new ElasticsearchParseException("failed to load field [{}]", e, name); + List values; + if (TypeFieldMapper.NAME.equals(data.fieldType().name())) { + values = new ArrayList<>(1); + final DocumentMapper mapper = mapperService.documentMapper(); + if (mapper != null) { + values.add(mapper.type()); + } + } else { + values = new ArrayList(2); + SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.fieldType(), values); + try { + reader.document(docId, visitor); + } catch (IOException e) { + throw new ElasticsearchParseException("failed to load field [{}]", e, name); + } } + data.fields(singletonMap(data.fieldType().name(), values)); } return data; } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java index f75d01a0233..a1776d04d34 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java +++ 
b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java @@ -36,12 +36,16 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; +import java.util.NavigableSet; import java.util.Set; +import java.util.TreeSet; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -181,12 +185,36 @@ public abstract class RemoteClusterAware extends AbstractComponent { * {@link TransportAddress#META_ADDRESS} and their configured address will be used as the hostname for the generated discovery node. */ protected static Map>>> buildRemoteClustersDynamicConfig(Settings settings) { - Stream>> allConcreteSettings = REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(settings); + final Map>>> remoteSeeds = + buildRemoteClustersDynamicConfig(settings, REMOTE_CLUSTERS_SEEDS); + final Map>>> searchRemoteSeeds = + buildRemoteClustersDynamicConfig(settings, SEARCH_REMOTE_CLUSTERS_SEEDS); + // sort the intersection for predictable output order + final NavigableSet intersection = + new TreeSet<>(Arrays.asList( + searchRemoteSeeds.keySet().stream().filter(s -> remoteSeeds.keySet().contains(s)).sorted().toArray(String[]::new))); + if (intersection.isEmpty() == false) { + final String message = String.format( + Locale.ROOT, + "found duplicate remote cluster configurations for cluster alias%s [%s]", + intersection.size() == 1 ? 
"" : "es", + String.join(",", intersection)); + throw new IllegalArgumentException(message); + } + return Stream + .concat(remoteSeeds.entrySet().stream(), searchRemoteSeeds.entrySet().stream()) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + private static Map>>> buildRemoteClustersDynamicConfig( + final Settings settings, final Setting.AffixSetting> seedsSetting) { + final Stream>> allConcreteSettings = seedsSetting.getAllConcreteSettings(settings); return allConcreteSettings.collect( - Collectors.toMap(REMOTE_CLUSTERS_SEEDS::getNamespace, concreteSetting -> { - String clusterName = REMOTE_CLUSTERS_SEEDS.getNamespace(concreteSetting); + Collectors.toMap(seedsSetting::getNamespace, concreteSetting -> { + String clusterName = seedsSetting.getNamespace(concreteSetting); List addresses = concreteSetting.get(settings); - final boolean proxyMode = REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).exists(settings); + final boolean proxyMode = + REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).existsOrFallbackExists(settings); List> nodes = new ArrayList<>(addresses.size()); for (String address : addresses) { nodes.add(() -> buildSeedNode(clusterName, address, proxyMode)); @@ -231,9 +259,10 @@ public abstract class RemoteClusterAware extends AbstractComponent { List clusters = clusterNameResolver.resolveClusterNames(remoteClusterNames, remoteClusterName); if (clusters.isEmpty() == false) { if (indexExists.test(index)) { - // we use : as a separator for remote clusters. might conflict if there is an index that is actually named - // remote_cluster_alias:index_name - for this case we fail the request. the user can easily change the cluster alias - // if that happens + //We use ":" as a separator for remote clusters. There may be a conflict if there is an index that is named + //remote_cluster_alias:index_name - for this case we fail the request. The user can easily change the cluster alias + //if that happens. 
Note that indices and aliases can be created with ":" in their names names up to 6.last, which + //means such names need to be supported until 7.last. It will be possible to remove this check from 8.0 on. throw new IllegalArgumentException("Can not filter indices; index " + index + " exists but there is also a remote cluster named: " + remoteClusterName); } @@ -242,6 +271,9 @@ public abstract class RemoteClusterAware extends AbstractComponent { perClusterIndices.computeIfAbsent(clusterName, k -> new ArrayList<>()).add(indexName); } } else { + //Indices and aliases can be created with ":" in their names up to 6.last (although deprecated), and still be + //around in 7.x. That's why we need to be lenient here and treat the index as local although it contains ":". + //It will be possible to remove such leniency and assume that no local indices contain ":" only from 8.0 on. perClusterIndices.computeIfAbsent(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, k -> new ArrayList<>()).add(index); } } else { diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index e37ea81211a..db14fd015fd 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -733,6 +733,11 @@ public class TransportService extends AbstractLifecycleComponent implements Tran "failed to notify channel of error message for action [{}]", action), inner); } } + + @Override + public String toString() { + return "processing of [" + requestId + "][" + action + "]: " + request; + } }); } @@ -946,7 +951,7 @@ public class TransportService extends AbstractLifecycleComponent implements Tran assert responseHandlers.contains(requestId) == false; TimeoutInfoHolder timeoutInfoHolder = timeoutInfoHandlers.remove(requestId); if (timeoutInfoHolder != null) { - long time = System.currentTimeMillis(); + long time = 
threadPool.relativeTimeInMillis(); logger.warn("Received response for a request that has timed out, sent [{}ms] ago, timed out [{}ms] ago, " + "action [{}], node [{}], id [{}]", time - timeoutInfoHolder.sentTime(), time - timeoutInfoHolder.timeoutTime(), timeoutInfoHolder.action(), timeoutInfoHolder.node(), requestId); @@ -1009,7 +1014,7 @@ public class TransportService extends AbstractLifecycleComponent implements Tran final class TimeoutHandler implements Runnable { private final long requestId; - private final long sentTime = System.currentTimeMillis(); + private final long sentTime = threadPool.relativeTimeInMillis(); private final String action; private final DiscoveryNode node; volatile ScheduledFuture future; @@ -1023,7 +1028,7 @@ public class TransportService extends AbstractLifecycleComponent implements Tran @Override public void run() { if (responseHandlers.contains(requestId)) { - long timeoutTime = System.currentTimeMillis(); + long timeoutTime = threadPool.relativeTimeInMillis(); timeoutInfoHandlers.put(requestId, new TimeoutInfoHolder(node, action, sentTime, timeoutTime)); // now that we have the information visible via timeoutInfoHandlers, we try to remove the request id final Transport.ResponseContext holder = responseHandlers.remove(requestId); @@ -1049,6 +1054,11 @@ public class TransportService extends AbstractLifecycleComponent implements Tran "cancel must be called after the requestId [" + requestId + "] has been removed from clientHandlers"; FutureUtils.cancel(future); } + + @Override + public String toString() { + return "timeout handler for [" + requestId + "][" + action + "]"; + } } static class TimeoutInfoHolder { @@ -1176,7 +1186,17 @@ public class TransportService extends AbstractLifecycleComponent implements Tran if (ThreadPool.Names.SAME.equals(executor)) { processResponse(handler, response); } else { - threadPool.executor(executor).execute(() -> processResponse(handler, response)); + threadPool.executor(executor).execute(new 
Runnable() { + @Override + public void run() { + processResponse(handler, response); + } + + @Override + public String toString() { + return "delivery of response to [" + requestId + "][" + action + "]: " + response; + } + }); } } } @@ -1201,7 +1221,17 @@ public class TransportService extends AbstractLifecycleComponent implements Tran if (ThreadPool.Names.SAME.equals(executor)) { processException(handler, rtx); } else { - threadPool.executor(handler.executor()).execute(() -> processException(handler, rtx)); + threadPool.executor(handler.executor()).execute(new Runnable() { + @Override + public void run() { + processException(handler, rtx); + } + + @Override + public String toString() { + return "delivery of failure response to [" + requestId + "][" + action + "]: " + exception; + } + }); } } } diff --git a/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java b/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java new file mode 100644 index 00000000000..6187fc1f7f6 --- /dev/null +++ b/server/src/test/java/org/apache/lucene/queries/SpanMatchNoDocsQueryTests.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.lucene.queries; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.spans.SpanNearQuery; +import org.apache.lucene.search.spans.SpanOrQuery; +import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanTermQuery; +import org.apache.lucene.store.Directory; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +public class SpanMatchNoDocsQueryTests extends ESTestCase { + public void testSimple() throws Exception { + SpanMatchNoDocsQuery query = new SpanMatchNoDocsQuery("field", "a good reason"); + assertEquals(query.toString(), "SpanMatchNoDocsQuery(\"a good reason\")"); + Query rewrite = query.rewrite(null); + assertTrue(rewrite instanceof SpanMatchNoDocsQuery); + assertEquals(rewrite.toString(), "SpanMatchNoDocsQuery(\"a good reason\")"); + } + + public void testQuery() throws Exception { + Directory dir = newDirectory(); + Analyzer analyzer = new MockAnalyzer(random()); + IndexWriter iw = new IndexWriter(dir, + newIndexWriterConfig(analyzer).setMaxBufferedDocs(2).setMergePolicy(newLogMergePolicy())); + addDoc("one", iw); + addDoc("two", iw); + addDoc("three", iw); + IndexReader ir = DirectoryReader.open(iw); + IndexSearcher searcher = new IndexSearcher(ir); + + Query query = new SpanMatchNoDocsQuery("unkwown", "field not found"); + assertEquals(searcher.count(query), 0); + + ScoreDoc[] hits; + hits = searcher.search(query, 1000).scoreDocs; + assertEquals(0, hits.length); + 
assertEquals(query.toString(), "SpanMatchNoDocsQuery(\"field not found\")"); + + SpanOrQuery orQuery = new SpanOrQuery( + new SpanMatchNoDocsQuery("unknown", "field not found"), + new SpanTermQuery(new Term("unknown", "one")) + ); + assertEquals(searcher.count(orQuery), 0); + hits = searcher.search(orQuery, 1000).scoreDocs; + assertEquals(0, hits.length); + + orQuery = new SpanOrQuery( + new SpanMatchNoDocsQuery("key", "a good reason"), + new SpanTermQuery(new Term("key", "one")) + ); + assertEquals(searcher.count(orQuery), 1); + hits = searcher.search(orQuery, 1000).scoreDocs; + assertEquals(1, hits.length); + Query rewrite = orQuery.rewrite(ir); + assertEquals(rewrite, orQuery); + + SpanNearQuery nearQuery = new SpanNearQuery( + new SpanQuery[] {new SpanMatchNoDocsQuery("same", ""), new SpanMatchNoDocsQuery("same", "")}, + 0, true); + assertEquals(searcher.count(nearQuery), 0); + hits = searcher.search(nearQuery, 1000).scoreDocs; + assertEquals(0, hits.length); + rewrite = nearQuery.rewrite(ir); + assertEquals(rewrite, nearQuery); + + iw.close(); + ir.close(); + dir.close(); + } + + public void testEquals() { + Query q1 = new SpanMatchNoDocsQuery("key1", "one"); + Query q2 = new SpanMatchNoDocsQuery("key2", "two"); + assertTrue(q1.equals(q2)); + QueryUtils.check(q1); + } + + private void addDoc(String text, IndexWriter iw) throws IOException { + Document doc = new Document(); + Field f = newTextField("key", text, Field.Store.YES); + doc.add(f); + iw.addDocument(doc); + } + +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index 9c81f46911b..b2d8e0bb33a 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.Sort; import 
org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; +import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; @@ -83,6 +84,9 @@ public class ShrinkIndexIT extends ESIntegTestCase { @TestLogging("org.elasticsearch.index.store:DEBUG") public void testCreateShrinkIndexToN() { + + assumeFalse("https://github.com/elastic/elasticsearch/issues/34080", Constants.WINDOWS); + int[][] possibleShardSplits = new int[][] {{8,4,2}, {9, 3, 1}, {4, 2, 1}, {15,5,1}}; int[] shardSplits = randomFrom(possibleShardSplits); assertEquals(shardSplits[0], (shardSplits[0] / shardSplits[1]) * shardSplits[1]); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index 4510956358f..044f3263b4a 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.join.ScoreMode; +import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; @@ -91,6 +92,9 @@ public class SplitIndexIT extends ESIntegTestCase { } public void testSplitFromOneToN() { + + assumeFalse("https://github.com/elastic/elasticsearch/issues/34080", Constants.WINDOWS); + splitToN(1, 5, 10); client().admin().indices().prepareDelete("*").get(); int randomSplit = randomIntBetween(2, 
6); @@ -485,8 +489,6 @@ public class SplitIndexIT extends ESIntegTestCase { ImmutableOpenMap dataNodes = client().admin().cluster().prepareState().get().getState().nodes() .getDataNodes(); assertTrue("at least 2 nodes but was: " + dataNodes.size(), dataNodes.size() >= 2); - DiscoveryNode[] discoveryNodes = dataNodes.values().toArray(DiscoveryNode.class); - String mergeNode = discoveryNodes[0].getName(); // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due // to the require._name below. diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java index 91c78418683..beae91df77e 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.indices.mapping.get; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.xcontent.XContentParser; @@ -119,20 +120,15 @@ public class GetMappingsResponseTests extends AbstractStreamableXContentTestCase private static Map randomFieldMapping() { Map mappings = new HashMap<>(); if (randomBoolean()) { - Map regularMapping = new HashMap<>(); - regularMapping.put("type", randomBoolean() ? "text" : "keyword"); - regularMapping.put("index", "analyzed"); - regularMapping.put("analyzer", "english"); - return regularMapping; + mappings.put("type", randomBoolean() ? 
"text" : "keyword"); + mappings.put("index", "analyzed"); + mappings.put("analyzer", "english"); } else if (randomBoolean()) { - Map numberMapping = new HashMap<>(); - numberMapping.put("type", randomFrom("integer", "float", "long", "double")); - numberMapping.put("index", Objects.toString(randomBoolean())); - return numberMapping; + mappings.put("type", randomFrom("integer", "float", "long", "double")); + mappings.put("index", Objects.toString(randomBoolean())); } else if (randomBoolean()) { - Map objMapping = new HashMap<>(); - objMapping.put("type", "object"); - objMapping.put("dynamic", "strict"); + mappings.put("type", "object"); + mappings.put("dynamic", "strict"); Map properties = new HashMap<>(); Map props1 = new HashMap<>(); props1.put("type", randomFrom("text", "keyword")); @@ -146,12 +142,10 @@ public class GetMappingsResponseTests extends AbstractStreamableXContentTestCase props3.put("index", "false"); prop2properties.put("subsubfield", props3); props2.put("properties", prop2properties); - objMapping.put("properties", properties); - return objMapping; + mappings.put("properties", properties); } else { - Map plainMapping = new HashMap<>(); - plainMapping.put("type", "keyword"); - return plainMapping; + mappings.put("type", "keyword"); } + return mappings; } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 95282e358e1..d3e8c069601 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -52,9 +52,13 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; +import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentSet; + public class SearchAsyncActionTests extends ESTestCase { public void testSkipSearchShards() throws InterruptedException { @@ -139,7 +143,7 @@ public class SearchAsyncActionTests extends ESTestCase { protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { return new SearchPhase("test") { @Override - public void run() throws IOException { + public void run() { latch.countDown(); } }; @@ -260,7 +264,6 @@ public class SearchAsyncActionTests extends ESTestCase { SearchRequest request = new SearchRequest(); request.allowPartialSearchResults(true); request.setMaxConcurrentShardRequests(randomIntBetween(1, 100)); - CountDownLatch latch = new CountDownLatch(1); AtomicReference response = new AtomicReference<>(); ActionListener responseListener = new ActionListener() { @Override @@ -277,7 +280,7 @@ public class SearchAsyncActionTests extends ESTestCase { DiscoveryNode primaryNode = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); DiscoveryNode replicaNode = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); - Map> nodeToContextMap = new HashMap<>(); + Map> nodeToContextMap = newConcurrentMap(); AtomicInteger contextIdGenerator = new AtomicInteger(0); GroupShardsIterator shardsIter = getShardsIter("idx", new OriginalIndices(new String[]{"idx"}, IndicesOptions.strictExpandOpenAndForbidClosed()), @@ -296,6 +299,8 @@ public class SearchAsyncActionTests extends ESTestCase { lookup.put(replicaNode.getId(), new MockConnection(replicaNode)); Map aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY)); final ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, 
Runtime.getRuntime().availableProcessors())); + final CountDownLatch latch = new CountDownLatch(1); + final AtomicBoolean latchTriggered = new AtomicBoolean(); AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction( "test", @@ -326,7 +331,7 @@ public class SearchAsyncActionTests extends ESTestCase { Transport.Connection connection = getConnection(null, shard.currentNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult(contextIdGenerator.incrementAndGet(), connection.getNode()); - Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> new HashSet<>()); + Set ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); ids.add(testSearchPhaseResult.getRequestId()); if (randomBoolean()) { listener.onResponse(testSearchPhaseResult); @@ -339,15 +344,15 @@ public class SearchAsyncActionTests extends ESTestCase { protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { return new SearchPhase("test") { @Override - public void run() throws IOException { + public void run() { for (int i = 0; i < results.getNumShards(); i++) { TestSearchPhaseResult result = results.getAtomicArray().get(i); assertEquals(result.node.getId(), result.getSearchShardTarget().getNodeId()); sendReleaseSearchContext(result.getRequestId(), new MockConnection(result.node), OriginalIndices.NONE); } responseListener.onResponse(response); - if (latch.getCount() == 0) { - throw new AssertionError("Running a search phase after the latch has reached 0 !!!!"); + if (latchTriggered.compareAndSet(false, true) == false) { + throw new AssertionError("latch triggered twice"); } latch.countDown(); } diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java index e85c03411f7..8fa10c4ee26 100644 --- 
a/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java @@ -443,6 +443,7 @@ public class ReplicationOperationTests extends ESTestCase { final ShardRouting routing; final long localCheckpoint; final long globalCheckpoint; + final long maxSeqNoOfUpdatesOrDeletes; final Supplier replicationGroupSupplier; final Map knownLocalCheckpoints = new HashMap<>(); final Map knownGlobalCheckpoints = new HashMap<>(); @@ -452,6 +453,7 @@ public class ReplicationOperationTests extends ESTestCase { this.replicationGroupSupplier = replicationGroupSupplier; this.localCheckpoint = random().nextLong(); this.globalCheckpoint = randomNonNegativeLong(); + this.maxSeqNoOfUpdatesOrDeletes = randomNonNegativeLong(); } @Override @@ -515,6 +517,11 @@ public class ReplicationOperationTests extends ESTestCase { return globalCheckpoint; } + @Override + public long maxSeqNoOfUpdatesOrDeletes() { + return maxSeqNoOfUpdatesOrDeletes; + } + @Override public ReplicationGroup getReplicationGroup() { return replicationGroupSupplier.get(); @@ -571,6 +578,7 @@ public class ReplicationOperationTests extends ESTestCase { final ShardRouting replica, final Request request, final long globalCheckpoint, + final long maxSeqNoOfUpdatesOrDeletes, final ActionListener listener) { assertTrue("replica request processed twice on [" + replica + "]", request.processedOnReplicas.add(replica)); if (opFailures.containsKey(replica)) { diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 7f1b4adf8df..6c084cb29cd 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -171,6 +171,7 @@ public class TransportReplicationActionTests extends ESTestCase { action = new TestAction(Settings.EMPTY, "internal:testAction", transportService, clusterService, shardStateAction, threadPool); } + @Override @After public void tearDown() throws Exception { super.tearDown(); @@ -511,6 +512,7 @@ public class TransportReplicationActionTests extends ESTestCase { ActionListener> actionListener, TransportReplicationAction.PrimaryShardReference primaryShardReference) { return new NoopReplicationOperation(request, actionListener) { + @Override public void execute() throws Exception { assertPhase(task, "primary"); assertFalse(executed.getAndSet(true)); @@ -567,6 +569,7 @@ public class TransportReplicationActionTests extends ESTestCase { ActionListener> actionListener, TransportReplicationAction.PrimaryShardReference primaryShardReference) { return new NoopReplicationOperation(request, actionListener) { + @Override public void execute() throws Exception { assertPhase(task, "primary"); assertFalse(executed.getAndSet(true)); @@ -625,6 +628,7 @@ public class TransportReplicationActionTests extends ESTestCase { routingState == ShardRoutingState.RELOCATING ? 
state.nodes().iterator().next().getId() : null, false, routingState), new Request(), randomNonNegativeLong(), + randomNonNegativeLong(), listener); assertTrue(listener.isDone()); assertListenerThrows("non existent node should throw a NoNodeAvailableException", listener, NoNodeAvailableException.class); @@ -633,7 +637,7 @@ public class TransportReplicationActionTests extends ESTestCase { final ShardRouting replica = randomFrom(shardRoutings.replicaShards().stream() .filter(ShardRouting::assignedToNode).collect(Collectors.toList())); listener = new PlainActionFuture<>(); - proxy.performOn(replica, new Request(), randomNonNegativeLong(), listener); + proxy.performOn(replica, new Request(), randomNonNegativeLong(), randomNonNegativeLong(), listener); assertFalse(listener.isDone()); CapturingTransport.CapturedRequest[] captures = transport.getCapturedRequestsAndClear(); @@ -696,13 +700,6 @@ public class TransportReplicationActionTests extends ESTestCase { return null; }).when(shard).acquirePrimaryOperationPermit(any(), anyString(), anyObject()); - AtomicBoolean closed = new AtomicBoolean(); - Releasable releasable = () -> { - if (closed.compareAndSet(false, true) == false) { - fail("releasable is closed twice"); - } - }; - TestAction action = new TestAction(Settings.EMPTY, "internal:testSeqNoIsSetOnPrimary", transportService, clusterService, shardStateAction, threadPool) { @@ -805,7 +802,7 @@ public class TransportReplicationActionTests extends ESTestCase { replicaOperationTransportHandler.messageReceived( new TransportReplicationAction.ConcreteReplicaRequest<>( new Request().setShardId(shardId), replicaRouting.allocationId().getId(), randomNonNegativeLong(), - randomNonNegativeLong()), + randomNonNegativeLong(), randomNonNegativeLong()), createTransportChannel(new PlainActionFuture<>()), task); } catch (ElasticsearchException e) { assertThat(e.getMessage(), containsString("simulated")); @@ -895,7 +892,7 @@ public class TransportReplicationActionTests extends ESTestCase 
{ Request request = new Request(shardId).timeout("1ms"); action.new ReplicaOperationTransportHandler().messageReceived( new TransportReplicationAction.ConcreteReplicaRequest<>(request, "_not_a_valid_aid_", randomNonNegativeLong(), - randomNonNegativeLong()), + randomNonNegativeLong(), randomNonNegativeLong()), createTransportChannel(listener), maybeTask() ); try { @@ -939,8 +936,10 @@ public class TransportReplicationActionTests extends ESTestCase { final PlainActionFuture listener = new PlainActionFuture<>(); final Request request = new Request().setShardId(shardId); final long checkpoint = randomNonNegativeLong(); + final long maxSeqNoOfUpdatesOrDeletes = randomNonNegativeLong(); replicaOperationTransportHandler.messageReceived( - new TransportReplicationAction.ConcreteReplicaRequest<>(request, replica.allocationId().getId(), primaryTerm, checkpoint), + new TransportReplicationAction.ConcreteReplicaRequest<>(request, replica.allocationId().getId(), + primaryTerm, checkpoint, maxSeqNoOfUpdatesOrDeletes), createTransportChannel(listener), task); if (listener.isDone()) { listener.get(); // fail with the exception if there @@ -964,6 +963,8 @@ public class TransportReplicationActionTests extends ESTestCase { assertThat(capturedRequest.request, instanceOf(TransportReplicationAction.ConcreteReplicaRequest.class)); assertThat(((TransportReplicationAction.ConcreteReplicaRequest) capturedRequest.request).getGlobalCheckpoint(), equalTo(checkpoint)); + assertThat(((TransportReplicationAction.ConcreteReplicaRequest) capturedRequest.request).getMaxSeqNoOfUpdatesOrDeletes(), + equalTo(maxSeqNoOfUpdatesOrDeletes)); assertConcreteShardRequest(capturedRequest.request, request, replica.allocationId()); } @@ -1004,8 +1005,10 @@ public class TransportReplicationActionTests extends ESTestCase { final PlainActionFuture listener = new PlainActionFuture<>(); final Request request = new Request().setShardId(shardId); final long checkpoint = randomNonNegativeLong(); + final long 
maxSeqNoOfUpdates = randomNonNegativeLong(); replicaOperationTransportHandler.messageReceived( - new TransportReplicationAction.ConcreteReplicaRequest<>(request, replica.allocationId().getId(), primaryTerm, checkpoint), + new TransportReplicationAction.ConcreteReplicaRequest<>(request, replica.allocationId().getId(), + primaryTerm, checkpoint, maxSeqNoOfUpdates), createTransportChannel(listener), task); if (listener.isDone()) { listener.get(); // fail with the exception if there @@ -1105,8 +1108,6 @@ public class TransportReplicationActionTests extends ESTestCase { private class TestAction extends TransportReplicationAction { - private final boolean withDocumentFailureOnPrimary; - private final boolean withDocumentFailureOnReplica; TestAction(Settings settings, String actionName, TransportService transportService, ClusterService clusterService, ShardStateAction shardStateAction, @@ -1115,8 +1116,6 @@ public class TransportReplicationActionTests extends ESTestCase { shardStateAction, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME); - this.withDocumentFailureOnPrimary = false; - this.withDocumentFailureOnReplica = false; } TestAction(Settings settings, String actionName, TransportService transportService, @@ -1126,8 +1125,6 @@ public class TransportReplicationActionTests extends ESTestCase { shardStateAction, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), Request::new, Request::new, ThreadPool.Names.SAME); - this.withDocumentFailureOnPrimary = withDocumentFailureOnPrimary; - this.withDocumentFailureOnReplica = withDocumentFailureOnReplica; } @Override @@ -1166,7 +1163,6 @@ public class TransportReplicationActionTests extends ESTestCase { Index index = (Index) invocation.getArguments()[0]; final ClusterState state = clusterService.state(); if (state.metaData().hasIndex(index.getName())) { - final IndexMetaData indexSafe = 
state.metaData().getIndexSafe(index); return mockIndexService(clusterService.state().metaData().getIndexSafe(index), clusterService); } else { return null; @@ -1198,7 +1194,7 @@ public class TransportReplicationActionTests extends ESTestCase { }).when(indexShard).acquirePrimaryOperationPermit(any(ActionListener.class), anyString(), anyObject()); doAnswer(invocation -> { long term = (Long)invocation.getArguments()[0]; - ActionListener callback = (ActionListener) invocation.getArguments()[2]; + ActionListener callback = (ActionListener) invocation.getArguments()[3]; final long primaryTerm = indexShard.getPendingPrimaryTerm(); if (term < primaryTerm) { throw new IllegalArgumentException(String.format(Locale.ROOT, "%s operation term [%d] is too old (current [%d])", @@ -1207,7 +1203,8 @@ public class TransportReplicationActionTests extends ESTestCase { count.incrementAndGet(); callback.onResponse(count::decrementAndGet); return null; - }).when(indexShard).acquireReplicaOperationPermit(anyLong(), anyLong(), any(ActionListener.class), anyString(), anyObject()); + }).when(indexShard) + .acquireReplicaOperationPermit(anyLong(), anyLong(), anyLong(), any(ActionListener.class), anyString(), anyObject()); when(indexShard.routingEntry()).thenAnswer(invocationOnMock -> { final ClusterState state = clusterService.state(); final RoutingNode node = state.getRoutingNodes().node(state.nodes().getLocalNodeId()); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java index 9a571f1e9d8..1f1e9eb2a1e 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java @@ -109,6 +109,7 @@ public class TransportWriteActionTests extends ESTestCase { clusterService = createClusterService(threadPool); 
} + @Override @After public void tearDown() throws Exception { super.tearDown(); @@ -277,7 +278,7 @@ public class TransportWriteActionTests extends ESTestCase { TestShardRouting.newShardRouting(shardId, "NOT THERE", routingState == ShardRoutingState.RELOCATING ? state.nodes().iterator().next().getId() : null, false, routingState), new TestRequest(), - randomNonNegativeLong(), listener); + randomNonNegativeLong(), randomNonNegativeLong(), listener); assertTrue(listener.isDone()); assertListenerThrows("non existent node should throw a NoNodeAvailableException", listener, NoNodeAvailableException.class); @@ -285,7 +286,7 @@ public class TransportWriteActionTests extends ESTestCase { final ShardRouting replica = randomFrom(shardRoutings.replicaShards().stream() .filter(ShardRouting::assignedToNode).collect(Collectors.toList())); listener = new PlainActionFuture<>(); - proxy.performOn(replica, new TestRequest(), randomNonNegativeLong(), listener); + proxy.performOn(replica, new TestRequest(), randomNonNegativeLong(), randomNonNegativeLong(), listener); assertFalse(listener.isDone()); CapturingTransport.CapturedRequest[] captures = transport.getCapturedRequestsAndClear(); @@ -430,7 +431,6 @@ public class TransportWriteActionTests extends ESTestCase { Index index = (Index) invocation.getArguments()[0]; final ClusterState state = clusterService.state(); if (state.metaData().hasIndex(index.getName())) { - final IndexMetaData indexSafe = state.metaData().getIndexSafe(index); return mockIndexService(clusterService.state().metaData().getIndexSafe(index), clusterService); } else { return null; @@ -462,7 +462,8 @@ public class TransportWriteActionTests extends ESTestCase { count.incrementAndGet(); callback.onResponse(count::decrementAndGet); return null; - }).when(indexShard).acquireReplicaOperationPermit(anyLong(), anyLong(), any(ActionListener.class), anyString(), anyObject()); + }).when(indexShard) + .acquireReplicaOperationPermit(anyLong(), anyLong(), anyLong(), 
any(ActionListener.class), anyString(), anyObject()); when(indexShard.routingEntry()).thenAnswer(invocationOnMock -> { final ClusterState state = clusterService.state(); final RoutingNode node = state.getRoutingNodes().node(state.nodes().getLocalNodeId()); diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index e7606ec0718..4be46c4fc9e 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -48,7 +48,6 @@ import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.common.inject.internal.Join; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -196,7 +195,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { } Locale aLocale = new Locale("en", "US"); return String.format(aLocale, "(doc: %s\n requested: %s, fields: %s)", doc, requested, - selectedFields == null ? "NULL" : Join.join(",", selectedFields)); + selectedFields == null ? 
"NULL" : String.join(",", selectedFields)); } } diff --git a/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java b/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java index 9a964a97bd7..9e552829d81 100644 --- a/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java +++ b/server/src/test/java/org/elasticsearch/bootstrap/MaxMapCountCheckTests.java @@ -21,12 +21,12 @@ package org.elasticsearch.bootstrap; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.util.Constants; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; @@ -137,7 +137,7 @@ public class MaxMapCountCheckTests extends ESTestCase { reset(reader); final IOException ioException = new IOException("fatal"); when(reader.readLine()).thenThrow(ioException); - final Logger logger = ESLoggerFactory.getLogger("testGetMaxMapCountIOException"); + final Logger logger = LogManager.getLogger("testGetMaxMapCountIOException"); final MockLogAppender appender = new MockLogAppender(); appender.start(); appender.addExpectation( @@ -159,7 +159,7 @@ public class MaxMapCountCheckTests extends ESTestCase { { reset(reader); when(reader.readLine()).thenReturn("eof"); - final Logger logger = ESLoggerFactory.getLogger("testGetMaxMapCountNumberFormatException"); + final Logger logger = LogManager.getLogger("testGetMaxMapCountNumberFormatException"); final MockLogAppender appender = new MockLogAppender(); appender.start(); appender.addExpectation( diff --git 
a/server/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java b/server/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java index 274c48748fe..03340e211b4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/ack/AckClusterUpdateSettingsIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.ack; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; @@ -49,6 +50,7 @@ import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; +@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33673") @ClusterScope(scope = TEST, minNumDataNodes = 2) public class AckClusterUpdateSettingsIT extends ESIntegTestCase { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java index 776a0a158ae..f78f8495806 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java @@ -63,7 +63,6 @@ import java.util.stream.IntStream; import static java.util.Collections.emptyMap; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; -import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.startsWith; import static 
org.hamcrest.Matchers.containsString; @@ -298,7 +297,7 @@ public class TemplateUpgradeServiceTests extends ESTestCase { return null; }).when(mockIndicesAdminClient).deleteTemplate(any(DeleteIndexTemplateRequest.class), any(ActionListener.class)); - final TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool, + new TemplateUpgradeService(Settings.EMPTY, mockClient, clusterService, threadPool, Arrays.asList( templates -> { assertNull(templates.put("added_test_template", IndexTemplateMetaData.builder("added_test_template") @@ -415,42 +414,6 @@ public class TemplateUpgradeServiceTests extends ESTestCase { assertThat(finishInvocation.availablePermits(), equalTo(0)); } - private static final int NODE_TEST_ITERS = 100; - - private DiscoveryNodes randomNodes(int dataAndMasterNodes, int clientNodes) { - DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); - String masterNodeId = null; - for (int i = 0; i < dataAndMasterNodes; i++) { - String id = randomAlphaOfLength(10) + "_" + i; - Set roles; - if (i == 0) { - masterNodeId = id; - // The first node has to be master node - if (randomBoolean()) { - roles = EnumSet.of(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.DATA); - } else { - roles = EnumSet.of(DiscoveryNode.Role.MASTER); - } - } else { - if (randomBoolean()) { - roles = EnumSet.of(DiscoveryNode.Role.DATA); - } else { - roles = EnumSet.of(DiscoveryNode.Role.MASTER); - } - } - String node = "node_" + i; - builder.add(new DiscoveryNode(node, id, buildNewFakeTransportAddress(), emptyMap(), roles, randomVersion(random()))); - } - builder.masterNodeId(masterNodeId); // Node 0 is always a master node - - for (int i = 0; i < clientNodes; i++) { - String node = "client_" + i; - builder.add(new DiscoveryNode(node, randomAlphaOfLength(10) + "__" + i, buildNewFakeTransportAddress(), emptyMap(), - EnumSet.noneOf(DiscoveryNode.Role.class), randomVersion(random()))); - } - return builder.build(); - } - public static 
MetaData randomMetaData(IndexTemplateMetaData... templates) { MetaData.Builder builder = MetaData.builder(); for (IndexTemplateMetaData template : templates) { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java index 98c8dc1b2ca..de3223517b9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/OperationRoutingTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.cluster.routing; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterState; @@ -27,6 +26,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.ResponseCollectorService; @@ -530,7 +530,6 @@ public class OperationRoutingTests extends ESTestCase{ indexNames[i] = "test" + i; } ClusterState state = ClusterStateCreationUtils.stateWithAssignedPrimariesAndReplicas(indexNames, numShards, numReplicas); - final int numRepeatedSearches = 4; OperationRouting opRouting = new OperationRouting(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); opRouting.setUseAdaptiveReplicaSelection(true); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java index d4645208071..fbdcadc6ec3 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.routing.allocation; import com.carrotsearch.hppc.cursors.ObjectCursor; + import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest; @@ -149,8 +150,6 @@ public class FailedNodeRoutingTests extends ESAllocationTestCase { assertTrue(state.metaData().hasIndex(name)); } - ClusterState previousState = state; - logger.info("--> starting shards"); state = cluster.applyStartedShards(state, state.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> starting replicas a random number of times"); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 88766e7943e..711e7401ad2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -91,10 +91,6 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(5).numberOfReplicas(2)) .build(); - RoutingTable routingTable = RoutingTable.builder() - .addAsNew(metaData.index("test")) - .build(); - RoutingTable initialRoutingTable = RoutingTable.builder() .addAsNew(metaData.index("test")) .build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java index d226af26f81..ce26e41e053 
100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/RandomAllocationDeciderTests.java @@ -86,7 +86,6 @@ public class RandomAllocationDeciderTests extends ESAllocationTestCase { int nodeIdCounter = 0; int atMostNodes = scaledRandomIntBetween(Math.max(1, maxNumReplicas), 15); final boolean frequentNodes = randomBoolean(); - AllocationService.CommandsResult routingResult; for (int i = 0; i < numIters; i++) { logger.info("Start iteration [{}]", i); ClusterState.Builder stateBuilder = ClusterState.builder(clusterState); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java index 0668ba41524..25d29d0fca4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/SingleShardNoReplicasRoutingTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; @@ -35,9 +34,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllo import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import java.util.ArrayList; import java.util.HashSet; -import java.util.List; import java.util.Set; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -238,7 +235,6 @@ public class 
SingleShardNoReplicasRoutingTests extends ESAllocationTestCase { logger.info("Adding " + (numberOfIndices / 2) + " nodes"); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(); - List nodes = new ArrayList<>(); for (int i = 0; i < (numberOfIndices / 2); i++) { nodesBuilder.add(newNode("node" + i)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index 58d19fb61cf..d2e86c13d4f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -666,7 +666,6 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { } public void testFreeDiskPercentageAfterShardAssigned() { - RoutingNode rn = new RoutingNode("node1", newNode("node1")); DiskThresholdDecider decider = makeDecider(Settings.EMPTY); Map usages = new HashMap<>(); diff --git a/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 633e043ddd1..6d23866112d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -20,12 +20,12 @@ package org.elasticsearch.cluster.settings; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.cluster.metadata.MetaData; import 
org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.discovery.Discovery; @@ -355,7 +355,7 @@ public class ClusterSettingsIT extends ESIntegTestCase { public void testLoggerLevelUpdate() { assertAcked(prepareCreate("test")); - final Level level = ESLoggerFactory.getRootLogger().getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); final IllegalArgumentException e = expectThrows( @@ -366,8 +366,8 @@ public class ClusterSettingsIT extends ESIntegTestCase { try { final Settings.Builder testSettings = Settings.builder().put("logger.test", "TRACE").put("logger._root", "trace"); client().admin().cluster().prepareUpdateSettings().setTransientSettings(testSettings).execute().actionGet(); - assertEquals(Level.TRACE, ESLoggerFactory.getLogger("test").getLevel()); - assertEquals(Level.TRACE, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(Level.TRACE, LogManager.getLogger("test").getLevel()); + assertEquals(Level.TRACE, LogManager.getRootLogger().getLevel()); } finally { if (randomBoolean()) { final Settings.Builder defaultSettings = Settings.builder().putNull("logger.test").putNull("logger._root"); @@ -376,8 +376,8 @@ public class ClusterSettingsIT extends ESIntegTestCase { final Settings.Builder defaultSettings = Settings.builder().putNull("logger.*"); client().admin().cluster().prepareUpdateSettings().setTransientSettings(defaultSettings).execute().actionGet(); } - assertEquals(level, ESLoggerFactory.getLogger("test").getLevel()); - assertEquals(level, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(level, LogManager.getLogger("test").getLevel()); + assertEquals(level, LogManager.getRootLogger().getLevel()); } } diff --git a/server/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java 
b/server/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java index 9e50d0afd71..9dc6faaa183 100644 --- a/server/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java +++ b/server/src/test/java/org/elasticsearch/common/ExponentiallyWeightedMovingAverageTests.java @@ -23,7 +23,6 @@ import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThan; -import static org.junit.Assert.assertThat; /** * Implements exponentially weighted moving averages (commonly abbreviated EWMA) for a single value. @@ -41,19 +40,11 @@ public class ExponentiallyWeightedMovingAverageTests extends ESTestCase { } public void testInvalidAlpha() { - try { - ExponentiallyWeightedMovingAverage ewma = new ExponentiallyWeightedMovingAverage(-0.5, 10); - fail("should have failed to create EWMA"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1")); - } + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new ExponentiallyWeightedMovingAverage(-0.5, 10)); + assertThat(ex.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1")); - try { - ExponentiallyWeightedMovingAverage ewma = new ExponentiallyWeightedMovingAverage(1.5, 10); - fail("should have failed to create EWMA"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1")); - } + ex = expectThrows(IllegalArgumentException.class, () -> new ExponentiallyWeightedMovingAverage(1.5, 10)); + assertThat(ex.getMessage(), equalTo("alpha must be greater or equal to 0 and less than or equal to 1")); } public void testConvergingToValue() { diff --git a/server/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java 
b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java similarity index 94% rename from server/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java rename to server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java index 2fad9738cb5..61448ce15ea 100644 --- a/server/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/joda/JodaDateMathParserTests.java @@ -24,17 +24,17 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; -import java.util.TimeZone; +import java.time.ZoneId; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongSupplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -public class DateMathParserTests extends ESTestCase { +public class JodaDateMathParserTests extends ESTestCase { FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime||epoch_millis"); - DateMathParser parser = new DateMathParser(formatter); + JodaDateMathParser parser = new JodaDateMathParser(formatter); void assertDateMathEquals(String toTest, String expected) { assertDateMathEquals(toTest, expected, 0, false, null); @@ -145,7 +145,7 @@ public class DateMathParserTests extends ESTestCase { public void testNow() { - final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, null); + final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null); assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null); assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null); @@ -159,13 +159,13 @@ public class DateMathParserTests extends ESTestCase { public void testRoundingPreservesEpochAsBaseDate() { // If a user only specifies times, then the date needs to always be 1970-01-01 regardless of rounding FormatDateTimeFormatter formatter = 
Joda.forPattern("HH:mm:ss"); - DateMathParser parser = new DateMathParser(formatter); + JodaDateMathParser parser = new JodaDateMathParser(formatter); assertEquals( this.formatter.parser().parseMillis("1970-01-01T04:52:20.000Z"), - parser.parse("04:52:20", () -> 0, false, null)); + parser.parse("04:52:20", () -> 0, false, (ZoneId) null)); assertEquals( this.formatter.parser().parseMillis("1970-01-01T04:52:20.999Z"), - parser.parse("04:52:20", () -> 0, true, null)); + parser.parse("04:52:20", () -> 0, true, (ZoneId) null)); } // Implicit rounding happening when parts of the date are not specified @@ -184,10 +184,10 @@ public class DateMathParserTests extends ESTestCase { // implicit rounding with explicit timezone in the date format FormatDateTimeFormatter formatter = Joda.forPattern("YYYY-MM-ddZ"); - DateMathParser parser = new DateMathParser(formatter); - long time = parser.parse("2011-10-09+01:00", () -> 0, false, null); + JodaDateMathParser parser = new JodaDateMathParser(formatter); + long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time); - time = parser.parse("2011-10-09+01:00", () -> 0, true, null); + time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time); } @@ -258,7 +258,7 @@ public class DateMathParserTests extends ESTestCase { assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000"); // also check other time units - DateMathParser parser = new DateMathParser(Joda.forPattern("epoch_second||dateOptionalTime")); + JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_second||dateOptionalTime")); long datetime = parser.parse("1418248078", () -> 0); assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000"); @@ -298,16 +298,16 @@ public class DateMathParserTests extends ESTestCase { called.set(true); return 42L; }; - 
parser.parse("2014-11-18T14:27:32", now, false, null); + parser.parse("2014-11-18T14:27:32", now, false, (ZoneId) null); assertFalse(called.get()); - parser.parse("now/d", now, false, null); + parser.parse("now/d", now, false, (ZoneId) null); assertTrue(called.get()); } public void testThatUnixTimestampMayNotHaveTimeZone() { - DateMathParser parser = new DateMathParser(Joda.forPattern("epoch_millis")); + JodaDateMathParser parser = new JodaDateMathParser(Joda.forPattern("epoch_millis")); try { - parser.parse("1234567890123", () -> 42, false, DateTimeZone.forTimeZone(TimeZone.getTimeZone("CET"))); + parser.parse("1234567890123", () -> 42, false, ZoneId.of("CET")); fail("Expected ElasticsearchParseException"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("failed to parse date field")); diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 419c59e9162..10c58c562ad 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -19,12 +19,12 @@ package org.elasticsearch.common.settings; import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.index.IndexModule; @@ -905,8 +905,8 @@ public class ScopedSettingsTests extends ESTestCase { } public void testLoggingUpdates() { 
- final Level level = ESLoggerFactory.getRootLogger().getLevel(); - final Level testLevel = ESLoggerFactory.getLogger("test").getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); + final Level testLevel = LogManager.getLogger("test").getLevel(); Level property = randomFrom(Level.values()); Settings.Builder builder = Settings.builder().put("logger.level", property); try { @@ -916,33 +916,33 @@ public class ScopedSettingsTests extends ESTestCase { IllegalArgumentException.class, () -> settings.validate(Settings.builder().put("logger._root", "boom").build(), false)); assertEquals("Unknown level constant [BOOM].", ex.getMessage()); - assertEquals(level, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(level, LogManager.getRootLogger().getLevel()); settings.applySettings(Settings.builder().put("logger._root", "TRACE").build()); - assertEquals(Level.TRACE, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(Level.TRACE, LogManager.getRootLogger().getLevel()); settings.applySettings(Settings.builder().build()); - assertEquals(property, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(property, LogManager.getRootLogger().getLevel()); settings.applySettings(Settings.builder().put("logger.test", "TRACE").build()); - assertEquals(Level.TRACE, ESLoggerFactory.getLogger("test").getLevel()); + assertEquals(Level.TRACE, LogManager.getLogger("test").getLevel()); settings.applySettings(Settings.builder().build()); - assertEquals(property, ESLoggerFactory.getLogger("test").getLevel()); + assertEquals(property, LogManager.getLogger("test").getLevel()); } finally { - Loggers.setLevel(ESLoggerFactory.getRootLogger(), level); - Loggers.setLevel(ESLoggerFactory.getLogger("test"), testLevel); + Loggers.setLevel(LogManager.getRootLogger(), level); + Loggers.setLevel(LogManager.getLogger("test"), testLevel); } } public void testFallbackToLoggerLevel() { - final Level level = ESLoggerFactory.getRootLogger().getLevel(); + final Level level = 
LogManager.getRootLogger().getLevel(); try { ClusterSettings settings = new ClusterSettings(Settings.builder().put("logger.level", "ERROR").build(), ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - assertEquals(level, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(level, LogManager.getRootLogger().getLevel()); settings.applySettings(Settings.builder().put("logger._root", "TRACE").build()); - assertEquals(Level.TRACE, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(Level.TRACE, LogManager.getRootLogger().getLevel()); settings.applySettings(Settings.builder().build()); // here we fall back to 'logger.level' which is our default. - assertEquals(Level.ERROR, ESLoggerFactory.getRootLogger().getLevel()); + assertEquals(Level.ERROR, LogManager.getRootLogger().getLevel()); } finally { - Loggers.setLevel(ESLoggerFactory.getRootLogger(), level); + Loggers.setLevel(LogManager.getRootLogger(), level); } } diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index d9cecdd604c..70e958b974b 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -439,10 +439,7 @@ public class SettingsTests extends ESTestCase { Settings filteredSettings = builder.build().filter((k) -> false); assertEquals(0, filteredSettings.size()); - for (String k : filteredSettings.keySet()) { - fail("no element"); - } assertFalse(filteredSettings.keySet().contains("a.c")); assertFalse(filteredSettings.keySet().contains("a")); assertFalse(filteredSettings.keySet().contains("a.b")); diff --git a/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java b/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java new file mode 100644 index 00000000000..8f36258c5fe --- /dev/null +++ 
b/server/src/test/java/org/elasticsearch/common/time/DateUtilsTests.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTimeZone; + +import java.time.Instant; +import java.time.ZoneId; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class DateUtilsTests extends ESTestCase { + private static final Set IGNORE = new HashSet<>(Arrays.asList( + "Eire", "Europe/Dublin" // dublin timezone in joda does not account for DST + )); + public void testTimezoneIds() { + assertNull(DateUtils.dateTimeZoneToZoneId(null)); + assertNull(DateUtils.zoneIdToDateTimeZone(null)); + for (String jodaId : DateTimeZone.getAvailableIDs()) { + if (IGNORE.contains(jodaId)) continue; + DateTimeZone jodaTz = DateTimeZone.forID(jodaId); + ZoneId zoneId = DateUtils.dateTimeZoneToZoneId(jodaTz); // does not throw + long now = 0; + assertThat(jodaId, zoneId.getRules().getOffset(Instant.ofEpochMilli(now)).getTotalSeconds() * 1000, + equalTo(jodaTz.getOffset(now))); + if (DateUtils.DEPRECATED_SHORT_TIMEZONES.containsKey(jodaTz.getID())) { + 
assertWarnings("Use of short timezone id " + jodaId + " is deprecated. Use " + zoneId.getId() + " instead"); + } + // roundtrip does not throw either + assertNotNull(DateUtils.zoneIdToDateTimeZone(zoneId)); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/common/time/DateMathParserTests.java b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java similarity index 95% rename from server/src/test/java/org/elasticsearch/common/time/DateMathParserTests.java rename to server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java index 66e68b0aad0..a543af0445d 100644 --- a/server/src/test/java/org/elasticsearch/common/time/DateMathParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/time/JavaDateMathParserTests.java @@ -33,10 +33,10 @@ import java.util.function.LongSupplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; -public class DateMathParserTests extends ESTestCase { +public class JavaDateMathParserTests extends ESTestCase { private final DateFormatter formatter = DateFormatters.forPattern("dateOptionalTime||epoch_millis"); - private final DateMathParser parser = new DateMathParser(formatter); + private final JavaDateMathParser parser = new JavaDateMathParser(formatter); public void testBasicDates() { assertDateMathEquals("2014", "2014-01-01T00:00:00.000"); @@ -125,7 +125,7 @@ public class DateMathParserTests extends ESTestCase { } public void testNow() { - final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, null); + final long now = parser.parse("2014-11-18T14:27:32", () -> 0, false, (ZoneId) null); assertDateMathEquals("now", "2014-11-18T14:27:32", now, false, null); assertDateMathEquals("now+M", "2014-12-18T14:27:32", now, false, null); @@ -139,14 +139,14 @@ public class DateMathParserTests extends ESTestCase { public void testRoundingPreservesEpochAsBaseDate() { // If a user only specifies times, then the date needs to always 
be 1970-01-01 regardless of rounding DateFormatter formatter = DateFormatters.forPattern("HH:mm:ss"); - DateMathParser parser = new DateMathParser(formatter); + JavaDateMathParser parser = new JavaDateMathParser(formatter); ZonedDateTime zonedDateTime = DateFormatters.toZonedDateTime(formatter.parse("04:52:20")); assertThat(zonedDateTime.getYear(), is(1970)); long millisStart = zonedDateTime.toInstant().toEpochMilli(); - assertEquals(millisStart, parser.parse("04:52:20", () -> 0, false, null)); + assertEquals(millisStart, parser.parse("04:52:20", () -> 0, false, (ZoneId) null)); // due to rounding up, we have to add the number of milliseconds here manually long millisEnd = DateFormatters.toZonedDateTime(formatter.parse("04:52:20")).toInstant().toEpochMilli() + 999; - assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, null)); + assertEquals(millisEnd, parser.parse("04:52:20", () -> 0, true, (ZoneId) null)); } // Implicit rounding happening when parts of the date are not specified @@ -165,10 +165,10 @@ public class DateMathParserTests extends ESTestCase { // implicit rounding with explicit timezone in the date format DateFormatter formatter = DateFormatters.forPattern("yyyy-MM-ddXXX"); - DateMathParser parser = new DateMathParser(formatter); - long time = parser.parse("2011-10-09+01:00", () -> 0, false, null); + JavaDateMathParser parser = new JavaDateMathParser(formatter); + long time = parser.parse("2011-10-09+01:00", () -> 0, false, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T00:00:00.000+01:00", () -> 0), time); - time = parser.parse("2011-10-09+01:00", () -> 0, true, null); + time = parser.parse("2011-10-09+01:00", () -> 0, true, (ZoneId) null); assertEquals(this.parser.parse("2011-10-09T23:59:59.999+01:00", () -> 0), time); } @@ -239,7 +239,7 @@ public class DateMathParserTests extends ESTestCase { assertDateMathEquals("1418248078000||/m", "2014-12-10T21:47:00.000"); // also check other time units - DateMathParser parser = new 
DateMathParser(DateFormatters.forPattern("epoch_second||dateOptionalTime")); + JavaDateMathParser parser = new JavaDateMathParser(DateFormatters.forPattern("epoch_second||dateOptionalTime")); long datetime = parser.parse("1418248078", () -> 0); assertDateEquals(datetime, "1418248078", "2014-12-10T21:47:58.000"); @@ -279,9 +279,9 @@ public class DateMathParserTests extends ESTestCase { called.set(true); return 42L; }; - parser.parse("2014-11-18T14:27:32", now, false, null); + parser.parse("2014-11-18T14:27:32", now, false, (ZoneId) null); assertFalse(called.get()); - parser.parse("now/d", now, false, null); + parser.parse("now/d", now, false, (ZoneId) null); assertTrue(called.get()); } diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessorTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessorTests.java index 18351d1ea6b..387f15e3f33 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/AsyncIOProcessorTests.java @@ -39,7 +39,8 @@ public class AsyncIOProcessorTests extends ESTestCase { protected void write(List>> candidates) throws IOException { if (blockInternal) { synchronized (this) { - for (Tuple> c :candidates) { + // TODO: check why we need a loop, can't we just use received.addAndGet(candidates.size()) + for (int i = 0; i < candidates.size(); i++) { received.incrementAndGet(); } } diff --git a/server/src/test/java/org/elasticsearch/common/util/iterable/IterablesTests.java b/server/src/test/java/org/elasticsearch/common/util/iterable/IterablesTests.java index 4d9d89312a3..6501c7caa1d 100644 --- a/server/src/test/java/org/elasticsearch/common/util/iterable/IterablesTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/iterable/IterablesTests.java @@ -19,14 +19,14 @@ package org.elasticsearch.common.util.iterable; +import org.elasticsearch.test.ESTestCase; + 
import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; -import org.elasticsearch.test.ESTestCase; - import static org.hamcrest.object.HasToString.hasToString; public class IterablesTests extends ESTestCase { @@ -64,7 +64,7 @@ public class IterablesTests extends ESTestCase { Iterable allInts = Iterables.flatten(list); int count = 0; - for(int x : allInts) { + for(@SuppressWarnings("unused") int x : allInts) { count++; } assertEquals(0, count); @@ -73,14 +73,14 @@ public class IterablesTests extends ESTestCase { // changes to the outer list are not seen since flatten pre-caches outer list on init: count = 0; - for(int x : allInts) { + for(@SuppressWarnings("unused") int x : allInts) { count++; } assertEquals(0, count); // but changes to the original inner lists are seen: list.get(0).add(0); - for(int x : allInts) { + for(@SuppressWarnings("unused") int x : allInts) { count++; } assertEquals(1, count); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 3fb5f5996be..38e75b921fa 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -48,6 +48,7 @@ import org.joda.time.format.ISODateTimeFormat; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.math.BigDecimal; import java.math.BigInteger; import java.nio.file.Path; import java.time.DayOfWeek; @@ -266,6 +267,36 @@ public abstract class BaseXContentTestCase extends ESTestCase { .endObject()); } + public void testBigIntegers() throws Exception { + assertResult("{'bigint':null}", () -> builder().startObject().field("bigint", (BigInteger) null).endObject()); + assertResult("{'bigint':[]}", () -> 
builder().startObject().array("bigint", new BigInteger[]{}).endObject()); + + BigInteger bigInteger = BigInteger.valueOf(Long.MAX_VALUE).add(BigInteger.ONE); + String result = "{'bigint':" + bigInteger.toString() + "}"; + assertResult(result, () -> builder().startObject().field("bigint", bigInteger).endObject()); + + result = "{'bigint':[" + bigInteger.toString() + "," + bigInteger.toString() + "," + bigInteger.toString() +"]}"; + assertResult(result, () -> builder() + .startObject() + .array("bigint", bigInteger, bigInteger, bigInteger) + .endObject()); + } + + public void testBigDecimals() throws Exception { + assertResult("{'bigdecimal':null}", () -> builder().startObject().field("bigdecimal", (BigInteger) null).endObject()); + assertResult("{'bigdecimal':[]}", () -> builder().startObject().array("bigdecimal", new BigInteger[]{}).endObject()); + + BigDecimal bigDecimal = new BigDecimal("234.43"); + String result = "{'bigdecimal':" + bigDecimal.toString() + "}"; + assertResult(result, () -> builder().startObject().field("bigdecimal", bigDecimal).endObject()); + + result = "{'bigdecimal':[" + bigDecimal.toString() + "," + bigDecimal.toString() + "," + bigDecimal.toString() +"]}"; + assertResult(result, () -> builder() + .startObject() + .array("bigdecimal", bigDecimal, bigDecimal, bigDecimal) + .endObject()); + } + public void testStrings() throws IOException { assertResult("{'string':null}", () -> builder().startObject().field("string", (String) null).endObject()); assertResult("{'string':'value'}", () -> builder().startObject().field("string", "value").endObject()); diff --git a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java index c0b01eb5ec5..fa023882df5 100644 --- a/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java +++ b/server/src/test/java/org/elasticsearch/discovery/AbstractDisruptionTestCase.java @@ -87,6 +87,7 
@@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase { private boolean disableBeforeIndexDeletion; + @Override @Before public void setUp() throws Exception { super.setUp(); @@ -240,7 +241,6 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase { public ServiceDisruptionScheme addRandomDisruptionScheme() { // TODO: add partial partitions - NetworkDisruption p; final DisruptedLinks disruptedLinks; if (randomBoolean()) { disruptedLinks = TwoPartitions.random(random(), internalCluster().getNodeNames()); diff --git a/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java b/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java index fab38a2b73b..3b08eb6870e 100644 --- a/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/ClusterDisruptionIT.java @@ -364,7 +364,7 @@ public class ClusterDisruptionIT extends AbstractDisruptionTestCase { public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Exception { // don't use DEFAULT settings (which can cause node disconnects on a slow CI machine) configureCluster(Settings.EMPTY, 3, null, 1); - final String masterNode = internalCluster().startMasterOnlyNode(); + internalCluster().startMasterOnlyNode(); final String node_1 = internalCluster().startDataOnlyNode(); logger.info("--> creating index [test] with one shard and on replica"); diff --git a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 0bf80e52398..0351a10dea3 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -103,7 +103,6 @@ public class MetaDataStateFormatTests extends ESTestCase { final long id = addDummyFiles("foo-", dirs); Format format = new Format("foo-"); 
DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); - int version = between(0, Integer.MAX_VALUE/2); format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); @@ -117,7 +116,6 @@ public class MetaDataStateFormatTests extends ESTestCase { DummyState read = format.read(NamedXContentRegistry.EMPTY, list[0]); assertThat(read, equalTo(state)); } - final int version2 = between(version, Integer.MAX_VALUE); DummyState state2 = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); format.write(state2, dirs); @@ -145,7 +143,6 @@ public class MetaDataStateFormatTests extends ESTestCase { Format format = new Format("foo-"); DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); - int version = between(0, Integer.MAX_VALUE/2); format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); @@ -169,7 +166,6 @@ public class MetaDataStateFormatTests extends ESTestCase { final long id = addDummyFiles("foo-", dirs); Format format = new Format("foo-"); DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); - int version = between(0, Integer.MAX_VALUE/2); format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); diff --git a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java index 1f2526b2e28..2c75437ee35 100644 --- a/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -30,7 +30,6 @@ import 
org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LogByteSizeMergePolicy; import org.apache.lucene.index.Term; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.join.BitSetProducer; @@ -38,9 +37,9 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BitSet; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; @@ -89,7 +88,6 @@ public class BitSetFilterCacheTests extends ESTestCase { DirectoryReader reader = DirectoryReader.open(writer); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0)); - IndexSearcher searcher = new IndexSearcher(reader); BitsetFilterCache cache = new BitsetFilterCache(INDEX_SETTINGS, new BitsetFilterCache.Listener() { @Override @@ -114,7 +112,6 @@ public class BitSetFilterCacheTests extends ESTestCase { reader.close(); reader = DirectoryReader.open(writer); reader = ElasticsearchDirectoryReader.wrap(reader, new ShardId("test", "_na_", 0)); - searcher = new IndexSearcher(reader); assertThat(matchCount(filter, reader), equalTo(3)); diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index ddb2b857486..c225b090816 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -19,6 +19,7 @@ package 
org.elasticsearch.index.codec; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; @@ -30,7 +31,6 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.store.Directory; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -95,7 +95,7 @@ public class CodecTests extends ESTestCase { MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER); MapperService service = new MapperService(settings, indexAnalyzers, xContentRegistry(), similarityService, mapperRegistry, () -> null); - return new CodecService(service, ESLoggerFactory.getLogger("test")); + return new CodecService(service, LogManager.getLogger("test")); } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index e77203b83fe..26c2453a271 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -19,42 +19,9 @@ package org.elasticsearch.index.engine; -import java.io.Closeable; -import java.io.IOException; -import java.io.UncheckedIOException; -import java.nio.charset.Charset; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Base64; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashMap; -import 
java.util.List; -import java.util.Map; -import java.util.Queue; -import java.util.Set; -import java.util.concurrent.BrokenBarrierException; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.CyclicBarrier; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; -import java.util.function.Function; -import java.util.function.LongSupplier; -import java.util.function.Supplier; -import java.util.function.ToLongBiFunction; -import java.util.stream.Collectors; -import java.util.stream.LongStream; - import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; + import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -162,6 +129,40 @@ import org.elasticsearch.test.IndexSettingsModule; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; +import java.io.Closeable; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Base64; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.Set; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import 
java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.function.LongSupplier; +import java.util.function.Supplier; +import java.util.function.ToLongBiFunction; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + import static java.util.Collections.emptyMap; import static java.util.Collections.shuffle; import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY; @@ -2863,11 +2864,7 @@ public class InternalEngineTests extends EngineTestCase { IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, new NoneCircuitBreakerService(), () -> SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm::get, tombstoneDocSupplier()); - try { - InternalEngine internalEngine = new InternalEngine(brokenConfig); - fail("translog belongs to a different engine"); - } catch (EngineCreationFailureException ex) { - } + expectThrows(EngineCreationFailureException.class, () -> new InternalEngine(brokenConfig)); engine = createEngine(store, primaryTranslogDir); // and recover again! 
assertVisibleCount(engine, numDocs, false); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index ad9d0c41494..3a185620f7b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -29,12 +29,12 @@ import org.apache.lucene.index.MultiReader; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateMathParser; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; @@ -121,7 +121,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { DirectoryReader reader = DirectoryReader.open(w); DateFieldType ft = new DateFieldType(); ft.setName("my_date"); - DateMathParser alternateFormat = new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER); + DateMathParser alternateFormat = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser(); doTestIsFieldWithinQuery(ft, reader, null, null); doTestIsFieldWithinQuery(ft, reader, null, alternateFormat); doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java index af29edcef30..07a80a31deb 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java @@ -35,9 +35,12 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.Function; @@ -90,20 +93,26 @@ public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase { } public void testFieldCapabilities() { + List allFields = new ArrayList<>(ALL_FLAT_FIELDS); + allFields.addAll(ALL_OBJECT_FIELDS); FieldCapabilitiesResponse index1 = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("index1")).actionGet(); - assertFieldCaps(index1, ALL_FLAT_FIELDS); + assertFieldCaps(index1, allFields); FieldCapabilitiesResponse filtered = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("filtered")).actionGet(); - assertFieldCaps(filtered, FILTERED_FLAT_FIELDS); + List filteredFields = new ArrayList<>(FILTERED_FLAT_FIELDS); + filteredFields.addAll(ALL_OBJECT_FIELDS); + assertFieldCaps(filtered, filteredFields); //double check that submitting the filtered mappings to an unfiltered index leads to the same field_caps output //as the one coming from a filtered index with same mappings GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("filtered").get(); ImmutableOpenMap filteredMapping = getMappingsResponse.getMappings().get("filtered"); assertAcked(client().admin().indices().prepareCreate("test").addMapping("_doc", filteredMapping.get("_doc").getSourceAsMap())); FieldCapabilitiesResponse test = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("test")).actionGet(); - assertFieldCaps(test, FILTERED_FLAT_FIELDS); 
+ // properties.value is an object field in the new mapping + filteredFields.add("properties.value"); + assertFieldCaps(test, filteredFields); } - private static void assertFieldCaps(FieldCapabilitiesResponse fieldCapabilitiesResponse, String[] expectedFields) { + private static void assertFieldCaps(FieldCapabilitiesResponse fieldCapabilitiesResponse, Collection expectedFields) { Map> responseMap = fieldCapabilitiesResponse.get(); Set builtInMetaDataFields = IndicesModule.getBuiltInMetaDataFields(); for (String field : builtInMetaDataFields) { @@ -118,7 +127,7 @@ public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase { } private static void assertFieldMappings(Map> mappings, - String[] expectedFields) { + Collection expectedFields) { assertEquals(1, mappings.size()); Map fields = new HashMap<>(mappings.get("_doc")); Set builtInMetaDataFields = IndicesModule.getBuiltInMetaDataFields(); @@ -245,14 +254,18 @@ public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase { } } - private static final String[] ALL_FLAT_FIELDS = new String[]{ + private static final Collection ALL_FLAT_FIELDS = Arrays.asList( "name.first", "name.last_visible", "birth", "age_visible", "address.street", "address.location", "address.area_visible", "properties.key_visible", "properties.key_visible.keyword", "properties.value", "properties.value.keyword_visible" - }; + ); - private static final String[] FILTERED_FLAT_FIELDS = new String[]{ - "name.last_visible", "age_visible", "address.area_visible", "properties.key_visible", "properties.value.keyword_visible" - }; + private static final Collection ALL_OBJECT_FIELDS = Arrays.asList( + "name", "address", "properties" + ); + + private static final Collection FILTERED_FLAT_FIELDS = Arrays.asList( + "name.last_visible", "age_visible", "address.area_visible", "properties.key_visible", "properties.value.keyword_visible" + ); private static final String TEST_ITEM = "{\n" + " \"_doc\": {\n" + diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java index 97a72d75e3a..d2971034fd7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperTests.java @@ -37,7 +37,7 @@ public class MapperTests extends ESTestCase { } public void testBuilderContextWithIndexSettingsAsNull() { - NullPointerException e = expectThrows(NullPointerException.class, () -> new Mapper.BuilderContext(null, new ContentPath(1))); + expectThrows(NullPointerException.class, () -> new Mapper.BuilderContext(null, new ContentPath(1))); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java index 0aa8565ea57..34e7081d51d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/RangeFieldQueryStringQueryBuilderTests.java @@ -31,8 +31,8 @@ import org.apache.lucene.search.Query; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.search.internal.SearchContext; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 4736cbe4712..1211488e466 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -621,11 +621,6 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { } public void testIndexPrefixIndexTypes() throws IOException { - QueryShardContext queryShardContext = indexService.newQueryShardContext( - randomInt(20), null, () -> { - throw new UnsupportedOperationException(); - }, null); - { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") diff --git a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java index fe39345dadd..5e443ec41ed 100644 --- a/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/CommonTermsQueryBuilderTests.java @@ -179,7 +179,6 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase query.parse(Type.PHRASE, STRING_FIELD_NAME, "")); + query.setAnalyzer(new MockGraphAnalyzer(createGiantGraphMultiTerms())); + expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, STRING_FIELD_NAME, "")); + } + + private static class MockGraphAnalyzer extends Analyzer { + final CannedBinaryTokenStream.BinaryToken[] tokens; + + private MockGraphAnalyzer(CannedBinaryTokenStream.BinaryToken[] tokens ) { + this.tokens = tokens; + } + @Override + protected TokenStreamComponents createComponents(String fieldName) { + Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true); + return new TokenStreamComponents(tokenizer) { + @Override + public TokenStream getTokenStream() { + return new CannedBinaryTokenStream(tokens); + } + + @Override + protected void setReader(final Reader reader) { + } + }; + } + } + + /** + * Creates a graph token 
stream with 2 side paths at each position. + **/ + private static CannedBinaryTokenStream.BinaryToken[] createGiantGraph(int numPos) { + List tokens = new ArrayList<>(); + BytesRef term1 = new BytesRef("foo"); + BytesRef term2 = new BytesRef("bar"); + for (int i = 0; i < numPos;) { + if (i % 2 == 0) { + tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); + tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 2)); + i += 2; + } else { + tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); + i++; + } + } + return tokens.toArray(new CannedBinaryTokenStream.BinaryToken[0]); + } + + /** + * Creates a graph token stream with {@link BooleanQuery#getMaxClauseCount()} + * expansions at the last position. + **/ + private static CannedBinaryTokenStream.BinaryToken[] createGiantGraphMultiTerms() { + List tokens = new ArrayList<>(); + BytesRef term1 = new BytesRef("foo"); + BytesRef term2 = new BytesRef("bar"); + tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); + tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 2)); + tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); + tokens.add(new CannedBinaryTokenStream.BinaryToken(term2, 1, 1)); + for (int i = 0; i < BooleanQuery.getMaxClauseCount(); i++) { + tokens.add(new CannedBinaryTokenStream.BinaryToken(term1, 0, 1)); + } + return tokens.toArray(new CannedBinaryTokenStream.BinaryToken[0]); + } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index 7c459737c77..a4bbd1989da 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.document.TextField; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.RandomIndexWriter; 
import org.apache.lucene.index.Term; +import org.apache.lucene.queries.SpanMatchNoDocsQuery; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.MultiTermQuery; @@ -81,6 +82,9 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase((MultiTermQuery)multiTermQuery).getWrappedQuery())); + equalTo(new SpanMultiTermQueryWrapper<>((MultiTermQuery) multiTermQuery).getWrappedQuery())); } public void testIllegalArgument() { @@ -154,6 +158,11 @@ public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase operations, int totalTranslogOps, - long maxSeenAutoIdTimestampOnPrimary) throws IOException { + long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdates) throws IOException { opsSent.set(true); - return super.indexTranslogOperations(operations, totalTranslogOps, maxSeenAutoIdTimestampOnPrimary); + return super.indexTranslogOperations(operations, totalTranslogOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdates); } }; }); @@ -560,7 +564,7 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC (indexShard, node) -> new RecoveryTarget(indexShard, node, recoveryListener, l -> {}) { @Override public long indexTranslogOperations(final List operations, final int totalTranslogOps, - final long maxAutoIdTimestamp) + final long maxAutoIdTimestamp, long maxSeqNoOfUpdates) throws IOException { // index a doc which is not part of the snapshot, but also does not complete on replica replicaEngineFactory.latchIndexers(1); @@ -588,7 +592,7 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC } catch (InterruptedException e) { throw new AssertionError(e); } - return super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp); + return super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdates); } }); pendingDocActiveWithExtraDocIndexed.await(); @@ -718,11 +722,11 @@ public class 
RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC @Override public long indexTranslogOperations(List operations, int totalTranslogOps, - long maxAutoIdTimestamp) throws IOException { + long maxAutoIdTimestamp, long maxSeqNoOfUpdates) throws IOException { if (hasBlocked() == false) { blockIfNeeded(RecoveryState.Stage.TRANSLOG); } - return super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp); + return super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdates); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java index e9f52d7c319..a0bf75ddb13 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java @@ -73,7 +73,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { final int sourceRefCount = open.getRefCount(); final AtomicInteger count = new AtomicInteger(); final AtomicInteger outerCount = new AtomicInteger(); - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { + try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, s -> {}, logger)) { final Engine.Searcher wrap = wrapper.wrap(engineSearcher); assertEquals(1, wrap.reader().getRefCount()); ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> { @@ -121,7 +121,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { } }; final ConcurrentHashMap cache = new ConcurrentHashMap<>(); - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { + try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, s -> {}, logger)) { try (Engine.Searcher wrap = wrapper.wrap(engineSearcher)) { 
ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> { cache.remove(key); @@ -151,7 +151,7 @@ public class IndexSearcherWrapperTests extends ESTestCase { assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); searcher.setSimilarity(iwc.getSimilarity()); IndexSearcherWrapper wrapper = new IndexSearcherWrapper(); - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { + try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher, logger)) { final Engine.Searcher wrap = wrapper.wrap(engineSearcher); assertSame(wrap, engineSearcher); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index e8715f9e8ec..487ac7e0694 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -315,8 +315,8 @@ public class IndexShardTests extends IndexShardTestCase { // expected } try { - indexShard.acquireReplicaOperationPermit(indexShard.getPendingPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, null, - ThreadPool.Names.WRITE, ""); + indexShard.acquireReplicaOperationPermit(indexShard.getPendingPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, + randomNonNegativeLong(), null, ThreadPool.Names.WRITE, ""); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected @@ -327,7 +327,7 @@ public class IndexShardTests extends IndexShardTestCase { IndexShard indexShard = newShard(false); expectThrows(IndexShardNotStartedException.class, () -> indexShard.acquireReplicaOperationPermit(indexShard.getPendingPrimaryTerm() + randomIntBetween(1, 100), - SequenceNumbers.UNASSIGNED_SEQ_NO, null, ThreadPool.Names.WRITE, "")); + SequenceNumbers.UNASSIGNED_SEQ_NO, randomNonNegativeLong(), null, ThreadPool.Names.WRITE, "")); 
closeShards(indexShard); } @@ -351,6 +351,7 @@ public class IndexShardTests extends IndexShardTestCase { indexShard.acquireReplicaOperationPermit( indexShard.getPendingPrimaryTerm(), indexShard.getGlobalCheckpoint(), + indexShard.getMaxSeqNoOfUpdatesOrDeletes(), new ActionListener() { @Override public void onResponse(Releasable releasable) { @@ -484,7 +485,6 @@ public class IndexShardTests extends IndexShardTestCase { final Result result = indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); final int maxSeqNo = result.maxSeqNo; - final boolean gap = result.gap; // promote the replica final ShardRouting replicaRouting = indexShard.routingEntry(); @@ -602,7 +602,7 @@ public class IndexShardTests extends IndexShardTestCase { if (Assertions.ENABLED && indexShard.routingEntry().isRelocationTarget() == false) { assertThat(expectThrows(AssertionError.class, () -> indexShard.acquireReplicaOperationPermit(primaryTerm, - indexShard.getGlobalCheckpoint(), new ActionListener() { + indexShard.getGlobalCheckpoint(), indexShard.getMaxSeqNoOfUpdatesOrDeletes(), new ActionListener() { @Override public void onResponse(Releasable releasable) { fail(); @@ -628,7 +628,8 @@ public class IndexShardTests extends IndexShardTestCase { private Releasable acquireReplicaOperationPermitBlockingly(IndexShard indexShard, long opPrimaryTerm) throws ExecutionException, InterruptedException { PlainActionFuture fut = new PlainActionFuture<>(); - indexShard.acquireReplicaOperationPermit(opPrimaryTerm, indexShard.getGlobalCheckpoint(), fut, ThreadPool.Names.WRITE, ""); + indexShard.acquireReplicaOperationPermit(opPrimaryTerm, indexShard.getGlobalCheckpoint(), + randomNonNegativeLong(), fut, ThreadPool.Names.WRITE, ""); return fut.get(); } @@ -712,8 +713,8 @@ public class IndexShardTests extends IndexShardTestCase { } }; - indexShard.acquireReplicaOperationPermit(primaryTerm - 1, SequenceNumbers.UNASSIGNED_SEQ_NO, onLockAcquired, - ThreadPool.Names.WRITE, 
""); + indexShard.acquireReplicaOperationPermit(primaryTerm - 1, SequenceNumbers.UNASSIGNED_SEQ_NO, + randomNonNegativeLong(), onLockAcquired, ThreadPool.Names.WRITE, ""); assertFalse(onResponse.get()); assertTrue(onFailure.get()); @@ -785,6 +786,7 @@ public class IndexShardTests extends IndexShardTestCase { indexShard.acquireReplicaOperationPermit( newPrimaryTerm, newGlobalCheckPoint, + randomNonNegativeLong(), listener, ThreadPool.Names.SAME, ""); } catch (Exception e) { @@ -836,6 +838,22 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(indexShard); } + public void testAcquireReplicaPermitAdvanceMaxSeqNoOfUpdates() throws Exception { + IndexShard replica = newStartedShard(false); + assertThat(replica.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); + long currentMaxSeqNoOfUpdates = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, Long.MAX_VALUE); + replica.advanceMaxSeqNoOfUpdatesOrDeletes(currentMaxSeqNoOfUpdates); + + long newMaxSeqNoOfUpdates = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, Long.MAX_VALUE); + PlainActionFuture fut = new PlainActionFuture<>(); + replica.acquireReplicaOperationPermit(replica.operationPrimaryTerm, replica.getGlobalCheckpoint(), + newMaxSeqNoOfUpdates, fut, ThreadPool.Names.WRITE, ""); + try (Releasable ignored = fut.actionGet()) { + assertThat(replica.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Math.max(currentMaxSeqNoOfUpdates, newMaxSeqNoOfUpdates))); + } + closeShards(replica); + } + public void testGlobalCheckpointSync() throws IOException { // create the primary shard with a callback that sets a boolean when the global checkpoint sync is invoked final ShardId shardId = new ShardId("index", "_na_", 0); @@ -906,11 +924,14 @@ public class IndexShardTests extends IndexShardTestCase { indexShard.updateGlobalCheckpointOnReplica(globalCheckpointOnReplica, "test"); final long globalCheckpoint = randomLongBetween(SequenceNumbers.UNASSIGNED_SEQ_NO, 
indexShard.getLocalCheckpoint()); + final long currentMaxSeqNoOfUpdates = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); + final long maxSeqNoOfUpdatesOrDeletes = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, maxSeqNo); final Set docsBeforeRollback = getShardDocUIDs(indexShard); final CountDownLatch latch = new CountDownLatch(1); indexShard.acquireReplicaOperationPermit( indexShard.getPendingPrimaryTerm() + 1, globalCheckpoint, + maxSeqNoOfUpdatesOrDeletes, new ActionListener() { @Override public void onResponse(Releasable releasable) { @@ -926,6 +947,9 @@ public class IndexShardTests extends IndexShardTestCase { ThreadPool.Names.SAME, ""); latch.await(); + assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Collections.max( + Arrays.asList(maxSeqNoOfUpdatesOrDeletes, globalCheckpoint, globalCheckpointOnReplica)) + )); final ShardRouting newRouting = indexShard.routingEntry().moveActiveReplicaToPrimary(); final CountDownLatch resyncLatch = new CountDownLatch(1); @@ -941,7 +965,9 @@ public class IndexShardTests extends IndexShardTestCase { assertThat(indexShard.getLocalCheckpoint(), equalTo(maxSeqNo)); assertThat(indexShard.seqNoStats().getMaxSeqNo(), equalTo(maxSeqNo)); assertThat(getShardDocUIDs(indexShard), equalTo(docsBeforeRollback)); - assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(maxSeqNo)); + assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Collections.max( + Arrays.asList(currentMaxSeqNoOfUpdates, maxSeqNoOfUpdatesOrDeletes, globalCheckpoint, globalCheckpointOnReplica)) + )); closeShard(indexShard, false); } @@ -961,9 +987,11 @@ public class IndexShardTests extends IndexShardTestCase { final boolean shouldRollback = Math.max(globalCheckpoint, globalCheckpointOnReplica) < indexShard.seqNoStats().getMaxSeqNo() && indexShard.seqNoStats().getMaxSeqNo() != SequenceNumbers.NO_OPS_PERFORMED; final Engine beforeRollbackEngine = indexShard.getEngine(); + final long newMaxSeqNoOfUpdates = 
randomLongBetween(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), Long.MAX_VALUE); indexShard.acquireReplicaOperationPermit( indexShard.pendingPrimaryTerm + 1, globalCheckpoint, + newMaxSeqNoOfUpdates, new ActionListener() { @Override public void onResponse(final Releasable releasable) { @@ -990,6 +1018,7 @@ public class IndexShardTests extends IndexShardTestCase { } else { assertThat(indexShard.getEngine(), sameInstance(beforeRollbackEngine)); } + assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(newMaxSeqNoOfUpdates)); // ensure that after the local checkpoint throw back and indexing again, the local checkpoint advances final Result result = indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(indexShard.getLocalCheckpoint())); assertThat(indexShard.getLocalCheckpoint(), equalTo((long) result.localCheckpoint)); @@ -1016,6 +1045,7 @@ public class IndexShardTests extends IndexShardTestCase { indexShard.acquireReplicaOperationPermit( primaryTerm + increment, indexShard.getGlobalCheckpoint(), + randomNonNegativeLong(), new ActionListener() { @Override public void onResponse(Releasable releasable) { @@ -1628,6 +1658,7 @@ public class IndexShardTests extends IndexShardTestCase { * - If flush and then recover from the existing store, delete #1 will be removed while index #0 is still retained and replayed. 
*/ final IndexShard shard = newStartedShard(false); + shard.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id"); shard.getEngine().rollTranslogGeneration(); // isolate the delete in it's own generation shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, @@ -1748,7 +1779,7 @@ public class IndexShardTests extends IndexShardTestCase { public void testRecoverFromStoreWithNoOps() throws IOException { final IndexShard shard = newStartedShard(true); indexDoc(shard, "_doc", "0"); - Engine.IndexResult test = indexDoc(shard, "_doc", "1"); + indexDoc(shard, "_doc", "1"); // start a replica shard and index the second doc final IndexShard otherShard = newStartedShard(false); updateMappings(otherShard, shard.indexSettings().getIndexMetaData()); @@ -2193,9 +2224,10 @@ public class IndexShardTests extends IndexShardTestCase { new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> { }) { @Override - public long indexTranslogOperations(List operations, int totalTranslogOps, - long maxSeenAutoIdTimestamp) throws IOException { - final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps, maxSeenAutoIdTimestamp); + public long indexTranslogOperations(List operations, int totalTranslogOps, long maxSeenAutoIdTimestamp, + long maxSeqNoOfUpdatesOrDeletes) throws IOException { + final long localCheckpoint = super.indexTranslogOperations( + operations, totalTranslogOps, maxSeenAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); assertFalse(replica.isSyncNeeded()); return localCheckpoint; } @@ -2302,8 +2334,9 @@ public class IndexShardTests extends IndexShardTestCase { }) { @Override public long indexTranslogOperations(List operations, int totalTranslogOps, - long maxAutoIdTimestamp) throws IOException { - final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp); + long 
maxAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes) throws IOException { + final long localCheckpoint = super.indexTranslogOperations( + operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); // Shard should now be active since we did recover: assertTrue(replica.isActive()); return localCheckpoint; @@ -2350,8 +2383,9 @@ public class IndexShardTests extends IndexShardTestCase { @Override public long indexTranslogOperations(List operations, int totalTranslogOps, - long maxAutoIdTimestamp) throws IOException { - final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps, maxAutoIdTimestamp); + long maxAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes) throws IOException { + final long localCheckpoint = super.indexTranslogOperations( + operations, totalTranslogOps, maxAutoIdTimestamp, maxSeqNoOfUpdatesOrDeletes); assertListenerCalled.accept(replica); return localCheckpoint; } @@ -2842,12 +2876,10 @@ public class IndexShardTests extends IndexShardTestCase { class Result { private final int localCheckpoint; private final int maxSeqNo; - private final boolean gap; - Result(final int localCheckpoint, final int maxSeqNo, final boolean gap) { + Result(final int localCheckpoint, final int maxSeqNo) { this.localCheckpoint = localCheckpoint; this.maxSeqNo = maxSeqNo; - this.gap = gap; } } @@ -2886,7 +2918,7 @@ public class IndexShardTests extends IndexShardTestCase { } assert localCheckpoint == indexShard.getLocalCheckpoint(); assert !gap || (localCheckpoint != max); - return new Result(localCheckpoint, max, gap); + return new Result(localCheckpoint, max); } /** A dummy repository for testing which just needs restore overridden */ @@ -3434,6 +3466,7 @@ public class IndexShardTests extends IndexShardTestCase { assertThat(getShardDocUIDs(shard), equalTo(docBelowGlobalCheckpoint)); assertThat(shard.seqNoStats().getMaxSeqNo(), equalTo(globalCheckpoint)); assertThat(shard.translogStats().estimatedNumberOfOperations(), 
equalTo(translogStats.estimatedNumberOfOperations())); + assertThat(shard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(globalCheckpoint)); closeShard(shard, false); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index cbc08b19e8a..25f6bb75cc8 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -44,7 +44,6 @@ import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineConfig; import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.engine.InternalEngine; -import org.elasticsearch.index.fieldvisitor.SingleFieldsVisitor; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; @@ -324,9 +323,9 @@ public class RefreshListenersTests extends ESTestCase { try (Engine.GetResult getResult = engine.get(get, engine::acquireSearcher)) { assertTrue("document not found", getResult.exists()); assertEquals(iteration, getResult.version()); - SingleFieldsVisitor visitor = new SingleFieldsVisitor("test"); - getResult.docIdAndVersion().reader.document(getResult.docIdAndVersion().docId, visitor); - assertEquals(Arrays.asList(testFieldValue), visitor.fields().get("test")); + org.apache.lucene.document.Document document = + getResult.docIdAndVersion().reader.document(getResult.docIdAndVersion().docId); + assertEquals(new String[] {testFieldValue}, document.getValues("test")); } } catch (Exception t) { throw new RuntimeException("failure on the [" + iteration + "] iteration of thread [" + threadId + "]", t); diff --git a/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java b/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java index 
9a889801c6c..7d63286c44e 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/StoreRecoveryTests.java @@ -39,12 +39,12 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.engine.InternalEngine; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -118,7 +118,7 @@ public class StoreRecoveryTests extends ESTestCase { final Map userData = segmentCommitInfos.getUserData(); assertThat(userData.get(SequenceNumbers.MAX_SEQ_NO), equalTo(Long.toString(maxSeqNo))); assertThat(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY), equalTo(Long.toString(maxSeqNo))); - assertThat(userData.get(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID), equalTo(Long.toString(maxUnsafeAutoIdTimestamp))); + assertThat(userData.get(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID), equalTo(Long.toString(maxUnsafeAutoIdTimestamp))); for (SegmentCommitInfo info : segmentCommitInfos) { // check that we didn't merge assertEquals("all sources must be flush", info.info.getDiagnostics().get("source"), "flush"); @@ -142,7 +142,6 @@ public class StoreRecoveryTests extends ESTestCase { } else { indexSort = null; } - int id = 0; IndexWriterConfig iwc = newIndexWriterConfig() .setMergePolicy(NoMergePolicy.INSTANCE) .setOpenMode(IndexWriterConfig.OpenMode.CREATE); @@ -179,7 +178,7 @@ public class StoreRecoveryTests extends 
ESTestCase { final Map userData = segmentCommitInfos.getUserData(); assertThat(userData.get(SequenceNumbers.MAX_SEQ_NO), equalTo(Long.toString(maxSeqNo))); assertThat(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY), equalTo(Long.toString(maxSeqNo))); - assertThat(userData.get(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID), equalTo(Long.toString(maxUnsafeAutoIdTimestamp))); + assertThat(userData.get(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID), equalTo(Long.toString(maxUnsafeAutoIdTimestamp))); for (SegmentCommitInfo info : segmentCommitInfos) { // check that we didn't merge assertEquals("all sources must be flush", info.info.getDiagnostics().get("source"), "flush"); diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 584ce9b0642..6546e6ebc8c 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -364,10 +364,8 @@ public class StoreTests extends ESTestCase { } - final long luceneChecksum; try (IndexInput indexInput = dir.openInput("lucene_checksum.bin", IOContext.DEFAULT)) { assertEquals(luceneFileLength, indexInput.length()); - luceneChecksum = CodecUtil.retrieveChecksum(indexInput); } dir.close(); @@ -663,7 +661,6 @@ public class StoreTests extends ESTestCase { if (randomBoolean()) { store.cleanupAndVerify("test", firstMeta); String[] strings = store.directory().listAll(); - int numChecksums = 0; int numNotFound = 0; for (String file : strings) { if (file.startsWith("extra")) { @@ -679,7 +676,6 @@ public class StoreTests extends ESTestCase { } else { store.cleanupAndVerify("test", secondMeta); String[] strings = store.directory().listAll(); - int numChecksums = 0; int numNotFound = 0; for (String file : strings) { if (file.startsWith("extra")) { diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java 
b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 45bf7a700aa..9d18845a05e 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -639,12 +639,8 @@ public class TranslogTests extends ESTestCase { assertTrue(Files.exists(translogDir.resolve(Translog.getFilename(1)))); translog.add(new Translog.Index("test", "1", 0, primaryTerm.get(), new byte[]{1})); translog.close(); - try { - Translog.Snapshot snapshot = translog.newSnapshot(); - fail("translog is closed"); - } catch (AlreadyClosedException ex) { - assertEquals(ex.getMessage(), "translog is already closed"); - } + AlreadyClosedException ex = expectThrows(AlreadyClosedException.class, () -> translog.newSnapshot()); + assertEquals(ex.getMessage(), "translog is already closed"); } public void testSnapshotFromMinGen() throws Exception { @@ -845,7 +841,7 @@ public class TranslogTests extends ESTestCase { try (Translog translog = openTranslog(config, uuid)) { try (Translog.Snapshot snapshot = translog.newSnapshot()) { - for (Location loc : locations) { + for (int i = 0; i < locations.size(); i++) { snapshot.next(); } } @@ -871,7 +867,7 @@ public class TranslogTests extends ESTestCase { AtomicInteger truncations = new AtomicInteger(0); try (Translog.Snapshot snap = translog.newSnapshot()) { - for (Translog.Location location : locations) { + for (int i = 0; i < locations.size(); i++) { try { assertNotNull(snap.next()); } catch (EOFException e) { @@ -2378,6 +2374,7 @@ public class TranslogTests extends ESTestCase { } + @Override public int write(ByteBuffer src) throws IOException { if (fail.fail()) { if (partialWrite) { @@ -2486,14 +2483,9 @@ public class TranslogTests extends ESTestCase { // don't copy the new file Files.createFile(config.getTranslogPath().resolve("translog-" + (read.generation + 1) + ".tlog")); - try { - Translog tlog = new Translog(config, 
translog.getTranslogUUID(), translog.getDeletionPolicy(), () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get); - fail("file already exists?"); - } catch (TranslogException ex) { - // all is well - assertEquals(ex.getMessage(), "failed to create new translog file"); - assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class); - } + TranslogException ex = expectThrows(TranslogException.class, () -> new Translog(config, translog.getTranslogUUID(), translog.getDeletionPolicy(), () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get)); + assertEquals(ex.getMessage(), "failed to create new translog file"); + assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class); } public void testRecoverWithUnbackedNextGenAndFutureFile() throws IOException { @@ -2521,14 +2513,10 @@ public class TranslogTests extends ESTestCase { tlog.add(new Translog.Index("test", "" + 1, 1, primaryTerm.get(), Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } - try { - Translog tlog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get); - fail("file already exists?"); - } catch (TranslogException ex) { - // all is well - assertEquals(ex.getMessage(), "failed to create new translog file"); - assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class); - } + TranslogException ex = expectThrows(TranslogException.class, + () -> new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get)); + assertEquals(ex.getMessage(), "failed to create new translog file"); + assertEquals(ex.getCause().getClass(), FileAlreadyExistsException.class); } /** diff --git a/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java b/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java index 01d7dc2a535..e5a23b155e8 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java 
+++ b/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java @@ -295,7 +295,6 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { .put("indices.memory.index_buffer_size", "4mb").build()); IndexShard shard0 = test.getShard(0); IndexShard shard1 = test.getShard(1); - IndexShard shard2 = test.getShard(2); controller.simulateIndexing(shard0); controller.simulateIndexing(shard0); controller.simulateIndexing(shard0); diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java index c68e4870aae..e3c15ceda1d 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java @@ -324,7 +324,6 @@ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestC * Mock for {@link IndexShard} */ protected class MockIndexShard implements IndicesClusterStateService.Shard { - private volatile long clusterStateVersion; private volatile ShardRouting shardRouting; private volatile RecoveryState recoveryState; private volatile Set inSyncAllocationIds; @@ -372,7 +371,6 @@ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestC this.shardRouting = shardRouting; if (shardRouting.primary()) { term = newPrimaryTerm; - this.clusterStateVersion = applyingClusterStateVersion; this.inSyncAllocationIds = inSyncAllocationIds; this.routingTable = routingTable; } diff --git a/server/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index af9bf9910ec..2e54490ed78 100644 --- a/server/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ 
b/server/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -74,7 +74,6 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); int recCount = randomIntBetween(200, 600); - int numberOfTypes = randomIntBetween(1, 5); List indexRequests = new ArrayList<>(); for (int rec = 0; rec < recCount; rec++) { String type = "type"; diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 6a6970675eb..324b32fa6a9 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -57,7 +57,6 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.test.store.MockFSDirectoryService; import org.elasticsearch.test.store.MockFSIndexStore; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.test.transport.StubbableTransport; @@ -550,7 +549,6 @@ public class IndexRecoveryIT extends ESIntegTestCase { final Settings nodeSettings = Settings.builder() .put(RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), "100ms") .put(RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(), "1s") - .put(MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING.getKey(), false) // restarted recoveries will delete temp files and write them again .build(); // start a master node internalCluster().startNode(nodeSettings); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java 
b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 9b17962f91b..7791e51445a 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -207,11 +207,12 @@ public class RecoverySourceHandlerTests extends ESTestCase { public Translog.Operation next() throws IOException { return operations.get(counter++); } - }, randomNonNegativeLong()); + }, randomNonNegativeLong(), randomNonNegativeLong()); final int expectedOps = (int) (endingSeqNo - startingSeqNo + 1); assertThat(result.totalOperations, equalTo(expectedOps)); final ArgumentCaptor shippedOpsCaptor = ArgumentCaptor.forClass(List.class); - verify(recoveryTarget).indexTranslogOperations(shippedOpsCaptor.capture(), ArgumentCaptor.forClass(Integer.class).capture(), ArgumentCaptor.forClass(Long.class).capture()); + verify(recoveryTarget).indexTranslogOperations(shippedOpsCaptor.capture(), ArgumentCaptor.forClass(Integer.class).capture(), + ArgumentCaptor.forClass(Long.class).capture(), ArgumentCaptor.forClass(Long.class).capture()); List shippedOps = new ArrayList<>(); for (List list: shippedOpsCaptor.getAllValues()) { shippedOps.addAll(list); @@ -249,7 +250,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { } while (op != null && opsToSkip.contains(op)); return op; } - }, randomNonNegativeLong())); + }, randomNonNegativeLong(), randomNonNegativeLong())); } } @@ -421,7 +422,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { @Override long phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot, - long maxSeenAutoIdTimestamp) { + long maxSeenAutoIdTimestamp, long maxSeqNoOfUpdatesOrDeletes) { phase2Called.set(true); return SequenceNumbers.UNASSIGNED_SEQ_NO; } diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java 
b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index 45535e19672..2a53c79448d 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -127,6 +127,7 @@ public class RecoveryTests extends ESIndexLevelReplicationTestCase { final String indexName = orgReplica.shardId().getIndexName(); // delete #1 + orgReplica.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete orgReplica.applyDeleteOperationOnReplica(1, 2, "type", "id"); getTranslog(orgReplica).rollGeneration(); // isolate the delete in it's own generation // index #0 @@ -190,6 +191,7 @@ public class RecoveryTests extends ESIndexLevelReplicationTestCase { final String indexName = orgReplica.shardId().getIndexName(); // delete #1 + orgReplica.advanceMaxSeqNoOfUpdatesOrDeletes(1); // manually advance msu for this delete orgReplica.applyDeleteOperationOnReplica(1, 2, "type", "id"); orgReplica.flush(new FlushRequest().force(true)); // isolate delete#1 in its own translog generation and lucene segment // index #0 diff --git a/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 7e99ccbbe61..f9028a51a3c 100644 --- a/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -94,7 +94,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { } public void testIndexCleanup() throws Exception { - final String masterNode = internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false)); + internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false)); final String node_1 = 
internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false)); final String node_2 = internalCluster().startNode(Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false)); logger.info("--> creating index [test] with one shard and on replica"); @@ -325,7 +325,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { } public void testShardActiveElsewhereDoesNotDeleteAnother() throws Exception { - final String masterNode = internalCluster().startMasterOnlyNode(); + internalCluster().startMasterOnlyNode(); final List nodes = internalCluster().startDataOnlyNodes(4); final String node1 = nodes.get(0); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java index e3f52f35b79..140c09a93fd 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java @@ -812,6 +812,45 @@ public class IngestServiceTests extends ESTestCase { assertThat(ingestService.stats().getStatsPerPipeline(), not(hasKey("_id2"))); } + public void testExecuteWithDrop() { + Map factories = new HashMap<>(); + factories.put("drop", new DropProcessor.Factory()); + factories.put("mock", (processorFactories, tag, config) -> new Processor() { + @Override + public IngestDocument execute(final IngestDocument ingestDocument) { + throw new AssertionError("Document should have been dropped but reached this processor"); + } + + @Override + public String getType() { + return null; + } + + @Override + public String getTag() { + return null; + } + }); + IngestService ingestService = createWithProcessors(factories); + PutPipelineRequest putRequest = new PutPipelineRequest("_id", + new BytesArray("{\"processors\": [{\"drop\" : {}}, {\"mock\" : {}}]}"), XContentType.JSON); + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")).build(); // Start empty + 
ClusterState previousClusterState = clusterState; + clusterState = IngestService.innerPut(putRequest, clusterState); + ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState)); + final IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(emptyMap()).setPipeline("_id"); + @SuppressWarnings("unchecked") + final BiConsumer failureHandler = mock(BiConsumer.class); + @SuppressWarnings("unchecked") + final Consumer completionHandler = mock(Consumer.class); + @SuppressWarnings("unchecked") + final Consumer dropHandler = mock(Consumer.class); + ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, dropHandler); + verify(failureHandler, never()).accept(any(), any()); + verify(completionHandler, times(1)).accept(null); + verify(dropHandler, times(1)).accept(indexRequest); + } + private IngestDocument eqIndexTypeId(final Map source) { return argThat(new IngestDocumentMatcher("_index", "_type", "_id", source)); } diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java index 906ecf23205..50bcf594926 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksNodeServiceTests.java @@ -31,16 +31,16 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; +import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestParams; +import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestPersistentTasksExecutor; 
import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData.PersistentTask; -import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestParams; -import org.elasticsearch.persistent.TestPersistentTasksPlugin.TestPersistentTasksExecutor; import org.junit.After; import org.junit.Before; @@ -334,13 +334,11 @@ public class PersistentTasksNodeServiceTests extends ESTestCase { private final PersistentTaskParams params; private final AllocatedPersistentTask task; private final PersistentTaskState state; - private final PersistentTasksExecutor holder; - Execution(PersistentTaskParams params, AllocatedPersistentTask task, PersistentTaskState state, PersistentTasksExecutor holder) { + Execution(PersistentTaskParams params, AllocatedPersistentTask task, PersistentTaskState state) { this.params = params; this.task = task; this.state = state; - this.holder = holder; } } @@ -356,7 +354,7 @@ public class PersistentTasksNodeServiceTests extends ESTestCase { final PersistentTaskState state, final AllocatedPersistentTask task, final PersistentTasksExecutor executor) { - executions.add(new Execution(params, task, state, executor)); + executions.add(new Execution(params, task, state)); } public Execution get(int i) { diff --git a/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java index 50035e1027b..6624d4eb8de 100644 --- a/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java @@ -148,7 +148,7 @@ public class FullRollingRestartIT extends 
ESIntegTestCase { } internalCluster().restartRandomDataNode(); ensureGreen(); - ClusterState afterState = client().admin().cluster().prepareState().get().getState(); + client().admin().cluster().prepareState().get().getState(); recoveryResponse = client().admin().indices().prepareRecoveries("test").get(); for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) { diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java index 05c9746aa49..28537022e3f 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java @@ -54,7 +54,6 @@ public class RepositoriesServiceIT extends ESIntegTestCase { final Client client = client(); final RepositoriesService repositoriesService = cluster.getDataOrMasterNodeInstances(RepositoriesService.class).iterator().next(); - final Settings settings = cluster.getDefaultSettings(); final Settings.Builder repoSettings = Settings.builder().put("location", randomRepoPath()); diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index cbf55428971..d35a8b5d249 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -212,7 +212,6 @@ public class RestControllerTests extends ESTestCase { }; final RestController restController = new RestController(Settings.EMPTY, Collections.emptySet(), wrapper, null, circuitBreakerService, usageService); - final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); restController.dispatchRequest(new FakeRestRequest.Builder(xContentRegistry()).build(), null, null, Optional.of(handler)); assertTrue(wrapperCalled.get()); assertFalse(handlerCalled.get()); 
diff --git a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java index ebe8ae00ac0..e5e8bce6d6d 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestHttpResponseHeadersTests.java @@ -40,7 +40,6 @@ import java.util.stream.Collectors; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; -import static org.mockito.Mockito.mock; public class RestHttpResponseHeadersTests extends ESTestCase { @@ -114,7 +113,6 @@ public class RestHttpResponseHeadersTests extends ESTestCase { // Send the request and verify the response status code FakeRestChannel restChannel = new FakeRestChannel(restRequest, false, 1); - NodeClient client = mock(NodeClient.class); restController.dispatchRequest(restRequest, restChannel, new ThreadContext(Settings.EMPTY)); assertThat(restChannel.capturedResponse().status().getStatus(), is(405)); diff --git a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java index 79d5c67bc78..d628561e2c6 100644 --- a/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java +++ b/server/src/test/java/org/elasticsearch/script/StoredScriptSourceTests.java @@ -35,7 +35,6 @@ public class StoredScriptSourceTests extends AbstractSerializingTestCase testSearchCase(query, timestamps, + expectThrows(TooManyBucketsException.class, () -> testSearchCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD), histogram -> {}, 2)); - exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, 
timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD), histogram -> {}, 2)); - exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(0L), histogram -> {}, 100)); - exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)) .field(DATE_FIELD) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index 40c3bfb500e..e1aa1dfce3f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -439,8 +439,7 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { } public void testBadSigmaAsSubAgg() throws Exception { - try { - SearchResponse response = client() + Exception ex = expectThrows(Exception.class, () -> client() .prepareSearch("idx") .addAggregation( terms("terms") @@ -451,21 +450,18 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase { .extendedBounds(minRandomValue, maxRandomValue) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum") - .sigma(-1.0))).execute().actionGet(); - fail("Illegal sigma was provided but no exception was thrown."); - } catch (Exception e) { - Throwable cause = ExceptionsHelper.unwrapCause(e); - if (cause == null) { - throw 
e; - } else if (cause instanceof SearchPhaseExecutionException) { - SearchPhaseExecutionException spee = (SearchPhaseExecutionException) e; - Throwable rootCause = spee.getRootCause(); - if (!(rootCause instanceof IllegalArgumentException)) { - throw e; - } - } else if (!(cause instanceof IllegalArgumentException)) { - throw e; + .sigma(-1.0))).execute().actionGet()); + Throwable cause = ExceptionsHelper.unwrapCause(ex); + if (cause == null) { + throw ex; + } else if (cause instanceof SearchPhaseExecutionException) { + SearchPhaseExecutionException spee = (SearchPhaseExecutionException) ex; + Throwable rootCause = spee.getRootCause(); + if (!(rootCause instanceof IllegalArgumentException)) { + throw ex; } + } else if (!(cause instanceof IllegalArgumentException)) { + throw ex; } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java index c3075da8271..4841c5e596a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java @@ -21,15 +21,26 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.filter.Filter; +import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; +import 
org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.Sum; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; @@ -475,4 +486,56 @@ public class MaxBucketIT extends ESIntegTestCase { assertThat(maxBucketValue.value(), equalTo(maxTermsValue)); assertThat(maxBucketValue.keys(), equalTo(maxTermsKeys.toArray(new String[maxTermsKeys.size()]))); } + + /** + * https://github.com/elastic/elasticsearch/issues/33514 + * + * This bug manifests as the max_bucket agg ("peak") being added to the response twice, because + * the pipeline agg is run twice. This makes invalid JSON and breaks conversion to maps. + * The bug was caused by an UnmappedTerms being the chosen as the first reduction target. UnmappedTerms + * delegated reduction to the first non-unmapped agg, which would reduce and run pipeline aggs. But then + * execution returns to the UnmappedTerms and _it_ runs pipelines as well, doubling up on the values. + * + * Applies to any pipeline agg, not just max. 
+ */ + public void testFieldIsntWrittenOutTwice() throws Exception { + // you need to add an additional index with no fields in order to trigger this (or potentially a shard) + // so that there is an UnmappedTerms in the list to reduce. + createIndex("foo_1"); + + XContentBuilder builder = jsonBuilder().startObject().startObject("properties") + .startObject("@timestamp").field("type", "date").endObject() + .startObject("license").startObject("properties") + .startObject("count").field("type", "long").endObject() + .startObject("partnumber").field("type", "text").startObject("fields").startObject("keyword") + .field("type", "keyword").field("ignore_above", 256) + .endObject().endObject().endObject() + .endObject().endObject().endObject().endObject(); + assertAcked(client().admin().indices().prepareCreate("foo_2") + .addMapping("doc", builder).get()); + + XContentBuilder docBuilder = jsonBuilder().startObject() + .startObject("license").field("partnumber", "foobar").field("count", 2).endObject() + .field("@timestamp", "2018-07-08T08:07:00.599Z") + .endObject(); + + client().prepareIndex("foo_2", "doc").setSource(docBuilder).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + + client().admin().indices().prepareRefresh(); + + TermsAggregationBuilder groupByLicenseAgg = AggregationBuilders.terms("group_by_license_partnumber") + .field("license.partnumber.keyword"); + MaxBucketPipelineAggregationBuilder peakPipelineAggBuilder = + PipelineAggregatorBuilders.maxBucket("peak", "licenses_per_day>total_licenses"); + SumAggregationBuilder sumAggBuilder = AggregationBuilders.sum("total_licenses").field("license.count"); + DateHistogramAggregationBuilder licensePerDayBuilder = + AggregationBuilders.dateHistogram("licenses_per_day").field("@timestamp").dateHistogramInterval(DateHistogramInterval.DAY); + licensePerDayBuilder.subAggregation(sumAggBuilder); + groupByLicenseAgg.subAggregation(licensePerDayBuilder); + 
groupByLicenseAgg.subAggregation(peakPipelineAggBuilder); + + SearchResponse response = client().prepareSearch("foo_*").setSize(0).addAggregation(groupByLicenseAgg).get(); + BytesReference bytes = XContentHelper.toXContent(response, XContentType.JSON, false); + XContentHelper.convertToMap(bytes, false, XContentType.JSON); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index d14f93b7a51..41bbf053ff1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; @@ -411,7 +412,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts","_count") .window(windowSize) @@ -459,7 +460,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) 
.subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) @@ -507,7 +508,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) @@ -555,7 +556,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) @@ -604,7 +605,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) @@ -708,7 +709,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) .subAggregation(movingAvg("movavg_counts", "the_metric") .window(0) @@ -746,7 +747,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) 
.subAggregation(randomMetric("the_metric", VALUE_FIELD)) .subAggregation(movingAvg("movavg_counts", "_count") .window(-10) @@ -810,7 +811,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) .subAggregation(movingAvg("movavg_counts", "the_metric") .window(windowSize) @@ -831,7 +832,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(randomMetric("the_metric", VALUE_FIELD)) .subAggregation(movingAvg("movavg_counts", "the_metric") .window(windowSize) @@ -846,13 +847,13 @@ public class MovAvgIT extends ESIntegTestCase { } } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/34046") public void testHoltWintersNotEnoughData() { - try { - SearchResponse response = client() - .prepareSearch("idx").setTypes("type") + Client client = client(); + expectThrows(SearchPhaseExecutionException.class, () -> client.prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(10) @@ -864,11 +865,7 @@ public class MovAvgIT extends ESIntegTestCase { .modelBuilder(new HoltWintersModel.HoltWintersModelBuilder() .alpha(alpha).beta(beta).gamma(gamma).period(20).seasonalityType(seasonalityType)) .gapPolicy(gapPolicy)) - ).execute().actionGet(); - } catch (SearchPhaseExecutionException e) { - // All good - } 
- + ).execute().actionGet()); } public void testTwoMovAvgsWithPredictions() { @@ -982,23 +979,19 @@ public class MovAvgIT extends ESIntegTestCase { } } + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/34046") public void testBadModelParams() { - try { - SearchResponse response = client() + expectThrows(SearchPhaseExecutionException.class, () -> client() .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(10) .modelBuilder(randomModelBuilder(100)) .gapPolicy(gapPolicy)) - ).execute().actionGet(); - } catch (SearchPhaseExecutionException e) { - // All good - } - + ).execute().actionGet()); } public void testHoltWintersMinimization() { @@ -1006,7 +999,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(windowSize) @@ -1092,7 +1085,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) @@ -1146,7 +1139,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets 
- 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) @@ -1164,7 +1157,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) @@ -1194,7 +1187,7 @@ public class MovAvgIT extends ESIntegTestCase { .prepareSearch("idx").setTypes("type") .addAggregation( histogram("histo").field(INTERVAL_FIELD).interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .extendedBounds(0L, interval * (numBuckets - 1)) .subAggregation(metric) .subAggregation(movingAvg("movavg_counts", "_count") .window(numBuckets) diff --git a/server/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 0912236e018..20f73b5903d 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.fetch; - +import org.apache.logging.log4j.LogManager; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; @@ -30,7 +30,6 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.termvectors.TermVectorsService; @@ -146,7 +145,7 @@ 
public class FetchSubPhasePluginIT extends ESIntegTestCase { } hitField.getValues().add(tv); } catch (IOException e) { - ESLoggerFactory.getLogger(FetchSubPhasePluginIT.class.getName()).info("Swallowed exception", e); + LogManager.getLogger(FetchSubPhasePluginIT.class).info("Swallowed exception", e); } } } diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 8d2f7cd6993..ce6c00f359f 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.geo; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; @@ -42,7 +43,6 @@ import org.elasticsearch.common.geo.builders.LineStringBuilder; import org.elasticsearch.common.geo.builders.MultiPolygonBuilder; import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.geo.builders.PolygonBuilder; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -471,8 +471,7 @@ public class GeoFilterIT extends ESIntegTestCase { return true; } catch (UnsupportedSpatialOperation e) { final SpatialOperation finalRelation = relation; - ESLoggerFactory - .getLogger(GeoFilterIT.class.getName()) + LogManager.getLogger(GeoFilterIT.class) .info(() -> new ParameterizedMessage("Unsupported spatial operation {}", finalRelation), e); return false; } diff --git a/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java index 
1195893a28a..e3ae802baba 100644 --- a/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java +++ b/server/src/test/java/org/elasticsearch/search/lookup/LeafFieldsLookupTests.java @@ -31,7 +31,6 @@ import org.junit.Before; import java.util.Collections; import java.util.List; -import static org.mockito.AdditionalAnswers.returnsFirstArg; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyInt; import static org.mockito.Matchers.anyObject; @@ -48,7 +47,9 @@ public class LeafFieldsLookupTests extends ESTestCase { MappedFieldType fieldType = mock(MappedFieldType.class); when(fieldType.name()).thenReturn("field"); - when(fieldType.valueForDisplay(anyObject())).then(returnsFirstArg()); + // Add 10 when valueForDisplay is called so it is easy to be sure it *was* called + when(fieldType.valueForDisplay(anyObject())).then(invocation -> + (Double) invocation.getArguments()[0] + 10); MapperService mapperService = mock(MapperService.class); when(mapperService.fullName("field")).thenReturn(fieldType); @@ -77,7 +78,7 @@ public class LeafFieldsLookupTests extends ESTestCase { List values = fieldLookup.getValues(); assertNotNull(values); assertEquals(1, values.size()); - assertEquals(2.718, values.get(0)); + assertEquals(12.718, values.get(0)); } public void testLookupWithFieldAlias() { @@ -87,6 +88,6 @@ public class LeafFieldsLookupTests extends ESTestCase { List values = fieldLookup.getValues(); assertNotNull(values); assertEquals(1, values.size()); - assertEquals(2.718, values.get(0)); + assertEquals(12.718, values.get(0)); } } diff --git a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index dedd0f03664..607133ea8f2 100644 --- a/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/test/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -23,7 +23,6 @@ import 
org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -213,7 +212,6 @@ public class MoreLikeThisIT extends ESIntegTestCase { } public void testMoreLikeThisIssue2197() throws Exception { - Client client = client(); String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("bar") .startObject("properties") .endObject() diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index ba58a79953b..b7a9c8cb69a 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -82,7 +82,7 @@ public class QueryProfilerTests extends ESTestCase { } reader = w.getReader(); w.close(); - Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader)); + Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader), null); searcher = new ContextIndexSearcher(engineSearcher, IndexSearcher.getDefaultQueryCache(), MAYBE_CACHE_POLICY); } @@ -363,7 +363,7 @@ public class QueryProfilerTests extends ESTestCase { public void testApproximations() throws IOException { QueryProfiler profiler = new QueryProfiler(); - Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader)); + Engine.Searcher engineSearcher = new Engine.Searcher("test", new IndexSearcher(reader), logger); // disable query caching since we want to test approximations, which won't 
// be exposed on a cached entry ContextIndexSearcher searcher = new ContextIndexSearcher(engineSearcher, null, MAYBE_CACHE_POLICY); diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 860c3e074f3..a861cc76655 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1744,7 +1744,7 @@ public class SearchQueryIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).getId(), is("3")); // When we use long values, it means we have ms since epoch UTC based so we don't apply any transformation - Exception e = expectThrows(SearchPhaseExecutionException.class, () -> + expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("test") .setQuery(QueryBuilders.rangeQuery("date").from(1388534400000L).to(1388537940999L).timeZone("+01:00")) .get()); diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 700b3949fac..24621a12d39 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -201,12 +201,10 @@ public class QueryRescorerBuilderTests extends ESTestCase { rescoreBuilder.setRescoreQueryWeight(randomFloat()); rescoreBuilder.setScoreMode(QueryRescoreMode.Max); - QueryRescoreContext rescoreContext = (QueryRescoreContext) rescoreBuilder.buildContext(mockShardContext); QueryRescorerBuilder rescoreRewritten = rescoreBuilder.rewrite(mockShardContext); assertEquals(rescoreRewritten.getQueryWeight(), rescoreBuilder.getQueryWeight(), 0.01f); assertEquals(rescoreRewritten.getRescoreQueryWeight(), rescoreBuilder.getRescoreQueryWeight(), 0.01f); 
assertEquals(rescoreRewritten.getScoreMode(), rescoreBuilder.getScoreMode()); - } /** diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index 995a2c10fe5..98ed6a4a598 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -972,11 +972,8 @@ public class SuggestSearchIT extends ESIntegTestCase { assertSuggestionSize(searchSuggest, 0, 25480, "title"); // Just to prove that we've run through a ton of options suggest.size(1); - long start = System.currentTimeMillis(); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", "title", suggest); - long total = System.currentTimeMillis() - start; assertSuggestion(searchSuggest, 0, 0, "title", "united states house of representatives elections in washington 2006"); - // assertThat(total, lessThan(1000L)); // Takes many seconds without fix - just for debugging } public void testSuggestWithFieldAlias() throws Exception { @@ -1168,7 +1165,7 @@ public class SuggestSearchIT extends ESIntegTestCase { .endObject() .endObject()); - PhraseSuggestionBuilder in = suggest.collateQuery(filterStr); + suggest.collateQuery(filterStr); try { searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); fail("Post filter error has been swallowed"); @@ -1186,7 +1183,6 @@ public class SuggestSearchIT extends ESIntegTestCase { .endObject()); - PhraseSuggestionBuilder phraseSuggestWithNoParams = suggest.collateQuery(collateWithParams); try { searchSuggest("united states house of representatives elections in washington 2006", numShards.numPrimaries, namedSuggestion); fail("Malformed query (lack of additional params) should fail"); diff --git 
a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 88e6ce64666..f7423d3f55a 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -45,7 +45,6 @@ import static org.hamcrest.Matchers.instanceOf; public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTestCase { private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[] { CompletionSuggestionBuilder.CONTEXTS_FIELD.getPreferredName() }; - private static final Map> contextMap = new HashMap<>(); private static String categoryContextName; private static String geoQueryContextName; private static List> contextMappings = new ArrayList<>(); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 632a1ecbee1..8e907fe8a1a 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -2819,7 +2819,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas Predicate isRestorableIndex = index -> corruptedIndex.getName().equals(index) == false; - RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap") + client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap") .setIndices(nbDocsPerIndex.keySet().stream().filter(isRestorableIndex).toArray(String[]::new)) .setRestoreGlobalState(randomBoolean()) .setWaitForCompletion(true) diff --git 
a/server/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/server/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index cec5f9b1be2..5f286a5ff0a 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -55,7 +55,7 @@ public class SimpleThreadPoolIT extends ESIntegTestCase { } } logger.info("pre node threads are {}", preNodeStartThreadNames); - String node = internalCluster().startNode(); + internalCluster().startNode(); logger.info("do some indexing, flushing, optimize, and searches"); int numDocs = randomIntBetween(2, 100); IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs]; diff --git a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index ea281f7d9ae..c004ed9b3bc 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -61,7 +61,6 @@ public class UpdateThreadPoolSettingsTests extends ESThreadPoolTestCase { } public void testWriteThreadPoolsMaxSize() throws InterruptedException { - final String name = Names.WRITE; final int maxSize = 1 + EsExecutors.numberOfProcessors(Settings.EMPTY); final int tooBig = randomIntBetween(1 + maxSize, Integer.MAX_VALUE); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index f5d23c4f3f8..e77180508b6 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -53,6 +53,7 @@ import org.elasticsearch.index.IndexNotFoundException; import 
org.elasticsearch.mocksocket.MockServerSocket; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.test.transport.StubbableTransport; import org.elasticsearch.threadpool.TestThreadPool; @@ -834,6 +835,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { } } + @TestLogging("_root:DEBUG, org.elasticsearch.transport:TRACE") public void testCloseWhileConcurrentlyConnecting() throws IOException, InterruptedException, BrokenBarrierException { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 6e92e70e4ae..94ac7e963c1 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -62,7 +62,10 @@ import java.util.stream.Collectors; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.startsWith; public class RemoteClusterServiceTests extends ESTestCase { @@ -120,17 +123,19 @@ public class RemoteClusterServiceTests extends ESTestCase { public void testBuildRemoteClustersDynamicConfig() throws Exception { Map>>> map = RemoteClusterService.buildRemoteClustersDynamicConfig( - Settings.builder().put("cluster.remote.foo.seeds", "192.168.0.1:8080") - 
.put("cluster.remote.bar.seeds", "[::1]:9090") - .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") - .put("cluster.remote.boom.proxy", "foo.bar.com:1234").build()); - assertEquals(3, map.size()); - assertTrue(map.containsKey("foo")); - assertTrue(map.containsKey("bar")); - assertTrue(map.containsKey("boom")); - assertEquals(1, map.get("foo").v2().size()); - assertEquals(1, map.get("bar").v2().size()); - assertEquals(1, map.get("boom").v2().size()); + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("cluster.remote.bar.seeds", "[::1]:9090") + .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") + .put("cluster.remote.boom.proxy", "foo.bar.com:1234") + .put("search.remote.quux.seeds", "quux:9300") + .put("search.remote.quux.proxy", "quux-proxy:19300") + .build()); + assertThat(map.keySet(), containsInAnyOrder(equalTo("foo"), equalTo("bar"), equalTo("boom"), equalTo("quux"))); + assertThat(map.get("foo").v2(), hasSize(1)); + assertThat(map.get("bar").v2(), hasSize(1)); + assertThat(map.get("boom").v2(), hasSize(1)); + assertThat(map.get("quux").v2(), hasSize(1)); DiscoveryNode foo = map.get("foo").v2().get(0).get(); assertEquals("", map.get("foo").v1()); @@ -150,6 +155,41 @@ public class RemoteClusterServiceTests extends ESTestCase { assertEquals(boom.getId(), "boom#boom-node1.internal:1000"); assertEquals("foo.bar.com:1234", map.get("boom").v1()); assertEquals(boom.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); + + DiscoveryNode quux = map.get("quux").v2().get(0).get(); + assertEquals(quux.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0)); + assertEquals("quux", quux.getHostName()); + assertEquals(quux.getId(), "quux#quux:9300"); + assertEquals("quux-proxy:19300", map.get("quux").v1()); + assertEquals(quux.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); + + assertSettingDeprecationsAndWarnings(new String[]{"search.remote.quux.seeds", "search.remote.quux.proxy"}); + } 
+ + public void testBuildRemoteClustersDynamicConfigWithDuplicate() { + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> RemoteClusterService.buildRemoteClustersDynamicConfig( + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("search.remote.foo.seeds", "192.168.0.1:8080") + .build())); + assertThat(e, hasToString(containsString("found duplicate remote cluster configurations for cluster alias [foo]"))); + assertSettingDeprecationsAndWarnings(new String[]{"search.remote.foo.seeds"}); + } + + public void testBuildRemoteClustersDynamicConfigWithDuplicates() { + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> RemoteClusterService.buildRemoteClustersDynamicConfig( + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("search.remote.foo.seeds", "192.168.0.1:8080") + .put("cluster.remote.bar.seeds", "192.168.0.1:8080") + .put("search.remote.bar.seeds", "192.168.0.1:8080") + .build())); + assertThat(e, hasToString(containsString("found duplicate remote cluster configurations for cluster aliases [bar,foo]"))); + assertSettingDeprecationsAndWarnings(new String[]{"search.remote.bar.seeds", "search.remote.foo.seeds"}); } public void testGroupClusterIndices() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java index bc7ef0fd5d2..c6fb1f406cf 100644 --- a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java @@ -223,6 +223,7 @@ public class TcpTransportTests extends ESTestCase { StreamInput streamIn = reference.streamInput(); streamIn.skip(TcpHeader.MARKER_BYTES_SIZE); + @SuppressWarnings("unused") int len = streamIn.readInt(); long requestId = streamIn.readLong(); assertEquals(42, requestId); diff --git 
a/server/src/test/java/org/elasticsearch/update/UpdateIT.java b/server/src/test/java/org/elasticsearch/update/UpdateIT.java index 85ebf01ef28..70489e5c1de 100644 --- a/server/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/server/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -19,19 +19,6 @@ package org.elasticsearch.update; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Semaphore; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; - import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; @@ -56,6 +43,19 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; @@ -586,15 +586,13 @@ public class UpdateIT extends ESIntegTestCase { final class UpdateThread extends Thread { final Map failedMap = new HashMap<>(); final int numberOfIds; - final int updatesPerId; final int maxUpdateRequests = 
numberOfIdsPerThread*numberOfUpdatesPerId; final int maxDeleteRequests = numberOfIdsPerThread*numberOfUpdatesPerId; private final Semaphore updateRequestsOutstanding = new Semaphore(maxUpdateRequests); private final Semaphore deleteRequestsOutstanding = new Semaphore(maxDeleteRequests); - UpdateThread(int numberOfIds, int updatesPerId) { + UpdateThread(int numberOfIds) { this.numberOfIds = numberOfIds; - this.updatesPerId = updatesPerId; } final class UpdateListener implements ActionListener { @@ -725,7 +723,7 @@ public class UpdateIT extends ESIntegTestCase { final List threads = new ArrayList<>(); for (int i = 0; i < numberOfThreads; i++) { - UpdateThread ut = new UpdateThread(numberOfIdsPerThread, numberOfUpdatesPerId); + UpdateThread ut = new UpdateThread(numberOfIdsPerThread); ut.start(); threads.add(ut); } @@ -749,7 +747,7 @@ public class UpdateIT extends ESIntegTestCase { //This means that we add 1 to the expected versions and attempts //All the previous operations should be complete or failed at this point for (int i = 0; i < numberOfIdsPerThread; ++i) { - UpdateResponse ur = client().prepareUpdate("test", "type1", Integer.toString(i)) + client().prepareUpdate("test", "type1", Integer.toString(i)) .setScript(fieldIncScript) .setRetryOnConflict(Integer.MAX_VALUE) .setUpsert(jsonBuilder().startObject().field("field", 1).endObject()) diff --git a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java index 588118db4ae..e8d9dd0fc2c 100644 --- a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java +++ b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java @@ -20,8 +20,8 @@ package org.elasticsearch.versioning; import org.apache.lucene.util.TestUtil; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteRequest; +import 
org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexResponse; @@ -358,7 +358,6 @@ public class SimpleVersioningIT extends ESIntegTestCase { // zero-pad sequential logger.info("--> use zero-padded sequential ids"); ids = new IDSource() { - final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX); final String zeroPad = String.format(Locale.ROOT, "%0" + TestUtil.nextInt(random, 4, 20) + "d", 0); int upto; @@ -374,7 +373,6 @@ public class SimpleVersioningIT extends ESIntegTestCase { logger.info("--> use random long ids"); ids = new IDSource() { final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX); - int upto; @Override public String next() { @@ -387,8 +385,6 @@ public class SimpleVersioningIT extends ESIntegTestCase { logger.info("--> use zero-padded random long ids"); ids = new IDSource() { final int radix = TestUtil.nextInt(random, Character.MIN_RADIX, Character.MAX_RADIX); - final String zeroPad = String.format(Locale.ROOT, "%015d", 0); - int upto; @Override public String next() { diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 69a9f51ab69..1bbfb6fa73d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -503,7 +503,7 @@ public abstract class EngineTestCase extends ESTestCase { @Nullable final ToLongBiFunction seqNoForOperation, final EngineConfig config) { if (localCheckpointTrackerSupplier == null) { - return new InternalEngine(config) { + return new InternalTestEngine(config) { @Override IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException { return (indexWriterFactory != 
null) ? @@ -519,7 +519,7 @@ public abstract class EngineTestCase extends ESTestCase { } }; } else { - return new InternalEngine(config, localCheckpointTrackerSupplier) { + return new InternalTestEngine(config, localCheckpointTrackerSupplier) { @Override IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException { return (indexWriterFactory != null) ? diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/InternalTestEngine.java b/test/framework/src/main/java/org/elasticsearch/index/engine/InternalTestEngine.java new file mode 100644 index 00000000000..8c52d57aabc --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/InternalTestEngine.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.engine; + +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.index.seqno.LocalCheckpointTracker; +import org.elasticsearch.index.seqno.SequenceNumbers; + +import java.io.IOException; +import java.util.Map; +import java.util.function.BiFunction; + +/** + * An alternative of {@link InternalEngine} that allows tweaking internals to reduce noise in engine tests. 
+ */ +class InternalTestEngine extends InternalEngine { + private final Map idToMaxSeqNo = ConcurrentCollections.newConcurrentMap(); + + InternalTestEngine(EngineConfig engineConfig) { + super(engineConfig); + } + + InternalTestEngine(EngineConfig engineConfig, BiFunction localCheckpointTrackerSupplier) { + super(engineConfig, localCheckpointTrackerSupplier); + } + + @Override + public IndexResult index(Index index) throws IOException { + if (index.seqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) { + idToMaxSeqNo.compute(index.id(), (id, existing) -> { + if (existing == null) { + return index.seqNo(); + } else { + long maxSeqNo = Math.max(index.seqNo(), existing); + advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNo); + return maxSeqNo; + } + }); + } + return super.index(index); + } + + @Override + public DeleteResult delete(Delete delete) throws IOException { + if (delete.seqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) { + final long maxSeqNo = idToMaxSeqNo.compute(delete.id(), (id, existing) -> { + if (existing == null) { + return delete.seqNo(); + } else { + return Math.max(delete.seqNo(), existing); + } + }); + advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNo); + } + return super.delete(delete); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index 42eab104d6a..8914bad5c41 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -21,9 +21,6 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.search.Query; import org.apache.lucene.search.similarities.BM25Similarity; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.QueryShardContext; @@ -37,8 +34,6 @@ import java.util.List; /** Base test case for subclasses of MappedFieldType */ public abstract class FieldTypeTestCase extends ESTestCase { - private static final Settings INDEX_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); - /** Abstraction for mutating a property of a MappedFieldType */ public abstract static class Modifier { /** The name of the property that is being modified. Used in test failure messages. */ diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index f590b99b481..9021fd1efbb 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -97,6 +97,7 @@ import java.util.stream.StreamSupport; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase { @@ -444,6 +445,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase for (IndexShard replica : replicas) { try { assertThat(replica.getMaxSeenAutoIdTimestamp(), equalTo(primary.getMaxSeenAutoIdTimestamp())); + assertThat(replica.getMaxSeqNoOfUpdatesOrDeletes(), greaterThanOrEqualTo(primary.getMaxSeqNoOfUpdatesOrDeletes())); } catch (AlreadyClosedException ignored) { } } @@ -563,6 +565,11 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase return replicationGroup.getPrimary().getGlobalCheckpoint(); } + @Override + public 
long maxSeqNoOfUpdatesOrDeletes() { + return replicationGroup.getPrimary().getMaxSeqNoOfUpdatesOrDeletes(); + } + @Override public org.elasticsearch.index.shard.ReplicationGroup getReplicationGroup() { return replicationGroup.primary.getReplicationGroup(); @@ -577,12 +584,14 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase final ShardRouting replicaRouting, final ReplicaRequest request, final long globalCheckpoint, + final long maxSeqNoOfUpdatesOrDeletes, final ActionListener listener) { IndexShard replica = replicationGroup.replicas.stream() .filter(s -> replicaRouting.isSameAllocation(s.routingEntry())).findFirst().get(); replica.acquireReplicaOperationPermit( replicationGroup.primary.getPendingPrimaryTerm(), globalCheckpoint, + maxSeqNoOfUpdatesOrDeletes, new ActionListener() { @Override public void onResponse(Releasable releasable) { @@ -659,7 +668,8 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase @Override protected void performOnReplica(BulkShardRequest request, IndexShard replica) throws Exception { - executeShardBulkOnReplica(request, replica, getPrimaryShard().getPendingPrimaryTerm(), getPrimaryShard().getGlobalCheckpoint()); + executeShardBulkOnReplica(request, replica, getPrimaryShard().getPendingPrimaryTerm(), + getPrimaryShard().getGlobalCheckpoint(), getPrimaryShard().getMaxSeqNoOfUpdatesOrDeletes()); } } @@ -690,10 +700,10 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase } private void executeShardBulkOnReplica(BulkShardRequest request, IndexShard replica, long operationPrimaryTerm, - long globalCheckpointOnPrimary) throws Exception { + long globalCheckpointOnPrimary, long maxSeqNoOfUpdatesOrDeletes) throws Exception { final PlainActionFuture permitAcquiredFuture = new PlainActionFuture<>(); - replica.acquireReplicaOperationPermit( - operationPrimaryTerm, globalCheckpointOnPrimary, permitAcquiredFuture, ThreadPool.Names.SAME, request); + 
replica.acquireReplicaOperationPermit(operationPrimaryTerm, globalCheckpointOnPrimary, + maxSeqNoOfUpdatesOrDeletes, permitAcquiredFuture, ThreadPool.Names.SAME, request); final Translog.Location location; try (Releasable ignored = permitAcquiredFuture.actionGet()) { location = TransportShardBulkAction.performOnReplica(request, replica); @@ -723,14 +733,16 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase } void indexOnReplica(BulkShardRequest request, ReplicationGroup group, IndexShard replica, long term) throws Exception { - executeShardBulkOnReplica(request, replica, term, group.primary.getGlobalCheckpoint()); + executeShardBulkOnReplica(request, replica, term, + group.primary.getGlobalCheckpoint(), group.primary.getMaxSeqNoOfUpdatesOrDeletes()); } /** * Executes the delete request on the given replica shard. */ void deleteOnReplica(BulkShardRequest request, ReplicationGroup group, IndexShard replica) throws Exception { - executeShardBulkOnReplica(request, replica, group.primary.getPendingPrimaryTerm(), group.primary.getGlobalCheckpoint()); + executeShardBulkOnReplica(request, replica, group.primary.getPendingPrimaryTerm(), + group.primary.getGlobalCheckpoint(), group.primary.getMaxSeqNoOfUpdatesOrDeletes()); } class GlobalCheckpointSync extends ReplicationAction< @@ -774,7 +786,8 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase @Override protected void performOnReplica(ResyncReplicationRequest request, IndexShard replica) throws Exception { - executeResyncOnReplica(replica, request, getPrimaryShard().getPendingPrimaryTerm(), getPrimaryShard().getGlobalCheckpoint()); + executeResyncOnReplica(replica, request, getPrimaryShard().getPendingPrimaryTerm(), + getPrimaryShard().getGlobalCheckpoint(), getPrimaryShard().getMaxSeqNoOfUpdatesOrDeletes()); } } @@ -787,12 +800,12 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase return result; } - private void 
executeResyncOnReplica(IndexShard replica, ResyncReplicationRequest request, - long operationPrimaryTerm, long globalCheckpointOnPrimary) throws Exception { + private void executeResyncOnReplica(IndexShard replica, ResyncReplicationRequest request, long operationPrimaryTerm, + long globalCheckpointOnPrimary, long maxSeqNoOfUpdatesOrDeletes) throws Exception { final Translog.Location location; final PlainActionFuture acquirePermitFuture = new PlainActionFuture<>(); - replica.acquireReplicaOperationPermit( - operationPrimaryTerm, globalCheckpointOnPrimary, acquirePermitFuture, ThreadPool.Names.SAME, request); + replica.acquireReplicaOperationPermit(operationPrimaryTerm, globalCheckpointOnPrimary, + maxSeqNoOfUpdatesOrDeletes, acquirePermitFuture, ThreadPool.Names.SAME, request); try (Releasable ignored = acquirePermitFuture.actionGet()) { location = TransportResyncReplicationAction.performOnReplica(request, replica); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 370c29740b1..c9ef79720a2 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -699,8 +699,9 @@ public abstract class IndexShardTestCase extends ESTestCase { shard.updateLocalCheckpointForShard(shard.routingEntry().allocationId().getId(), shard.getLocalCheckpoint()); } else { - result = shard.applyIndexOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0, - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + final long seqNo = shard.seqNoStats().getMaxSeqNo() + 1; + shard.advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); // manually replicate max_seq_no_of_updates + result = shard.applyIndexOperationOnReplica(seqNo, 0, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); if (result.getResultType() == 
Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { throw new TransportReplicationAction.RetryOnReplicaException(shard.shardId, "Mappings are not available on the replica yet, triggered update: " + result.getRequiredMappingUpdate()); @@ -720,7 +721,9 @@ public abstract class IndexShardTestCase extends ESTestCase { result = shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, type, id, VersionType.INTERNAL); shard.updateLocalCheckpointForShard(shard.routingEntry().allocationId().getId(), shard.getEngine().getLocalCheckpoint()); } else { - result = shard.applyDeleteOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0L, type, id); + final long seqNo = shard.seqNoStats().getMaxSeqNo() + 1; + shard.advanceMaxSeqNoOfUpdatesOrDeletes(seqNo); // manually replicate max_seq_no_of_updates + result = shard.applyDeleteOperationOnReplica(seqNo, 0L, type, id); } return result; } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java index 9f12c369991..3e4e639dd01 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/ESBlobStoreContainerTestCase.java @@ -103,7 +103,7 @@ public abstract class ESBlobStoreContainerTestCase extends ESTestCase { int length = randomIntBetween(10, 100); String name = "bar-0-"; generatedBlobs.put(name, (long) length); - byte[] data = writeRandomBlob(container, name, length); + writeRandomBlob(container, name, length); Map blobs = container.listBlobs(); assertThat(blobs.size(), equalTo(numberOfFooBlobs + numberOfBarBlobs)); diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index be38ae95a32..1b609e81711 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -117,9 +117,9 @@ public class MockScriptEngine implements ScriptEngine { }; return context.factoryClazz.cast(factory); } else if (context.instanceClazz.equals(UpdateScript.class)) { - UpdateScript.Factory factory = parameters -> new UpdateScript(parameters) { + UpdateScript.Factory factory = (parameters, ctx) -> new UpdateScript(parameters, ctx) { @Override - public void execute(Map ctx) { + public void execute() { final Map vars = new HashMap<>(); vars.put("ctx", ctx); vars.put("params", parameters); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 17202839a65..6f9c46b4dc4 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -240,7 +240,7 @@ public abstract class AggregatorTestCase extends ESTestCase { } protected SearchContext createSearchContext(IndexSearcher indexSearcher, IndexSettings indexSettings) { - Engine.Searcher searcher = new Engine.Searcher("aggregator_test", indexSearcher); + Engine.Searcher searcher = new Engine.Searcher("aggregator_test", indexSearcher, logger); QueryCache queryCache = new DisabledQueryCache(indexSettings); QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() { @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java index c8b8de13d6c..b2c562d43a6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java @@ -20,6 +20,7 @@ package 
org.elasticsearch.test; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.store.ChecksumIndexInput; @@ -27,7 +28,6 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; -import org.elasticsearch.common.logging.ESLoggerFactory; import java.io.IOException; import java.nio.ByteBuffer; @@ -45,7 +45,7 @@ import static org.junit.Assert.assertTrue; public final class CorruptionUtils { - private static Logger logger = ESLoggerFactory.getLogger("test"); + private static final Logger logger = LogManager.getLogger(CorruptionUtils.class); private CorruptionUtils() {} public static void corruptIndex(Random random, Path indexPath, boolean corruptSegments) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 922a6e0d276..ffa7c601184 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -408,7 +408,7 @@ public abstract class ESTestCase extends LuceneTestCase { } try { final List actualWarnings = threadContext.getResponseHeaders().get("Warning"); - assertNotNull(actualWarnings); + assertNotNull("no warnings, expected: " + Arrays.asList(expectedWarnings), actualWarnings); final Set actualWarningValues = actualWarnings.stream().map(DeprecationLogger::extractWarningValueFromWarningHeader).collect(Collectors.toSet()); for (String msg : expectedWarnings) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index 1149c7b0941..facbc6ec84b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -86,36 +86,36 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms; import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedAvg; import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedCardinality; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds; import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid; -import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedMax; -import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedMin; import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks; import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; -import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles; import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks; import 
org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedAvg; +import org.elasticsearch.search.aggregations.metrics.ParsedCardinality; +import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds; +import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid; +import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles; +import org.elasticsearch.search.aggregations.metrics.ParsedMax; +import org.elasticsearch.search.aggregations.metrics.ParsedMin; +import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.ParsedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedSum; import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentileRanks; import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedStats; -import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; -import org.elasticsearch.search.aggregations.metrics.ParsedSum; -import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ParsedTopHits; -import 
org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; +import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; @@ -134,6 +134,7 @@ import org.elasticsearch.search.aggregations.pipeline.derivative.ParsedDerivativ import java.io.IOException; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -153,6 +154,16 @@ public abstract class InternalAggregationTestCase public static final int DEFAULT_MAX_BUCKETS = 100000; protected static final double TOLERANCE = 1e-10; + private static final Comparator INTERNAL_AGG_COMPARATOR = (agg1, agg2) -> { + if (agg1.isMapped() == agg2.isMapped()) { + return 0; + } else if (agg1.isMapped() && agg2.isMapped() == false) { + return -1; + } else { + return 1; + } + }; + private final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry( new SearchModule(Settings.EMPTY, false, emptyList()).getNamedWriteables()); @@ -239,6 +250,8 @@ public abstract class InternalAggregationTestCase inputs.add(t); toReduce.add(t); } + // Sort aggs so that unmapped come last. 
This mimicks the behavior of InternalAggregations.reduce() + inputs.sort(INTERNAL_AGG_COMPARATOR); ScriptService mockScriptService = mockScriptService(); MockBigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); if (randomBoolean() && toReduce.size() > 1) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 69142dba638..794c7fef783 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -24,6 +24,7 @@ import com.carrotsearch.randomizedtesting.SysGlobals; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.apache.logging.log4j.Logger; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.ElasticsearchException; @@ -165,10 +166,6 @@ public final class InternalTestCluster extends TestCluster { private final Logger logger = Loggers.getLogger(getClass()); - - private static final AtomicInteger clusterOrdinal = new AtomicInteger(); - - public static final int DEFAULT_LOW_NUM_MASTER_NODES = 1; public static final int DEFAULT_HIGH_NUM_MASTER_NODES = 3; @@ -317,7 +314,6 @@ public final class InternalTestCluster extends TestCluster { this.mockPlugins = mockPlugins; - sharedNodesSeeds = new long[numSharedDedicatedMasterNodes + numSharedDataNodes + numSharedCoordOnlyNodes]; for (int i = 0; i < sharedNodesSeeds.length; i++) { sharedNodesSeeds[i] = random.nextLong(); @@ -2062,6 +2058,7 @@ public final class InternalTestCluster extends TestCluster { return null; } + @Override public synchronized Iterable getClients() { ensureOpen(); return () -> { diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java index cf2d69e36d5..0dbdaa55e33 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/AssertingSearcher.java @@ -38,10 +38,8 @@ class AssertingSearcher extends Engine.Searcher { private final Logger logger; private final AtomicBoolean closed = new AtomicBoolean(false); - AssertingSearcher(IndexSearcher indexSearcher, final Engine.Searcher wrappedSearcher, - ShardId shardId, - Logger logger) { - super(wrappedSearcher.source(), indexSearcher); + AssertingSearcher(IndexSearcher indexSearcher, final Engine.Searcher wrappedSearcher, ShardId shardId, Logger logger) { + super(wrappedSearcher.source(), indexSearcher, s -> {throw new AssertionError();}, logger); // we only use the given index searcher here instead of the IS of the wrapped searcher. the IS might be a wrapped searcher // with a wrapped reader. 
this.wrappedSearcher = wrappedSearcher; @@ -52,11 +50,6 @@ class AssertingSearcher extends Engine.Searcher { "IndexReader#getRefCount() was [" + initialRefCount + "] expected a value > [0] - reader is already closed"; } - @Override - public String source() { - return wrappedSearcher.source(); - } - @Override public void close() { synchronized (lock) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java index e021df52c60..eb300ba302b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/LoggingListener.java @@ -20,7 +20,7 @@ package org.elasticsearch.test.junit.listeners; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.ESLoggerFactory; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.junit.annotations.TestLogging; import org.junit.runner.Description; @@ -78,7 +78,7 @@ public class LoggingListener extends RunListener { */ private static Logger resolveLogger(String loggerName) { if (loggerName.equalsIgnoreCase("_root")) { - return ESLoggerFactory.getRootLogger(); + return LogManager.getRootLogger(); } return Loggers.getLogger(loggerName); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java index a8cc9646129..85796494ba9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuite.java @@ -21,6 +21,7 @@ package org.elasticsearch.test.rest.yaml.section; import org.elasticsearch.common.ParsingException; import 
org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.yaml.YamlXContent; @@ -73,9 +74,10 @@ public class ClientYamlTestSuite { } public static ClientYamlTestSuite parse(String api, String suiteName, XContentParser parser) throws IOException { - parser.nextToken(); - assert parser.currentToken() == XContentParser.Token.START_OBJECT : "expected token to be START_OBJECT but was " - + parser.currentToken(); + if (parser.nextToken() != XContentParser.Token.START_OBJECT) { + throw new XContentParseException(parser.getTokenLocation(), + "expected token to be START_OBJECT but was " + parser.currentToken()); + } ClientYamlTestSuite restTestSuite = new ClientYamlTestSuite(api, suiteName); diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index 98eb0b10502..cdc33b38b86 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -21,6 +21,7 @@ package org.elasticsearch.test.store; import com.carrotsearch.randomizedtesting.SeedUtils; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.store.BaseDirectoryWrapper; @@ -62,10 +63,6 @@ public class MockFSDirectoryService extends FsDirectoryService { Setting.doubleSetting("index.store.mock.random.io_exception_rate_on_open", 0.0d, 0.0d, Property.IndexScope, Property.NodeScope); public static final Setting RANDOM_IO_EXCEPTION_RATE_SETTING = Setting.doubleSetting("index.store.mock.random.io_exception_rate", 0.0d, 0.0d, 
Property.IndexScope, Property.NodeScope); - public static final Setting RANDOM_PREVENT_DOUBLE_WRITE_SETTING = - Setting.boolSetting("index.store.mock.random.prevent_double_write", true, Property.IndexScope, Property.NodeScope); - public static final Setting RANDOM_NO_DELETE_OPEN_FILE_SETTING = - Setting.boolSetting("index.store.mock.random.no_delete_open_file", true, Property.IndexScope, Property.NodeScope); public static final Setting CRASH_INDEX_SETTING = Setting.boolSetting("index.store.mock.random.crash_index", true, Property.IndexScope, Property.NodeScope); @@ -74,8 +71,6 @@ public class MockFSDirectoryService extends FsDirectoryService { private final double randomIOExceptionRate; private final double randomIOExceptionRateOnOpen; private final MockDirectoryWrapper.Throttling throttle; - private final boolean preventDoubleWrite; - private final boolean noDeleteOpenFile; private final boolean crashIndex; @Inject @@ -87,9 +82,6 @@ public class MockFSDirectoryService extends FsDirectoryService { randomIOExceptionRate = RANDOM_IO_EXCEPTION_RATE_SETTING.get(indexSettings); randomIOExceptionRateOnOpen = RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING.get(indexSettings); - preventDoubleWrite = RANDOM_PREVENT_DOUBLE_WRITE_SETTING.get(indexSettings); - noDeleteOpenFile = RANDOM_NO_DELETE_OPEN_FILE_SETTING.exists(indexSettings) ? 
- RANDOM_NO_DELETE_OPEN_FILE_SETTING.get(indexSettings) : random.nextBoolean(); random.nextInt(shardId.getId() + 1); // some randomness per shard throttle = MockDirectoryWrapper.Throttling.NEVER; crashIndex = CRASH_INDEX_SETTING.get(indexSettings); diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java index 3b876f3c383..82ab9fc4121 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSIndexStore.java @@ -61,8 +61,6 @@ public class MockFSIndexStore extends IndexStore { return Arrays.asList(INDEX_CHECK_INDEX_ON_CLOSE_SETTING, MockFSDirectoryService.CRASH_INDEX_SETTING, MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_SETTING, - MockFSDirectoryService.RANDOM_PREVENT_DOUBLE_WRITE_SETTING, - MockFSDirectoryService.RANDOM_NO_DELETE_OPEN_FILE_SETTING, MockFSDirectoryService.RANDOM_IO_EXCEPTION_RATE_ON_OPEN_SETTING); } @@ -86,6 +84,7 @@ public class MockFSIndexStore extends IndexStore { super(indexSettings); } + @Override public DirectoryService newDirectoryService(ShardPath path) { return new MockFSDirectoryService(indexSettings, this, path); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java index 0845fc2546f..fe5dedb2d5a 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/LoggingListenerTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.test.test; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.ESLoggerFactory; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.logging.Loggers; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -51,7 +51,7 @@ public class LoggingListenerTests extends ESTestCase { Logger xyzLogger = Loggers.getLogger("xyz"); Logger abcLogger = Loggers.getLogger("abc"); - final Level level = ESLoggerFactory.getRootLogger().getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); assertThat(xyzLogger.getLevel(), equalTo(level)); assertThat(abcLogger.getLevel(), equalTo(level)); @@ -88,7 +88,7 @@ public class LoggingListenerTests extends ESTestCase { Logger fooLogger = Loggers.getLogger("foo"); Logger fooBarLogger = Loggers.getLogger("foo.bar"); - final Level level = ESLoggerFactory.getRootLogger().getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); assertThat(xyzLogger.getLevel(), equalTo(level)); assertThat(abcLogger.getLevel(), equalTo(level)); @@ -128,7 +128,7 @@ public class LoggingListenerTests extends ESTestCase { Logger abcLogger = Loggers.getLogger("abc"); Logger xyzLogger = Loggers.getLogger("xyz"); - final Level level = ESLoggerFactory.getRootLogger().getLevel(); + final Level level = LogManager.getRootLogger().getLevel(); assertThat(xyzLogger.getLevel(), equalTo(level)); assertThat(abcLogger.getLevel(), equalTo(level)); diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index f027493b0ab..59d89024db5 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -92,7 +92,6 @@ buildRestTests.docs = fileTree(projectDir) { exclude 'build' // These file simply doesn't pass yet. We should figure out how to fix them. 
exclude 'en/watcher/reference/actions.asciidoc' - exclude 'en/rest-api/graph/explore.asciidoc' } Map setups = buildRestTests.setups diff --git a/x-pack/docs/en/security/configuring-es.asciidoc b/x-pack/docs/en/security/configuring-es.asciidoc index 7bdfbef08de..c79824287b4 100644 --- a/x-pack/docs/en/security/configuring-es.asciidoc +++ b/x-pack/docs/en/security/configuring-es.asciidoc @@ -85,7 +85,7 @@ user API. . Set up roles and users to control access to {es}. For example, to grant _John Doe_ full access to all indices that match the pattern `events*` and enable him to create visualizations and dashboards -for those indices in {kib}, you could create an `events_admin` role and +for those indices in {kib}, you could create an `events_admin` role and assign the role to a new `johndoe` user. + -- diff --git a/x-pack/docs/en/watcher/index.asciidoc b/x-pack/docs/en/watcher/index.asciidoc index 2be36389719..5f51c948ebf 100644 --- a/x-pack/docs/en/watcher/index.asciidoc +++ b/x-pack/docs/en/watcher/index.asciidoc @@ -1,5 +1,5 @@ [[xpack-alerting]] -= Alerting on Cluster and Index Events += Alerting on cluster and index events [partintro] -- diff --git a/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java b/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java index 31b458489d4..8743bc708f4 100644 --- a/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java +++ b/x-pack/license-tools/src/test/java/org/elasticsearch/license/licensor/TestUtils.java @@ -6,9 +6,9 @@ package org.elasticsearch.license.licensor; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import 
org.elasticsearch.common.xcontent.XContentBuilder; @@ -38,8 +38,7 @@ public class TestUtils { private static final FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd"); - private static final DateMathParser dateMathParser = - new DateMathParser(formatDateTimeFormatter); + private static final DateMathParser dateMathParser = formatDateTimeFormatter.toDateMathParser(); private static final DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer(); public static String dumpLicense(License license) throws Exception { diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java index 85913c26114..723d4cddc3a 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java @@ -152,6 +152,10 @@ public class FollowIndexSecurityIT extends ESRestTestCase { verifyDocuments(adminClient(), allowedIndex, 5); }); assertThat(indexExists(adminClient(), disallowedIndex), is(false)); + assertBusy(() -> { + verifyCcrMonitoring(allowedIndex, allowedIndex); + verifyAutoFollowMonitoring(); + }); // Cleanup by deleting auto follow pattern and unfollowing: request = new Request("DELETE", "/_ccr/auto_follow/leader_cluster"); @@ -309,4 +313,30 @@ public class FollowIndexSecurityIT extends ESRestTestCase { assertThat(numberOfOperationsIndexed, greaterThanOrEqualTo(1)); } + private static void verifyAutoFollowMonitoring() throws IOException { + Request request = new Request("GET", "/.monitoring-*/_search"); + request.setJsonEntity("{\"query\": {\"term\": {\"type\": \"ccr_auto_follow_stats\"}}}"); + Map response; + try { + response = toMap(adminClient().performRequest(request)); + 
} catch (ResponseException e) { + throw new AssertionError("error while searching", e); + } + + int numberOfSuccessfulFollowIndices = 0; + + List hits = (List) XContentMapValues.extractValue("hits.hits", response); + assertThat(hits.size(), greaterThanOrEqualTo(1)); + + for (int i = 0; i < hits.size(); i++) { + Map hit = (Map) hits.get(i); + + int foundNumberOfOperationsReceived = + (int) XContentMapValues.extractValue("_source.ccr_auto_follow_stats.number_of_successful_follow_indices", hit); + numberOfSuccessfulFollowIndices = Math.max(numberOfSuccessfulFollowIndices, foundNumberOfOperationsReceived); + } + + assertThat(numberOfSuccessfulFollowIndices, greaterThanOrEqualTo(1)); + } + } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 514e9f261f7..eaacd8c5ae7 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -125,7 +125,10 @@ public class FollowIndexIT extends ESRestTestCase { ensureYellow("logs-20190101"); verifyDocuments("logs-20190101", 5); }); - assertBusy(() -> verifyCcrMonitoring("logs-20190101", "logs-20190101")); + assertBusy(() -> { + verifyCcrMonitoring("logs-20190101", "logs-20190101"); + verifyAutoFollowMonitoring(); + }); } private static void index(RestClient client, String index, String id, Object... 
fields) throws IOException { @@ -213,6 +216,32 @@ public class FollowIndexIT extends ESRestTestCase { assertThat(numberOfOperationsIndexed, greaterThanOrEqualTo(1)); } + private static void verifyAutoFollowMonitoring() throws IOException { + Request request = new Request("GET", "/.monitoring-*/_search"); + request.setJsonEntity("{\"query\": {\"term\": {\"type\": \"ccr_auto_follow_stats\"}}}"); + Map response; + try { + response = toMap(client().performRequest(request)); + } catch (ResponseException e) { + throw new AssertionError("error while searching", e); + } + + int numberOfSuccessfulFollowIndices = 0; + + List hits = (List) XContentMapValues.extractValue("hits.hits", response); + assertThat(hits.size(), greaterThanOrEqualTo(1)); + + for (int i = 0; i < hits.size(); i++) { + Map hit = (Map) hits.get(i); + + int foundNumberOfOperationsReceived = + (int) XContentMapValues.extractValue("_source.ccr_auto_follow_stats.number_of_successful_follow_indices", hit); + numberOfSuccessfulFollowIndices = Math.max(numberOfSuccessfulFollowIndices, foundNumberOfOperationsReceived); + } + + assertThat(numberOfSuccessfulFollowIndices, greaterThanOrEqualTo(1)); + } + private static Map toMap(Response response) throws IOException { return toMap(EntityUtils.toString(response.getEntity())); } diff --git a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml index f4cf79fb558..2bf7820c10f 100644 --- a/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml +++ b/x-pack/plugin/ccr/qa/rest/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml @@ -5,9 +5,26 @@ leader_cluster_alias: _local_ body: leader_index_patterns: ['logs-*'] + max_concurrent_read_batches: 2 - is_true: acknowledged + - do: + ccr.get_auto_follow_pattern: + leader_cluster_alias: _local_ + - match: { _local_.leader_index_patterns: ['logs-*'] } + - match: { 
_local_.max_concurrent_read_batches: 2 } + + - do: + ccr.get_auto_follow_pattern: {} + - match: { _local_.leader_index_patterns: ['logs-*'] } + - match: { _local_.max_concurrent_read_batches: 2 } + - do: ccr.delete_auto_follow_pattern: leader_cluster_alias: _local_ - is_true: acknowledged + + - do: + catch: missing + ccr.get_auto_follow_pattern: + leader_cluster_alias: _local_ diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 2efd86fca64..1ef66d56985 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -40,10 +40,13 @@ import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator; +import org.elasticsearch.xpack.ccr.action.TransportGetAutoFollowPatternAction; +import org.elasticsearch.xpack.ccr.rest.RestGetAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.TransportAutoFollowStatsAction; import org.elasticsearch.xpack.ccr.rest.RestAutoFollowStatsAction; import org.elasticsearch.xpack.core.ccr.action.AutoFollowStatsAction; import org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction; +import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction; import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.ShardChangesAction; import org.elasticsearch.xpack.ccr.action.ShardFollowTask; @@ -163,7 +166,8 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E new ActionHandler<>(UnfollowIndexAction.INSTANCE, TransportUnfollowIndexAction.class), // auto-follow actions new ActionHandler<>(DeleteAutoFollowPatternAction.INSTANCE, 
TransportDeleteAutoFollowPatternAction.class), - new ActionHandler<>(PutAutoFollowPatternAction.INSTANCE, TransportPutAutoFollowPatternAction.class)); + new ActionHandler<>(PutAutoFollowPatternAction.INSTANCE, TransportPutAutoFollowPatternAction.class), + new ActionHandler<>(GetAutoFollowPatternAction.INSTANCE, TransportGetAutoFollowPatternAction.class)); } public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, @@ -184,7 +188,8 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E new RestUnfollowIndexAction(settings, restController), // auto-follow APIs new RestDeleteAutoFollowPatternAction(settings, restController), - new RestPutAutoFollowPatternAction(settings, restController)); + new RestPutAutoFollowPatternAction(settings, restController), + new RestGetAutoFollowPatternAction(settings, restController)); } public List getNamedWriteables() { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 51aa44105ec..f855bc8e1c0 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.Index; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ccr.Ccr; import org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.ccr.CcrSettings; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; @@ -259,8 +260,8 @@ public class AutoFollowCoordinator implements ClusterStateApplier { if (leaderClusterState != null) { assert e == null; final List followedIndices = 
autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(clusterAlias); - final List leaderIndicesToFollow = - getLeaderIndicesToFollow(autoFollowPattern, leaderClusterState, followerClusterState, followedIndices); + final List leaderIndicesToFollow = getLeaderIndicesToFollow(clusterAlias, autoFollowPattern, + leaderClusterState, followerClusterState, followedIndices); if (leaderIndicesToFollow.isEmpty()) { finalise(slot, new AutoFollowResult(clusterAlias)); } else { @@ -337,12 +338,21 @@ public class AutoFollowCoordinator implements ClusterStateApplier { } } - static List getLeaderIndicesToFollow(AutoFollowPattern autoFollowPattern, + static List getLeaderIndicesToFollow(String clusterAlias, + AutoFollowPattern autoFollowPattern, ClusterState leaderClusterState, ClusterState followerClusterState, List followedIndexUUIDs) { List leaderIndicesToFollow = new ArrayList<>(); for (IndexMetaData leaderIndexMetaData : leaderClusterState.getMetaData()) { + // If an auto follow pattern has been set up for the local cluster then + // we should not automatically follow a leader index that is also a follow index because + // this can result into an index creation explosion. 
+ if (leaderIndexMetaData.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY) != null && + clusterAlias.equals("_local_")) { + continue; + } + if (autoFollowPattern.match(leaderIndexMetaData.getIndex().getName())) { if (followedIndexUUIDs.contains(leaderIndexMetaData.getIndex().getUUID()) == false) { // TODO: iterate over the indices in the followerClusterState and check whether a IndexMetaData @@ -368,18 +378,19 @@ public class AutoFollowCoordinator implements ClusterStateApplier { Index indexToFollow) { return currentState -> { AutoFollowMetadata currentAutoFollowMetadata = currentState.metaData().custom(AutoFollowMetadata.TYPE); - - Map> newFollowedIndexUUIDS = - new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); - newFollowedIndexUUIDS.get(clusterAlias).add(indexToFollow.getUUID()); - - ClusterState.Builder newState = ClusterState.builder(currentState); - AutoFollowMetadata newAutoFollowMetadata = new AutoFollowMetadata(currentAutoFollowMetadata.getPatterns(), + Map> newFollowedIndexUUIDS = new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); + newFollowedIndexUUIDS.compute(clusterAlias, (key, existingUUIDs) -> { + assert existingUUIDs != null; + List newUUIDs = new ArrayList<>(existingUUIDs); + newUUIDs.add(indexToFollow.getUUID()); + return Collections.unmodifiableList(newUUIDs); + }); + final AutoFollowMetadata newAutoFollowMetadata = new AutoFollowMetadata(currentAutoFollowMetadata.getPatterns(), newFollowedIndexUUIDS, currentAutoFollowMetadata.getHeaders()); - newState.metaData(MetaData.builder(currentState.getMetaData()) - .putCustom(AutoFollowMetadata.TYPE, newAutoFollowMetadata) - .build()); - return newState.build(); + return ClusterState.builder(currentState) + .metaData(MetaData.builder(currentState.getMetaData()) + .putCustom(AutoFollowMetadata.TYPE, newAutoFollowMetadata).build()) + .build(); }; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java index 937ca0a0096..0cf700dee4e 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java @@ -207,6 +207,12 @@ public class ShardChangesAction extends Action { return maxSeqNo; } + private long maxSeqNoOfUpdatesOrDeletes; + + public long getMaxSeqNoOfUpdatesOrDeletes() { + return maxSeqNoOfUpdatesOrDeletes; + } + private Translog.Operation[] operations; public Translog.Operation[] getOperations() { @@ -220,11 +226,13 @@ public class ShardChangesAction extends Action { final long mappingVersion, final long globalCheckpoint, final long maxSeqNo, + final long maxSeqNoOfUpdatesOrDeletes, final Translog.Operation[] operations) { this.mappingVersion = mappingVersion; this.globalCheckpoint = globalCheckpoint; this.maxSeqNo = maxSeqNo; + this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes; this.operations = operations; } @@ -234,6 +242,7 @@ public class ShardChangesAction extends Action { mappingVersion = in.readVLong(); globalCheckpoint = in.readZLong(); maxSeqNo = in.readZLong(); + maxSeqNoOfUpdatesOrDeletes = in.readZLong(); operations = in.readArray(Translog.Operation::readOperation, Translog.Operation[]::new); } @@ -243,6 +252,7 @@ public class ShardChangesAction extends Action { out.writeVLong(mappingVersion); out.writeZLong(globalCheckpoint); out.writeZLong(maxSeqNo); + out.writeZLong(maxSeqNoOfUpdatesOrDeletes); out.writeArray(Translog.Operation::writeOperation, operations); } @@ -254,12 +264,13 @@ public class ShardChangesAction extends Action { return mappingVersion == that.mappingVersion && globalCheckpoint == that.globalCheckpoint && maxSeqNo == that.maxSeqNo && + maxSeqNoOfUpdatesOrDeletes == that.maxSeqNoOfUpdatesOrDeletes && Arrays.equals(operations, that.operations); } @Override public int hashCode() { - return 
Objects.hash(mappingVersion, globalCheckpoint, maxSeqNo, Arrays.hashCode(operations)); + return Objects.hash(mappingVersion, globalCheckpoint, maxSeqNo, maxSeqNoOfUpdatesOrDeletes, Arrays.hashCode(operations)); } } @@ -294,7 +305,9 @@ public class ShardChangesAction extends Action { request.getMaxOperationCount(), request.getExpectedHistoryUUID(), request.getMaxOperationSizeInBytes()); - return getResponse(mappingVersion, seqNoStats, operations); + // must capture after snapshotting operations to ensure this MUS is at least the highest MUS of any of these operations. + final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); + return getResponse(mappingVersion, seqNoStats, maxSeqNoOfUpdatesOrDeletes, operations); } @Override @@ -358,7 +371,8 @@ public class ShardChangesAction extends Action { final long mappingVersion = clusterService.state().metaData().index(shardId.getIndex()).getMappingVersion(); final SeqNoStats latestSeqNoStats = indexShard.seqNoStats(); - listener.onResponse(getResponse(mappingVersion, latestSeqNoStats, EMPTY_OPERATIONS_ARRAY)); + final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); + listener.onResponse(getResponse(mappingVersion, latestSeqNoStats, maxSeqNoOfUpdatesOrDeletes, EMPTY_OPERATIONS_ARRAY)); } catch (final Exception caught) { caught.addSuppressed(e); listener.onFailure(caught); @@ -433,8 +447,9 @@ public class ShardChangesAction extends Action { return operations.toArray(EMPTY_OPERATIONS_ARRAY); } - static Response getResponse(final long mappingVersion, final SeqNoStats seqNoStats, final Translog.Operation[] operations) { - return new Response(mappingVersion, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), operations); + static Response getResponse(final long mappingVersion, final SeqNoStats seqNoStats, + final long maxSeqNoOfUpdates, final Translog.Operation[] operations) { + return new Response(mappingVersion, seqNoStats.getGlobalCheckpoint(), 
seqNoStats.getMaxSeqNo(), maxSeqNoOfUpdates, operations); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java index 777efdd654b..45daff9b036 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.transport.NetworkExceptionHelper; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.persistent.AllocatedPersistentTask; @@ -56,6 +57,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { private long leaderGlobalCheckpoint; private long leaderMaxSeqNo; + private long leaderMaxSeqNoOfUpdatesOrDeletes = SequenceNumbers.UNASSIGNED_SEQ_NO; private long lastRequestedSeqNo; private long followerGlobalCheckpoint = 0; private long followerMaxSeqNo = 0; @@ -201,7 +203,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { numConcurrentWrites++; LOGGER.trace("{}[{}] write [{}/{}] [{}]", params.getFollowShardId(), numConcurrentWrites, ops.get(0).seqNo(), ops.get(ops.size() - 1).seqNo(), ops.size()); - sendBulkShardOperationsRequest(ops); + sendBulkShardOperationsRequest(ops, leaderMaxSeqNoOfUpdatesOrDeletes, new AtomicInteger(0)); } } @@ -262,6 +264,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { onOperationsFetched(response.getOperations()); leaderGlobalCheckpoint = Math.max(leaderGlobalCheckpoint, response.getGlobalCheckpoint()); leaderMaxSeqNo = Math.max(leaderMaxSeqNo, 
response.getMaxSeqNo()); + leaderMaxSeqNoOfUpdatesOrDeletes = SequenceNumbers.max(leaderMaxSeqNoOfUpdatesOrDeletes, response.getMaxSeqNoOfUpdatesOrDeletes()); final long newFromSeqNo; if (response.getOperations().length == 0) { newFromSeqNo = from; @@ -291,13 +294,11 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { } } - private void sendBulkShardOperationsRequest(List operations) { - sendBulkShardOperationsRequest(operations, new AtomicInteger(0)); - } - - private void sendBulkShardOperationsRequest(List operations, AtomicInteger retryCounter) { + private void sendBulkShardOperationsRequest(List operations, long leaderMaxSeqNoOfUpdatesOrDeletes, + AtomicInteger retryCounter) { + assert leaderMaxSeqNoOfUpdatesOrDeletes != SequenceNumbers.UNASSIGNED_SEQ_NO : "mus is not replicated"; final long startTime = relativeTimeProvider.getAsLong(); - innerSendBulkShardOperationsRequest(operations, + innerSendBulkShardOperationsRequest(operations, leaderMaxSeqNoOfUpdatesOrDeletes, response -> { synchronized (ShardFollowNodeTask.this) { totalIndexTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); @@ -311,7 +312,8 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { totalIndexTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime); numberOfFailedBulkOperations++; } - handleFailure(e, retryCounter, () -> sendBulkShardOperationsRequest(operations, retryCounter)); + handleFailure(e, retryCounter, + () -> sendBulkShardOperationsRequest(operations, leaderMaxSeqNoOfUpdatesOrDeletes, retryCounter)); } ); } @@ -383,8 +385,8 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask { // These methods are protected for testing purposes: protected abstract void innerUpdateMapping(LongConsumer handler, Consumer errorHandler); - protected abstract void innerSendBulkShardOperationsRequest( - List operations, Consumer handler, Consumer 
errorHandler); + protected abstract void innerSendBulkShardOperationsRequest(List operations, long leaderMaxSeqNoOfUpdatesOrDeletes, + Consumer handler, Consumer errorHandler); protected abstract void innerSendShardChangesRequest(long from, int maxOperationCount, Consumer handler, Consumer errorHandler); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java index 2a01f72ca77..381858f3269 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTask.java @@ -280,6 +280,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams { @Override public Version getMinimalSupportedVersion() { - return Version.V_6_4_0; + return Version.V_6_5_0; } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index d473091f80c..c5dab5360da 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -133,9 +133,11 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor operations, + final long maxSeqNoOfUpdatesOrDeletes, final Consumer handler, final Consumer errorHandler) { - final BulkShardOperationsRequest request = new BulkShardOperationsRequest(params.getFollowShardId(), operations); + final BulkShardOperationsRequest request = new BulkShardOperationsRequest( + params.getFollowShardId(), operations, maxSeqNoOfUpdatesOrDeletes); followerClient.execute(BulkShardOperationsAction.INSTANCE, request, ActionListener.wrap(response -> handler.accept(response), errorHandler)); } diff 
--git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java new file mode 100644 index 00000000000..b5b13873157 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternAction.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; +import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction; + +import java.util.Collections; +import java.util.Map; + +public class TransportGetAutoFollowPatternAction + extends TransportMasterNodeReadAction { + + @Inject + public TransportGetAutoFollowPatternAction(Settings settings, + TransportService transportService, + 
ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, GetAutoFollowPatternAction.NAME, transportService, clusterService, threadPool, actionFilters, + GetAutoFollowPatternAction.Request::new, indexNameExpressionResolver); + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected GetAutoFollowPatternAction.Response newResponse() { + return new GetAutoFollowPatternAction.Response(); + } + + @Override + protected void masterOperation(GetAutoFollowPatternAction.Request request, + ClusterState state, + ActionListener listener) throws Exception { + Map autoFollowPatterns = getAutoFollowPattern(state.metaData(), request.getLeaderClusterAlias()); + listener.onResponse(new GetAutoFollowPatternAction.Response(autoFollowPatterns)); + } + + @Override + protected ClusterBlockException checkBlock(GetAutoFollowPatternAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } + + static Map getAutoFollowPattern(MetaData metaData, String leaderClusterAlias) { + AutoFollowMetadata autoFollowMetadata = metaData.custom(AutoFollowMetadata.TYPE); + if (autoFollowMetadata == null) { + throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", leaderClusterAlias); + } + + if (leaderClusterAlias == null) { + return autoFollowMetadata.getPatterns(); + } + + AutoFollowPattern autoFollowPattern = autoFollowMetadata.getPatterns().get(leaderClusterAlias); + if (autoFollowPattern == null) { + throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", leaderClusterAlias); + } + return Collections.singletonMap(leaderClusterAlias, autoFollowPattern); + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java 
b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java index 05ba40310a9..199b1215653 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java @@ -135,12 +135,13 @@ public class TransportPutAutoFollowPatternAction extends } AutoFollowPattern previousPattern = patterns.get(request.getLeaderClusterAlias()); - List followedIndexUUIDs = followedLeaderIndices.get(request.getLeaderClusterAlias()); - if (followedIndexUUIDs == null) { + final List followedIndexUUIDs; + if (followedLeaderIndices.containsKey(request.getLeaderClusterAlias())) { + followedIndexUUIDs = new ArrayList<>(followedLeaderIndices.get(request.getLeaderClusterAlias())); + } else { followedIndexUUIDs = new ArrayList<>(); - followedLeaderIndices.put(request.getLeaderClusterAlias(), followedIndexUUIDs); } - + followedLeaderIndices.put(request.getLeaderClusterAlias(), followedIndexUUIDs); // Mark existing leader indices as already auto followed: if (previousPattern != null) { markExistingIndicesAsAutoFollowedForNewPatterns(request.getLeaderIndexPatterns(), leaderClusterState.metaData(), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsRequest.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsRequest.java index c28789fb580..80efba7831e 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsRequest.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsRequest.java @@ -17,29 +17,37 @@ import java.util.List; public final class BulkShardOperationsRequest extends ReplicatedWriteRequest { private List operations; + private long maxSeqNoOfUpdatesOrDeletes; public 
BulkShardOperationsRequest() { } - public BulkShardOperationsRequest(final ShardId shardId, final List operations) { + public BulkShardOperationsRequest(ShardId shardId, List operations, long maxSeqNoOfUpdatesOrDeletes) { super(shardId); setRefreshPolicy(RefreshPolicy.NONE); this.operations = operations; + this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes; } public List getOperations() { return operations; } + public long getMaxSeqNoOfUpdatesOrDeletes() { + return maxSeqNoOfUpdatesOrDeletes; + } + @Override public void readFrom(final StreamInput in) throws IOException { super.readFrom(in); + maxSeqNoOfUpdatesOrDeletes = in.readZLong(); operations = in.readList(Translog.Operation::readOperation); } @Override public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); + out.writeZLong(maxSeqNoOfUpdatesOrDeletes); out.writeVInt(operations.size()); for (Translog.Operation operation : operations) { Translog.Operation.writeOperation(out, operation); @@ -50,6 +58,7 @@ public final class BulkShardOperationsRequest extends ReplicatedWriteRequest shardOperationOnPrimary( final BulkShardOperationsRequest request, final IndexShard primary) throws Exception { - return shardOperationOnPrimary(request.shardId(), request.getOperations(), primary, logger); + return shardOperationOnPrimary( + request.shardId(), request.getOperations(), request.getMaxSeqNoOfUpdatesOrDeletes(), primary, logger); } // public for testing purposes only public static WritePrimaryResult shardOperationOnPrimary( final ShardId shardId, final List sourceOperations, + final long maxSeqNoOfUpdatesOrDeletes, final IndexShard primary, final Logger logger) throws IOException { final List targetOperations = sourceOperations.stream().map(operation -> { @@ -103,14 +106,19 @@ public class TransportBulkShardOperationsAction } return operationWithPrimaryTerm; }).collect(Collectors.toList()); + assert maxSeqNoOfUpdatesOrDeletes >= SequenceNumbers.NO_OPS_PERFORMED : "invalid msu [" + 
maxSeqNoOfUpdatesOrDeletes + "]"; + primary.advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNoOfUpdatesOrDeletes); final Translog.Location location = applyTranslogOperations(targetOperations, primary, Engine.Operation.Origin.PRIMARY); - final BulkShardOperationsRequest replicaRequest = new BulkShardOperationsRequest(shardId, targetOperations); + final BulkShardOperationsRequest replicaRequest = new BulkShardOperationsRequest( + shardId, targetOperations, maxSeqNoOfUpdatesOrDeletes); return new CcrWritePrimaryResult(replicaRequest, location, primary, logger); } @Override protected WriteReplicaResult shardOperationOnReplica( final BulkShardOperationsRequest request, final IndexShard replica) throws Exception { + assert replica.getMaxSeqNoOfUpdatesOrDeletes() >= request.getMaxSeqNoOfUpdatesOrDeletes() : + "mus on replica [" + replica + "] < mus of request [" + request.getMaxSeqNoOfUpdatesOrDeletes() + "]"; final Translog.Location location = applyTranslogOperations(request.getOperations(), replica, Engine.Operation.Origin.REPLICA); return new WriteReplicaResult<>(request, location, null, replica, logger); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java new file mode 100644 index 00000000000..e9c3073f6c7 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestGetAutoFollowPatternAction.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.rest; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction.Request; + +import java.io.IOException; + +import static org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction.INSTANCE; + +public class RestGetAutoFollowPatternAction extends BaseRestHandler { + + public RestGetAutoFollowPatternAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow/{leader_cluster_alias}", this); + controller.registerHandler(RestRequest.Method.GET, "/_ccr/auto_follow", this); + } + + @Override + public String getName() { + return "ccr_get_auto_follow_pattern_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + Request request = new Request(); + request.setLeaderClusterAlias(restRequest.param("leader_cluster_alias")); + return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); + } + +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java index 78715654a05..c491b0231be 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ccr; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import 
org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; @@ -30,11 +31,15 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -59,6 +64,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -221,6 +227,7 @@ public class ShardChangesIT extends ESIntegTestCase { assertBusy(assertExpectedDocumentRunnable(i)); } unfollowIndex("index2"); + assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), numberOfPrimaryShards); } public void testSyncMappings() throws Exception { @@ -258,6 +265,7 @@ public class ShardChangesIT extends ESIntegTestCase { assertThat(XContentMapValues.extractValue("properties.f.type", mappingMetaData.sourceAsMap()), equalTo("integer")); assertThat(XContentMapValues.extractValue("properties.k.type", mappingMetaData.sourceAsMap()), equalTo("long")); unfollowIndex("index2"); + assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), 2); } public void 
testNoMappingDefined() throws Exception { @@ -284,7 +292,8 @@ public class ShardChangesIT extends ESIntegTestCase { } public void testFollowIndex_backlog() throws Exception { - String leaderIndexSettings = getIndexSettings(between(1, 5), between(0, 1), + int numberOfShards = between(1, 5); + String leaderIndexSettings = getIndexSettings(numberOfShards, between(0, 1), singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); assertAcked(client().admin().indices().prepareCreate("index1").setSource(leaderIndexSettings, XContentType.JSON)); BulkProcessor.Listener listener = new BulkProcessor.Listener() { @@ -334,6 +343,7 @@ public class ShardChangesIT extends ESIntegTestCase { assertSameDocCount("index1", "index2"); unfollowIndex("index2"); + assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), numberOfShards); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33337") @@ -379,6 +389,7 @@ public class ShardChangesIT extends ESIntegTestCase { assertSameDocCount("index1", "index2"); unfollowIndex("index2"); + assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), 3); } public void testFollowIndexWithNestedField() throws Exception { @@ -419,6 +430,7 @@ public class ShardChangesIT extends ESIntegTestCase { }); } unfollowIndex("index2"); + assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), 1); } public void testUnfollowNonExistingIndex() { @@ -482,6 +494,7 @@ public class ShardChangesIT extends ESIntegTestCase { assertBusy(assertExpectedDocumentRunnable(i)); } unfollowIndex("index2"); + assertMaxSeqNoOfUpdatesIsTransferred(resolveIndex("index1"), resolveIndex("index2"), 1); } public void testDontFollowTheWrongIndex() throws Exception { @@ -715,6 +728,44 @@ public class ShardChangesIT extends ESIntegTestCase { }, 60, TimeUnit.SECONDS); } + private void assertMaxSeqNoOfUpdatesIsTransferred(Index leaderIndex, Index followerIndex, int 
numberOfShards) throws Exception { + assertBusy(() -> { + long[] msuOnLeader = new long[numberOfShards]; + for (int i = 0; i < msuOnLeader.length; i++) { + msuOnLeader[i] = SequenceNumbers.UNASSIGNED_SEQ_NO; + } + Set leaderNodes = internalCluster().nodesInclude(leaderIndex.getName()); + for (String leaderNode : leaderNodes) { + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, leaderNode); + for (int i = 0; i < numberOfShards; i++) { + IndexShard shard = indicesService.getShardOrNull(new ShardId(leaderIndex, i)); + if (shard != null) { + try { + msuOnLeader[i] = SequenceNumbers.max(msuOnLeader[i], shard.getMaxSeqNoOfUpdatesOrDeletes()); + } catch (AlreadyClosedException ignored) { + return; + } + } + } + } + + Set followerNodes = internalCluster().nodesInclude(followerIndex.getName()); + for (String followerNode : followerNodes) { + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, followerNode); + for (int i = 0; i < numberOfShards; i++) { + IndexShard shard = indicesService.getShardOrNull(new ShardId(leaderIndex, i)); + if (shard != null) { + try { + assertThat(shard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(msuOnLeader[i])); + } catch (AlreadyClosedException ignored) { + + } + } + } + } + }); + } + public static FollowIndexAction.Request createFollowRequest(String leaderIndex, String followerIndex) { FollowIndexAction.Request request = new FollowIndexAction.Request(); request.setLeaderIndex(leaderIndex); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 8da5a75b6bb..3f31f8a2df7 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -16,6 +16,7 @@ import 
org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ccr.Ccr; import org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator.AutoFollower; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; @@ -85,7 +86,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { void getLeaderClusterState(Map headers, String leaderClusterAlias, BiConsumer handler) { - assertThat(headers, sameInstance(autoFollowHeaders.get("remote"))); + assertThat(headers, equalTo(autoFollowHeaders.get("remote"))); handler.accept(leaderState, null); } @@ -94,7 +95,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { FollowIndexAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { - assertThat(headers, sameInstance(autoFollowHeaders.get("remote"))); + assertThat(headers, equalTo(autoFollowHeaders.get("remote"))); assertThat(followRequest.getLeaderIndex(), equalTo("remote:logs-20190101")); assertThat(followRequest.getFollowerIndex(), equalTo("logs-20190101")); successHandler.run(); @@ -318,7 +319,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase { .metaData(imdBuilder) .build(); - List result = AutoFollower.getLeaderIndicesToFollow(autoFollowPattern, leaderState, followerState, Collections.emptyList()); + List result = AutoFollower.getLeaderIndicesToFollow("remote", autoFollowPattern, leaderState, followerState, + Collections.emptyList()); result.sort(Comparator.comparing(Index::getName)); assertThat(result.size(), equalTo(5)); assertThat(result.get(0).getName(), equalTo("metrics-0")); @@ -328,7 +330,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(result.get(4).getName(), equalTo("metrics-4")); List followedIndexUUIDs = 
Collections.singletonList(leaderState.metaData().index("metrics-2").getIndexUUID()); - result = AutoFollower.getLeaderIndicesToFollow(autoFollowPattern, leaderState, followerState, followedIndexUUIDs); + result = AutoFollower.getLeaderIndicesToFollow("remote", autoFollowPattern, leaderState, followerState, followedIndexUUIDs); result.sort(Comparator.comparing(Index::getName)); assertThat(result.size(), equalTo(4)); assertThat(result.get(0).getName(), equalTo("metrics-0")); @@ -337,6 +339,34 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(result.get(3).getName(), equalTo("metrics-4")); } + public void testGetLeaderIndicesToFollowDoNotSelectFollowIndicesInTheSameCluster() { + MetaData.Builder imdBuilder = MetaData.builder(); + imdBuilder.put(IndexMetaData.builder("metrics-0") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0)); + imdBuilder.put(IndexMetaData.builder("metrics-1") + .putCustom(Ccr.CCR_CUSTOM_METADATA_KEY, new HashMap<>()) + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0)); + + AutoFollowPattern autoFollowPattern = + new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null); + imdBuilder.putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(Collections.singletonMap("remote", autoFollowPattern), + Collections.emptyMap(), Collections.emptyMap())); + + ClusterState clusterState = ClusterState.builder(new ClusterName("name")) + .metaData(imdBuilder) + .build(); + + List result = AutoFollower.getLeaderIndicesToFollow("_local_", autoFollowPattern, clusterState, + clusterState, Collections.emptyList()); + result.sort(Comparator.comparing(Index::getName)); + assertThat(result.size(), equalTo(1)); + assertThat(result.get(0).getName(), equalTo("metrics-0")); + } + public void testGetFollowerIndexName() { AutoFollowPattern autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null, 
null, null, null, null, null, null); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternRequestTests.java new file mode 100644 index 00000000000..05b731127cc --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternRequestTests.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction; + +public class GetAutoFollowPatternRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return GetAutoFollowPatternAction.Request::new; + } + + @Override + protected GetAutoFollowPatternAction.Request createTestInstance() { + GetAutoFollowPatternAction.Request request = new GetAutoFollowPatternAction.Request(); + if (randomBoolean()) { + request.setLeaderClusterAlias(randomAlphaOfLength(4)); + } + return request; + } +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java new file mode 100644 index 00000000000..7be2e43bde4 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/GetAutoFollowPatternResponseTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; +import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public class GetAutoFollowPatternResponseTests extends AbstractStreamableTestCase { + + @Override + protected GetAutoFollowPatternAction.Response createBlankInstance() { + return new GetAutoFollowPatternAction.Response(); + } + + @Override + protected GetAutoFollowPatternAction.Response createTestInstance() { + int numPatterns = randomIntBetween(1, 8); + Map patterns = new HashMap<>(numPatterns); + for (int i = 0; i < numPatterns; i++) { + AutoFollowPattern autoFollowPattern = new AutoFollowPattern( + Collections.singletonList(randomAlphaOfLength(4)), + randomAlphaOfLength(4), + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(0, Integer.MAX_VALUE), + randomNonNegativeLong(), + randomIntBetween(0, Integer.MAX_VALUE), + randomIntBetween(0, Integer.MAX_VALUE), + TimeValue.timeValueMillis(500), + TimeValue.timeValueMillis(500)); + patterns.put(randomAlphaOfLength(4), autoFollowPattern); + } + return new GetAutoFollowPatternAction.Response(patterns); + } +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java index 04cf45f9d2c..a99e930188c 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardChangesResponseTests.java @@ -15,6 +15,7 @@ public class 
ShardChangesResponseTests extends AbstractStreamableTestCase operations, + long maxSeqNoOfUpdates, Consumer handler, Consumer errorHandler) { for(Translog.Operation op : operations) { @@ -157,7 +158,8 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { assert from >= testRun.finalExpectedGlobalCheckpoint; final long globalCheckpoint = tracker.getCheckpoint(); final long maxSeqNo = tracker.getMaxSeqNo(); - handler.accept(new ShardChangesAction.Response(0L,globalCheckpoint, maxSeqNo, new Translog.Operation[0])); + handler.accept(new ShardChangesAction.Response( + 0L, globalCheckpoint, maxSeqNo, randomNonNegativeLong(), new Translog.Operation[0])); } }; threadPool.generic().execute(task); @@ -231,6 +233,7 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { mappingVersion, nextGlobalCheckPoint, nextGlobalCheckPoint, + randomNonNegativeLong(), ops.toArray(EMPTY)) ) ); @@ -253,6 +256,7 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { mappingVersion, prevGlobalCheckpoint, prevGlobalCheckpoint, + randomNonNegativeLong(), EMPTY ); item.add(new TestResponse(null, mappingVersion, response)); @@ -269,6 +273,7 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase { mappingVersion, localLeaderGCP, localLeaderGCP, + randomNonNegativeLong(), ops.toArray(EMPTY) ); item.add(new TestResponse(null, mappingVersion, response)); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java index 7e813ae4cf6..68ed46d9541 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java @@ -407,7 +407,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { assertThat(shardChangesRequests.get(0)[1], equalTo(64L)); 
shardChangesRequests.clear(); - task.innerHandleReadResponse(0L, 63L, new ShardChangesAction.Response(0, 0, 0, new Translog.Operation[0])); + task.innerHandleReadResponse(0L, 63L, new ShardChangesAction.Response(0, 0, 0, 100, new Translog.Operation[0])); assertThat(shardChangesRequests.size(), equalTo(1)); assertThat(shardChangesRequests.get(0)[0], equalTo(0L)); @@ -714,6 +714,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { @Override protected void innerSendBulkShardOperationsRequest( final List operations, + final long maxSeqNoOfUpdates, final Consumer handler, final Consumer errorHandler) { bulkShardOperationRequests.add(operations); @@ -749,6 +750,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { mappingVersions.poll(), leaderGlobalCheckpoints.poll(), maxSeqNos.poll(), + randomNonNegativeLong(), operations ); handler.accept(response); @@ -785,6 +787,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase { mappingVersion, leaderGlobalCheckPoint, leaderGlobalCheckPoint, + randomNonNegativeLong(), ops.toArray(new Translog.Operation[0]) ); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java index 0bb263d3c44..2009d74f7c7 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java @@ -240,10 +240,12 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest @Override protected void innerSendBulkShardOperationsRequest( final List operations, + final long maxSeqNoOfUpdates, final Consumer handler, final Consumer errorHandler) { Runnable task = () -> { - BulkShardOperationsRequest request = new BulkShardOperationsRequest(params.getFollowShardId(), operations); + 
BulkShardOperationsRequest request = new BulkShardOperationsRequest( + params.getFollowShardId(), operations, maxSeqNoOfUpdates); ActionListener listener = ActionListener.wrap(handler::accept, errorHandler); new CCRAction(request, listener, followerGroup).execute(); }; @@ -262,8 +264,10 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest for (IndexShard indexShard : indexShards) { try { final SeqNoStats seqNoStats = indexShard.seqNoStats(); + final long maxSeqNoOfUpdatesOrDeletes = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); if (from > seqNoStats.getGlobalCheckpoint()) { - handler.accept(ShardChangesAction.getResponse(1L, seqNoStats, ShardChangesAction.EMPTY_OPERATIONS_ARRAY)); + handler.accept(ShardChangesAction.getResponse(1L, seqNoStats, + maxSeqNoOfUpdatesOrDeletes, ShardChangesAction.EMPTY_OPERATIONS_ARRAY)); return; } Translog.Operation[] ops = ShardChangesAction.getOperations(indexShard, seqNoStats.getGlobalCheckpoint(), from, @@ -273,6 +277,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest 1L, seqNoStats.getGlobalCheckpoint(), seqNoStats.getMaxSeqNo(), + maxSeqNoOfUpdatesOrDeletes, ops ); handler.accept(response); @@ -315,6 +320,9 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest for (IndexShard followingShard : follower) { assertThat(followingShard.estimateNumberOfHistoryOperations("test", 0), equalTo(totalOps)); } + for (IndexShard followingShard : follower) { + assertThat(followingShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(leader.getPrimary().getMaxSeqNoOfUpdatesOrDeletes())); + } } class CCRAction extends ReplicationAction { @@ -327,7 +335,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest protected PrimaryResult performOnPrimary(IndexShard primary, BulkShardOperationsRequest request) throws Exception { TransportWriteAction.WritePrimaryResult result = 
TransportBulkShardOperationsAction.shardOperationOnPrimary(primary.shardId(), request.getOperations(), - primary, logger); + request.getMaxSeqNoOfUpdatesOrDeletes(), primary, logger); return new PrimaryResult(result.replicaRequest(), result.finalResponseIfSuccessful); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java new file mode 100644 index 00000000000..187e404abbb --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportGetAutoFollowPatternActionTests.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.collection.IsMapContaining.hasEntry; + +public class TransportGetAutoFollowPatternActionTests extends ESTestCase { + + public void testGetAutoFollowPattern() { + Map patterns = new HashMap<>(); + patterns.put("test_alias1", + new AutoFollowPattern(Collections.singletonList("index-*"), null, null, null, null, null, null, null, null)); + patterns.put("test_alias2", + new AutoFollowPattern(Collections.singletonList("index-*"), null, null, null, null, null, null, null, null)); + MetaData metaData = MetaData.builder() + 
.putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap())) + .build(); + + Map result = TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias1"); + assertThat(result.size(), equalTo(1)); + assertThat(result, hasEntry("test_alias1", patterns.get("test_alias1"))); + + result = TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, null); + assertThat(result.size(), equalTo(2)); + assertThat(result, hasEntry("test_alias1", patterns.get("test_alias1"))); + assertThat(result, hasEntry("test_alias2", patterns.get("test_alias2"))); + + expectThrows(ResourceNotFoundException.class, + () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "another_alias")); + } + + public void testGetAutoFollowPatternNoAutoFollowPatterns() { + AutoFollowMetadata autoFollowMetadata = + new AutoFollowMetadata(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()); + MetaData metaData = MetaData.builder() + .putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata) + .build(); + expectThrows(ResourceNotFoundException.class, + () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias")); + } + + public void testGetAutoFollowPatternNoAutoFollowMetadata() { + MetaData metaData = MetaData.builder().build(); + expectThrows(ResourceNotFoundException.class, + () -> TransportGetAutoFollowPatternAction.getAutoFollowPattern(metaData, "test_alias")); + } + +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java index 4c6c0c060e4..88e6d4113d3 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/bulk/BulkShardOperationsTests.java @@ -59,7 +59,8 @@ public 
class BulkShardOperationsTests extends IndexShardTestCase { } final TransportWriteAction.WritePrimaryResult result = - TransportBulkShardOperationsAction.shardOperationOnPrimary(followerPrimary.shardId(), operations, followerPrimary, logger); + TransportBulkShardOperationsAction.shardOperationOnPrimary(followerPrimary.shardId(), operations, + numOps - 1, followerPrimary, logger); try (Translog.Snapshot snapshot = followerPrimary.getHistoryOperations("test", 0)) { assertThat(snapshot.totalOperations(), equalTo(operations.size())); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java index c9a4de8f03c..c4a929969d2 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java @@ -127,6 +127,7 @@ public class FollowingEngineTests extends ESTestCase { final VersionType versionType = randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL, VersionType.EXTERNAL_GTE, VersionType.FORCE); final List ops = EngineTestCase.generateSingleDocHistory(true, versionType, 2, 2, 20, "id"); + ops.stream().mapToLong(op -> op.seqNo()).max().ifPresent(followingEngine::advanceMaxSeqNoOfUpdatesOrDeletes); EngineTestCase.assertOpsOnReplica(ops, followingEngine, true, logger); } } @@ -160,6 +161,7 @@ public class FollowingEngineTests extends ESTestCase { seqNo, Engine.Operation.Origin.PRIMARY, (followingEngine, delete) -> { + followingEngine.advanceMaxSeqNoOfUpdatesOrDeletes(randomLongBetween(seqNo, Long.MAX_VALUE)); final Engine.DeleteResult result = followingEngine.delete(delete); assertThat(result.getSeqNo(), equalTo(seqNo)); }); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollectorTestCase.java 
b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollectorTestCase.java new file mode 100644 index 00000000000..f98e541a9d9 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollectorTestCase.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.monitoring.collector.ccr; + +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.monitoring.BaseCollectorTestCase; + +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public abstract class AbstractCcrCollectorTestCase extends BaseCollectorTestCase { + + public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() { + final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); + final boolean ccrAllowed = randomBoolean(); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); + + // this controls the blockage + when(licenseState.isMonitoringAllowed()).thenReturn(false); + when(licenseState.isCcrAllowed()).thenReturn(ccrAllowed); + + final AbstractCcrCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } + } + + public void testShouldCollectReturnsFalseIfNotMaster() { + // regardless of CCR being enabled + final Settings 
settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); + + when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); + when(licenseState.isCcrAllowed()).thenReturn(randomBoolean()); + // this controls the blockage + final boolean isElectedMaster = false; + + final AbstractCcrCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + } + + public void testShouldCollectReturnsFalseIfCCRIsDisabled() { + // this is controls the blockage + final Settings settings = ccrDisabledSettings(); + + when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); + when(licenseState.isCcrAllowed()).thenReturn(randomBoolean()); + + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); + + final AbstractCcrCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } + } + + public void testShouldCollectReturnsFalseIfCCRIsNotAllowed() { + final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); + + when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); + // this is controls the blockage + when(licenseState.isCcrAllowed()).thenReturn(false); + final boolean isElectedMaster = randomBoolean(); + whenLocalNodeElectedMaster(isElectedMaster); + + final AbstractCcrCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(false)); + + if (isElectedMaster) { + verify(licenseState).isMonitoringAllowed(); + } + } + + public void testShouldCollectReturnsTrue() { + final Settings settings = ccrEnabledSettings(); + + when(licenseState.isMonitoringAllowed()).thenReturn(true); + when(licenseState.isCcrAllowed()).thenReturn(true); + 
final boolean isElectedMaster = true; + + final AbstractCcrCollector collector = createCollector(settings, clusterService, licenseState, client); + + assertThat(collector.shouldCollect(isElectedMaster), is(true)); + + verify(licenseState).isMonitoringAllowed(); + } + + abstract AbstractCcrCollector createCollector(Settings settings, + ClusterService clusterService, + XPackLicenseState licenseState, + Client client); + + private Settings ccrEnabledSettings() { + // since it's the default, we want to ensure we test both with/without it + return randomBoolean() ? Settings.EMPTY : Settings.builder().put(XPackSettings.CCR_ENABLED_SETTING.getKey(), true).build(); + } + + private Settings ccrDisabledSettings() { + return Settings.builder().put(XPackSettings.CCR_ENABLED_SETTING.getKey(), false).build(); + } + +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDocTests.java new file mode 100644 index 00000000000..ce1c0136677 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDocTests.java @@ -0,0 +1,163 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.monitoring.collector.ccr; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.xpack.core.ccr.AutoFollowStats; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; +import org.elasticsearch.xpack.monitoring.exporter.BaseMonitoringDocTestCase; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.NavigableMap; +import java.util.TreeMap; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase { + + private AutoFollowStats autoFollowStats; + + @Before + public void instantiateAutoFollowStats() { + autoFollowStats = new AutoFollowStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), + Collections.emptyNavigableMap()); + } + + @Override + protected AutoFollowStatsMonitoringDoc createMonitoringDoc(String cluster, + long timestamp, + long interval, + MonitoringDoc.Node node, + MonitoredSystem system, + String type, + String id) { + return new AutoFollowStatsMonitoringDoc(cluster, timestamp, interval, node, 
autoFollowStats); + } + + @Override + protected void assertMonitoringDoc(AutoFollowStatsMonitoringDoc document) { + assertThat(document.getSystem(), is(MonitoredSystem.ES)); + assertThat(document.getType(), is(AutoFollowStatsMonitoringDoc.TYPE)); + assertThat(document.getId(), nullValue()); + assertThat(document.stats(), is(autoFollowStats)); + } + + @Override + public void testToXContent() throws IOException { + final long timestamp = System.currentTimeMillis(); + final long intervalMillis = System.currentTimeMillis(); + final long nodeTimestamp = System.currentTimeMillis(); + final MonitoringDoc.Node node = new MonitoringDoc.Node("_uuid", "_host", "_addr", "_ip", "_name", nodeTimestamp); + + final NavigableMap recentAutoFollowExceptions = + new TreeMap<>(Collections.singletonMap( + randomAlphaOfLength(4), + new ElasticsearchException("cannot follow index"))); + final AutoFollowStats autoFollowStats = + new AutoFollowStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), recentAutoFollowExceptions); + + final AutoFollowStatsMonitoringDoc document = + new AutoFollowStatsMonitoringDoc("_cluster", timestamp, intervalMillis, node, autoFollowStats); + final BytesReference xContent = XContentHelper.toXContent(document, XContentType.JSON, false); + assertThat( + xContent.utf8ToString(), + equalTo( + "{" + + "\"cluster_uuid\":\"_cluster\"," + + "\"timestamp\":\"" + new DateTime(timestamp, DateTimeZone.UTC).toString() + "\"," + + "\"interval_ms\":" + intervalMillis + "," + + "\"type\":\"ccr_auto_follow_stats\"," + + "\"source_node\":{" + + "\"uuid\":\"_uuid\"," + + "\"host\":\"_host\"," + + "\"transport_address\":\"_addr\"," + + "\"ip\":\"_ip\"," + + "\"name\":\"_name\"," + + "\"timestamp\":\"" + new DateTime(nodeTimestamp, DateTimeZone.UTC).toString() + "\"" + + "}," + + "\"ccr_auto_follow_stats\":{" + + "\"number_of_failed_follow_indices\":" + autoFollowStats.getNumberOfFailedFollowIndices() + "," + + 
"\"number_of_failed_remote_cluster_state_requests\":" + + autoFollowStats.getNumberOfFailedRemoteClusterStateRequests() + "," + + "\"number_of_successful_follow_indices\":" + autoFollowStats.getNumberOfSuccessfulFollowIndices() + "," + + "\"recent_auto_follow_errors\":[" + + "{" + + "\"leader_index\":\"" + recentAutoFollowExceptions.keySet().iterator().next() + "\"," + + "\"auto_follow_exception\":{" + + "\"type\":\"exception\"," + + "\"reason\":\"cannot follow index\"" + + "}" + + "}" + + "]" + + "}" + + "}")); + } + + public void testShardFollowNodeTaskStatusFieldsMapped() throws IOException { + final NavigableMap fetchExceptions = + new TreeMap<>(Collections.singletonMap("leader_index", new ElasticsearchException("cannot follow index"))); + final AutoFollowStats status = new AutoFollowStats(1, 0, 2, fetchExceptions); + XContentBuilder builder = jsonBuilder(); + builder.value(status); + Map serializedStatus = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false); + + Map template = + XContentHelper.convertToMap(XContentType.JSON.xContent(), MonitoringTemplateUtils.loadTemplate("es"), false); + Map autoFollowStatsMapping = + (Map) XContentMapValues.extractValue("mappings.doc.properties.ccr_auto_follow_stats.properties", template); + + assertThat(serializedStatus.size(), equalTo(autoFollowStatsMapping.size())); + for (Map.Entry entry : serializedStatus.entrySet()) { + String fieldName = entry.getKey(); + Map fieldMapping = (Map) autoFollowStatsMapping.get(fieldName); + assertThat(fieldMapping, notNullValue()); + + Object fieldValue = entry.getValue(); + String fieldType = (String) fieldMapping.get("type"); + if (fieldValue instanceof Long || fieldValue instanceof Integer) { + assertThat("expected long field type for field [" + fieldName + "]", fieldType, + anyOf(equalTo("long"), equalTo("integer"))); + } else if (fieldValue instanceof String) { + assertThat("expected keyword field type for field [" + fieldName + "]", fieldType, 
+ anyOf(equalTo("keyword"), equalTo("text"))); + } else { + // Manual test specific object fields and if not just fail: + if (fieldName.equals("recent_auto_follow_errors")) { + assertThat(fieldType, equalTo("nested")); + assertThat(((Map) fieldMapping.get("properties")).size(), equalTo(2)); + assertThat(XContentMapValues.extractValue("properties.leader_index.type", fieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("properties.auto_follow_exception.type", fieldMapping), equalTo("object")); + + Map exceptionFieldMapping = + (Map) XContentMapValues.extractValue("properties.auto_follow_exception.properties", fieldMapping); + assertThat(exceptionFieldMapping.size(), equalTo(2)); + assertThat(XContentMapValues.extractValue("type.type", exceptionFieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("reason.type", exceptionFieldMapping), equalTo("text")); + } else { + fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]"); + } + } + } + } +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollectorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollectorTests.java new file mode 100644 index 00000000000..7a302503d2d --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollectorTests.java @@ -0,0 +1,85 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.monitoring.collector.ccr; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.ccr.AutoFollowStats; +import org.elasticsearch.xpack.core.ccr.action.AutoFollowStatsAction; +import org.elasticsearch.xpack.core.ccr.client.CcrClient; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; + +import java.util.Collection; + +import static org.elasticsearch.xpack.monitoring.MonitoringTestUtils.randomMonitoringNode; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class CcrAutoFollowStatsCollectorTests extends AbstractCcrCollectorTestCase { + + @Override + AbstractCcrCollector createCollector(Settings settings, ClusterService clusterService, XPackLicenseState licenseState, Client client) { + return new CcrAutoFollowStatsCollector(settings, clusterService, licenseState, client); + } + + public void testDoCollect() throws Exception { + final String clusterUuid = randomAlphaOfLength(5); + whenClusterStateWithUUID(clusterUuid); + + final MonitoringDoc.Node node = randomMonitoringNode(random()); + final CcrClient client = mock(CcrClient.class); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + + final TimeValue timeout = 
TimeValue.timeValueSeconds(randomIntBetween(1, 120)); + withCollectionTimeout(CcrAutoFollowStatsCollector.CCR_AUTO_FOLLOW_STATS_TIMEOUT, timeout); + + final CcrAutoFollowStatsCollector collector = + new CcrAutoFollowStatsCollector(Settings.EMPTY, clusterService, licenseState, client, threadContext); + assertEquals(timeout, collector.getCollectionTimeout()); + + final AutoFollowStats autoFollowStats = mock(AutoFollowStats.class); + + @SuppressWarnings("unchecked") + final ActionFuture future = (ActionFuture)mock(ActionFuture.class); + final AutoFollowStatsAction.Response response = new AutoFollowStatsAction.Response(autoFollowStats); + + when(client.autoFollowStats(any())).thenReturn(future); + when(future.actionGet(timeout)).thenReturn(response); + + final long interval = randomNonNegativeLong(); + + final Collection documents = collector.doCollect(node, interval, clusterState); + verify(clusterState).metaData(); + verify(metaData).clusterUUID(); + + assertThat(documents, hasSize(1)); + final AutoFollowStatsMonitoringDoc document = (AutoFollowStatsMonitoringDoc) documents.iterator().next(); + + assertThat(document.getCluster(), is(clusterUuid)); + assertThat(document.getTimestamp(), greaterThan(0L)); + assertThat(document.getIntervalMillis(), equalTo(interval)); + assertThat(document.getNode(), equalTo(node)); + assertThat(document.getSystem(), is(MonitoredSystem.ES)); + assertThat(document.getType(), is(AutoFollowStatsMonitoringDoc.TYPE)); + assertThat(document.getId(), nullValue()); + assertThat(document.stats(), is(autoFollowStats)); + } + +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java index aaf3a61643b..b0f2a00d2dc 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java +++ 
b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollectorTests.java @@ -7,17 +7,18 @@ package org.elasticsearch.xpack.monitoring.collector.ccr; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; import org.elasticsearch.xpack.core.ccr.client.CcrClient; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; -import org.elasticsearch.xpack.monitoring.BaseCollectorTestCase; import org.mockito.ArgumentMatcher; import java.util.ArrayList; @@ -38,89 +39,11 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -public class CcrStatsCollectorTests extends BaseCollectorTestCase { +public class CcrStatsCollectorTests extends AbstractCcrCollectorTestCase { - public void testShouldCollectReturnsFalseIfMonitoringNotAllowed() { - final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); - final boolean ccrAllowed = randomBoolean(); - final boolean isElectedMaster = randomBoolean(); - whenLocalNodeElectedMaster(isElectedMaster); - - // this controls the blockage - when(licenseState.isMonitoringAllowed()).thenReturn(false); - when(licenseState.isCcrAllowed()).thenReturn(ccrAllowed); - - final CcrStatsCollector collector = new CcrStatsCollector(settings, clusterService, licenseState, client); - - 
assertThat(collector.shouldCollect(isElectedMaster), is(false)); - if (isElectedMaster) { - verify(licenseState).isMonitoringAllowed(); - } - } - - public void testShouldCollectReturnsFalseIfNotMaster() { - // regardless of CCR being enabled - final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); - - when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); - when(licenseState.isCcrAllowed()).thenReturn(randomBoolean()); - // this controls the blockage - final boolean isElectedMaster = false; - - final CcrStatsCollector collector = new CcrStatsCollector(settings, clusterService, licenseState, client); - - assertThat(collector.shouldCollect(isElectedMaster), is(false)); - } - - public void testShouldCollectReturnsFalseIfCCRIsDisabled() { - // this is controls the blockage - final Settings settings = ccrDisabledSettings(); - - when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); - when(licenseState.isCcrAllowed()).thenReturn(randomBoolean()); - - final boolean isElectedMaster = randomBoolean(); - whenLocalNodeElectedMaster(isElectedMaster); - - final CcrStatsCollector collector = new CcrStatsCollector(settings, clusterService, licenseState, client); - - assertThat(collector.shouldCollect(isElectedMaster), is(false)); - - if (isElectedMaster) { - verify(licenseState).isMonitoringAllowed(); - } - } - - public void testShouldCollectReturnsFalseIfCCRIsNotAllowed() { - final Settings settings = randomFrom(ccrEnabledSettings(), ccrDisabledSettings()); - - when(licenseState.isMonitoringAllowed()).thenReturn(randomBoolean()); - // this is controls the blockage - when(licenseState.isCcrAllowed()).thenReturn(false); - final boolean isElectedMaster = randomBoolean(); - whenLocalNodeElectedMaster(isElectedMaster); - - final CcrStatsCollector collector = new CcrStatsCollector(settings, clusterService, licenseState, client); - - assertThat(collector.shouldCollect(isElectedMaster), is(false)); - - if (isElectedMaster) { - 
verify(licenseState).isMonitoringAllowed(); - } - } - - public void testShouldCollectReturnsTrue() { - final Settings settings = ccrEnabledSettings(); - - when(licenseState.isMonitoringAllowed()).thenReturn(true); - when(licenseState.isCcrAllowed()).thenReturn(true); - final boolean isElectedMaster = true; - - final CcrStatsCollector collector = new CcrStatsCollector(settings, clusterService, licenseState, client); - - assertThat(collector.shouldCollect(isElectedMaster), is(true)); - - verify(licenseState).isMonitoringAllowed(); + @Override + AbstractCcrCollector createCollector(Settings settings, ClusterService clusterService, XPackLicenseState licenseState, Client client) { + return new CcrStatsCollector(settings, clusterService, licenseState, client); } public void testDoCollect() throws Exception { @@ -186,15 +109,6 @@ public class CcrStatsCollectorTests extends BaseCollectorTestCase { return statuses; } - private Settings ccrEnabledSettings() { - // since it's the default, we want to ensure we test both with/without it - return randomBoolean() ? 
Settings.EMPTY : Settings.builder().put(XPackSettings.CCR_ENABLED_SETTING.getKey(), true).build(); - } - - private Settings ccrDisabledSettings() { - return Settings.builder().put(XPackSettings.CCR_ENABLED_SETTING.getKey(), false).build(); - } - private static CcrStatsAction.StatsRequest statsRequestEq(CcrStatsAction.StatsRequest expected) { return argThat(new StatsRequestMatches(expected)); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java index a879dc9ed18..918067e6766 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseVerifier.java @@ -43,6 +43,7 @@ public class LicenseVerifier { try { byte[] signatureBytes = Base64.getDecoder().decode(license.signature()); ByteBuffer byteBuffer = ByteBuffer.wrap(signatureBytes); + @SuppressWarnings("unused") int version = byteBuffer.getInt(); int magicLen = byteBuffer.getInt(); byte[] magic = new byte[magicLen]; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java index d4440068a57..951320518b5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java @@ -29,6 +29,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; /** * Custom metadata that contains auto follow patterns and what leader indices an auto follow pattern has already followed. 
@@ -79,16 +80,19 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i public AutoFollowMetadata(Map patterns, Map> followedLeaderIndexUUIDs, Map> headers) { - this.patterns = patterns; - this.followedLeaderIndexUUIDs = followedLeaderIndexUUIDs; - this.headers = Collections.unmodifiableMap(headers); + this.patterns = Collections.unmodifiableMap(patterns); + this.followedLeaderIndexUUIDs = Collections.unmodifiableMap(followedLeaderIndexUUIDs.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> Collections.unmodifiableList(e.getValue())))); + this.headers = Collections.unmodifiableMap(headers.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> Collections.unmodifiableMap(e.getValue())))); } public AutoFollowMetadata(StreamInput in) throws IOException { - patterns = in.readMap(StreamInput::readString, AutoFollowPattern::new); - followedLeaderIndexUUIDs = in.readMapOfLists(StreamInput::readString, StreamInput::readString); - headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, - valIn -> Collections.unmodifiableMap(valIn.readMap(StreamInput::readString, StreamInput::readString)))); + this( + in.readMap(StreamInput::readString, AutoFollowPattern::new), + in.readMapOfLists(StreamInput::readString, StreamInput::readString), + in.readMap(StreamInput::readString, valIn -> valIn.readMap(StreamInput::readString, StreamInput::readString)) + ); } public Map getPatterns() { @@ -170,8 +174,8 @@ public class AutoFollowMetadata extends AbstractNamedDiffable i public static class AutoFollowPattern implements Writeable, ToXContentObject { - private static final ParseField LEADER_PATTERNS_FIELD = new ParseField("leader_patterns"); - private static final ParseField FOLLOW_PATTERN_FIELD = new ParseField("follow_pattern"); + public static final ParseField LEADER_PATTERNS_FIELD = new ParseField("leader_index_patterns"); + public static final ParseField FOLLOW_PATTERN_FIELD = new ParseField("follow_index_pattern"); 
public static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count"); public static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); public static final ParseField MAX_BATCH_SIZE_IN_BYTES = new ParseField("max_batch_size_in_bytes"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java index 7133a201f4e..6f28c450f04 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java @@ -121,28 +121,33 @@ public class AutoFollowStats implements Writeable, ToXContentObject { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); { - builder.field(NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED.getPreferredName(), numberOfFailedFollowIndices); - builder.field(NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS.getPreferredName(), numberOfFailedRemoteClusterStateRequests); - builder.field(NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED.getPreferredName(), numberOfSuccessfulFollowIndices); - builder.startArray(RECENT_AUTO_FOLLOW_ERRORS.getPreferredName()); - { - for (final Map.Entry entry : recentAutoFollowErrors.entrySet()) { + toXContentFragment(builder, params); + } + builder.endObject(); + return builder; + } + + public XContentBuilder toXContentFragment(final XContentBuilder builder, final Params params) throws IOException { + builder.field(NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED.getPreferredName(), numberOfFailedFollowIndices); + builder.field(NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS.getPreferredName(), numberOfFailedRemoteClusterStateRequests); + builder.field(NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED.getPreferredName(), numberOfSuccessfulFollowIndices); + 
builder.startArray(RECENT_AUTO_FOLLOW_ERRORS.getPreferredName()); + { + for (final Map.Entry entry : recentAutoFollowErrors.entrySet()) { + builder.startObject(); + { + builder.field(LEADER_INDEX.getPreferredName(), entry.getKey()); + builder.field(AUTO_FOLLOW_EXCEPTION.getPreferredName()); builder.startObject(); { - builder.field(LEADER_INDEX.getPreferredName(), entry.getKey()); - builder.field(AUTO_FOLLOW_EXCEPTION.getPreferredName()); - builder.startObject(); - { - ElasticsearchException.generateThrowableXContent(builder, params, entry.getValue()); - } - builder.endObject(); + ElasticsearchException.generateThrowableXContent(builder, params, entry.getValue()); } builder.endObject(); } + builder.endObject(); } - builder.endArray(); } - builder.endObject(); + builder.endArray(); return builder; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java new file mode 100644 index 00000000000..ff7f50e66c8 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/GetAutoFollowPatternAction.java @@ -0,0 +1,135 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.core.ccr.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.master.MasterNodeReadRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +public class GetAutoFollowPatternAction extends Action { + + public static final String NAME = "cluster:admin/xpack/ccr/auto_follow_pattern/get"; + public static final GetAutoFollowPatternAction INSTANCE = new GetAutoFollowPatternAction(); + + private GetAutoFollowPatternAction() { + super(NAME); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends MasterNodeReadRequest { + + private String leaderClusterAlias; + + public Request() { + } + + public Request(StreamInput in) throws IOException { + super(in); + this.leaderClusterAlias = in.readOptionalString(); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public String getLeaderClusterAlias() { + return leaderClusterAlias; + } + + public void setLeaderClusterAlias(String leaderClusterAlias) { + this.leaderClusterAlias = leaderClusterAlias; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(leaderClusterAlias); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(leaderClusterAlias, request.leaderClusterAlias); + } 
+ + @Override + public int hashCode() { + return Objects.hash(leaderClusterAlias); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private Map autoFollowPatterns; + + public Response(Map autoFollowPatterns) { + this.autoFollowPatterns = autoFollowPatterns; + } + + public Response() { + } + + public Map getAutoFollowPatterns() { + return autoFollowPatterns; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + autoFollowPatterns = in.readMap(StreamInput::readString, AutoFollowPattern::new); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeMap(autoFollowPatterns, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + for (Map.Entry entry : autoFollowPatterns.entrySet()) { + builder.startObject(entry.getKey()); + entry.getValue().toXContent(builder, params); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(autoFollowPatterns, response.autoFollowPatterns); + } + + @Override + public int hashCode() { + return Objects.hash(autoFollowPatterns); + } + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java index 93d8d1fb7d1..03fcf2c153e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/action/PutAutoFollowPatternAction.java @@ -42,15 
+42,13 @@ public class PutAutoFollowPatternAction extends Action { public static class Request extends AcknowledgedRequest implements ToXContentObject { static final ParseField LEADER_CLUSTER_ALIAS_FIELD = new ParseField("leader_cluster_alias"); - static final ParseField LEADER_INDEX_PATTERNS_FIELD = new ParseField("leader_index_patterns"); - static final ParseField FOLLOW_INDEX_NAME_PATTERN_FIELD = new ParseField("follow_index_name_pattern"); private static final ObjectParser PARSER = new ObjectParser<>("put_auto_follow_pattern_request", Request::new); static { PARSER.declareString(Request::setLeaderClusterAlias, LEADER_CLUSTER_ALIAS_FIELD); - PARSER.declareStringArray(Request::setLeaderIndexPatterns, LEADER_INDEX_PATTERNS_FIELD); - PARSER.declareString(Request::setFollowIndexNamePattern, FOLLOW_INDEX_NAME_PATTERN_FIELD); + PARSER.declareStringArray(Request::setLeaderIndexPatterns, AutoFollowPattern.LEADER_PATTERNS_FIELD); + PARSER.declareString(Request::setFollowIndexNamePattern, AutoFollowPattern.FOLLOW_PATTERN_FIELD); PARSER.declareInt(Request::setMaxBatchOperationCount, AutoFollowPattern.MAX_BATCH_OPERATION_COUNT); PARSER.declareInt(Request::setMaxConcurrentReadBatches, AutoFollowPattern.MAX_CONCURRENT_READ_BATCHES); PARSER.declareLong(Request::setMaxOperationSizeInBytes, AutoFollowPattern.MAX_BATCH_SIZE_IN_BYTES); @@ -98,7 +96,7 @@ public class PutAutoFollowPatternAction extends Action { "] is missing", validationException); } if (leaderIndexPatterns == null || leaderIndexPatterns.isEmpty()) { - validationException = addValidationError("[" + LEADER_INDEX_PATTERNS_FIELD.getPreferredName() + + validationException = addValidationError("[" + AutoFollowPattern.LEADER_PATTERNS_FIELD.getPreferredName() + "] is missing", validationException); } if (maxRetryDelay != null) { @@ -232,9 +230,9 @@ public class PutAutoFollowPatternAction extends Action { builder.startObject(); { builder.field(LEADER_CLUSTER_ALIAS_FIELD.getPreferredName(), leaderClusterAlias); - 
builder.field(LEADER_INDEX_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns); + builder.field(AutoFollowPattern.LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns); if (followIndexNamePattern != null) { - builder.field(FOLLOW_INDEX_NAME_PATTERN_FIELD.getPreferredName(), followIndexNamePattern); + builder.field(AutoFollowPattern.FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexNamePattern); } if (maxBatchOperationCount != null) { builder.field(AutoFollowPattern.MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java index 3100dae9edf..f2f76bcb846 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/client/CcrClient.java @@ -11,10 +11,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.xpack.core.ccr.action.AutoFollowStatsAction; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction; import org.elasticsearch.xpack.core.ccr.action.DeleteAutoFollowPatternAction; import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction; +import org.elasticsearch.xpack.core.ccr.action.GetAutoFollowPatternAction; import org.elasticsearch.xpack.core.ccr.action.PutAutoFollowPatternAction; import org.elasticsearch.xpack.core.ccr.action.UnfollowIndexAction; @@ -62,6 +64,17 @@ public class CcrClient { return listener; } + public void autoFollowStats(final AutoFollowStatsAction.Request request, + final ActionListener listener) { + 
client.execute(AutoFollowStatsAction.INSTANCE, request, listener); + } + + public ActionFuture autoFollowStats(final AutoFollowStatsAction.Request request) { + final PlainActionFuture listener = PlainActionFuture.newFuture(); + autoFollowStats(request, listener); + return listener; + } + public void unfollow(final UnfollowIndexAction.Request request, final ActionListener listener) { client.execute(UnfollowIndexAction.INSTANCE, request, listener); } @@ -96,4 +109,16 @@ public class CcrClient { return listener; } + public void getAutoFollowPattern( + final GetAutoFollowPatternAction.Request request, + final ActionListener listener) { + client.execute(GetAutoFollowPatternAction.INSTANCE, request, listener); + } + + public ActionFuture getAutoFollowPattern(final GetAutoFollowPatternAction.Request request) { + final PlainActionFuture listener = PlainActionFuture.newFuture(); + client.execute(GetAutoFollowPatternAction.INSTANCE, request, listener); + return listener; + } + } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java index e6ace63f44a..ef284e13942 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; @@ -88,7 +88,7 @@ public class GetOverallBucketsAction extends 
Action { public static final ParseField IGNORE_DOWNTIME = new ParseField("ignore_downtime"); public static final ParseField TIMEOUT = new ParseField("timeout"); - public static ObjectParser PARSER = new ObjectParser<>(TASK_NAME, JobParams::new); + public static ObjectParser PARSER = new ObjectParser<>(TASK_NAME, true, JobParams::new); static { PARSER.declareString(JobParams::setJobId, Job.ID); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java index 9c4a67ec61f..57a602b4cf0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java @@ -17,7 +17,7 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; @@ -141,7 +141,7 @@ public class StartDatafeedAction extends Action { public static class DatafeedParams implements XPackPlugin.XPackPersistentTaskParams { - public static ObjectParser PARSER = new ObjectParser<>(TASK_NAME, DatafeedParams::new); + public static ObjectParser PARSER = new ObjectParser<>(TASK_NAME, true, DatafeedParams::new); static { PARSER.declareString((params, datafeedId) -> params.datafeedId = datafeedId, DatafeedConfig.ID); @@ -153,7 +153,7 @@ public class StartDatafeedAction extends Action { } static long parseDateOrThrow(String date, ParseField paramName, LongSupplier now) { - DateMathParser dateMathParser = new 
DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER); + DateMathParser dateMathParser = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.toDateMathParser(); try { return dateMathParser.parse(date, now); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java index db5f29f3b1b..1ac9f081ebe 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java @@ -99,7 +99,8 @@ public class FileStructure implements ToXContentObject, Writeable { public static final ParseField SHOULD_TRIM_FIELDS = new ParseField("should_trim_fields"); public static final ParseField GROK_PATTERN = new ParseField("grok_pattern"); public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp_field"); - public static final ParseField TIMESTAMP_FORMATS = new ParseField("timestamp_formats"); + public static final ParseField JODA_TIMESTAMP_FORMATS = new ParseField("joda_timestamp_formats"); + public static final ParseField JAVA_TIMESTAMP_FORMATS = new ParseField("java_timestamp_formats"); public static final ParseField NEED_CLIENT_TIMEZONE = new ParseField("need_client_timezone"); public static final ParseField MAPPINGS = new ParseField("mappings"); public static final ParseField FIELD_STATS = new ParseField("field_stats"); @@ -123,7 +124,8 @@ public class FileStructure implements ToXContentObject, Writeable { PARSER.declareBoolean(Builder::setShouldTrimFields, SHOULD_TRIM_FIELDS); PARSER.declareString(Builder::setGrokPattern, GROK_PATTERN); PARSER.declareString(Builder::setTimestampField, TIMESTAMP_FIELD); - PARSER.declareStringArray(Builder::setTimestampFormats, TIMESTAMP_FORMATS); + PARSER.declareStringArray(Builder::setJodaTimestampFormats, 
JODA_TIMESTAMP_FORMATS); + PARSER.declareStringArray(Builder::setJavaTimestampFormats, JAVA_TIMESTAMP_FORMATS); PARSER.declareBoolean(Builder::setNeedClientTimezone, NEED_CLIENT_TIMEZONE); PARSER.declareObject(Builder::setMappings, (p, c) -> new TreeMap<>(p.map()), MAPPINGS); PARSER.declareObject(Builder::setFieldStats, (p, c) -> { @@ -150,7 +152,8 @@ public class FileStructure implements ToXContentObject, Writeable { private final Character quote; private final Boolean shouldTrimFields; private final String grokPattern; - private final List timestampFormats; + private final List jodaTimestampFormats; + private final List javaTimestampFormats; private final String timestampField; private final boolean needClientTimezone; private final SortedMap mappings; @@ -160,8 +163,9 @@ public class FileStructure implements ToXContentObject, Writeable { public FileStructure(int numLinesAnalyzed, int numMessagesAnalyzed, String sampleStart, String charset, Boolean hasByteOrderMarker, Format format, String multilineStartPattern, String excludeLinesPattern, List columnNames, Boolean hasHeaderRow, Character delimiter, Character quote, Boolean shouldTrimFields, String grokPattern, - String timestampField, List timestampFormats, boolean needClientTimezone, Map mappings, - Map fieldStats, List explanation) { + String timestampField, List jodaTimestampFormats, List javaTimestampFormats, + boolean needClientTimezone, Map mappings, Map fieldStats, + List explanation) { this.numLinesAnalyzed = numLinesAnalyzed; this.numMessagesAnalyzed = numMessagesAnalyzed; @@ -178,7 +182,10 @@ public class FileStructure implements ToXContentObject, Writeable { this.shouldTrimFields = shouldTrimFields; this.grokPattern = grokPattern; this.timestampField = timestampField; - this.timestampFormats = (timestampFormats == null) ? null : Collections.unmodifiableList(new ArrayList<>(timestampFormats)); + this.jodaTimestampFormats = + (jodaTimestampFormats == null) ? 
null : Collections.unmodifiableList(new ArrayList<>(jodaTimestampFormats)); + this.javaTimestampFormats = + (javaTimestampFormats == null) ? null : Collections.unmodifiableList(new ArrayList<>(javaTimestampFormats)); this.needClientTimezone = needClientTimezone; this.mappings = Collections.unmodifiableSortedMap(new TreeMap<>(mappings)); this.fieldStats = Collections.unmodifiableSortedMap(new TreeMap<>(fieldStats)); @@ -200,7 +207,8 @@ public class FileStructure implements ToXContentObject, Writeable { quote = in.readBoolean() ? (char) in.readVInt() : null; shouldTrimFields = in.readOptionalBoolean(); grokPattern = in.readOptionalString(); - timestampFormats = in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null; + jodaTimestampFormats = in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null; + javaTimestampFormats = in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null; timestampField = in.readOptionalString(); needClientTimezone = in.readBoolean(); mappings = Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap())); @@ -239,11 +247,17 @@ public class FileStructure implements ToXContentObject, Writeable { } out.writeOptionalBoolean(shouldTrimFields); out.writeOptionalString(grokPattern); - if (timestampFormats == null) { + if (jodaTimestampFormats == null) { out.writeBoolean(false); } else { out.writeBoolean(true); - out.writeCollection(timestampFormats, StreamOutput::writeString); + out.writeCollection(jodaTimestampFormats, StreamOutput::writeString); + } + if (javaTimestampFormats == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeCollection(javaTimestampFormats, StreamOutput::writeString); } out.writeOptionalString(timestampField); out.writeBoolean(needClientTimezone); @@ -312,8 +326,12 @@ public class FileStructure implements ToXContentObject, Writeable { return timestampField; } - public List 
getTimestampFormats() { - return timestampFormats; + public List getJodaTimestampFormats() { + return jodaTimestampFormats; + } + + public List getJavaTimestampFormats() { + return javaTimestampFormats; } public boolean needClientTimezone() { @@ -371,8 +389,11 @@ public class FileStructure implements ToXContentObject, Writeable { if (timestampField != null && timestampField.isEmpty() == false) { builder.field(TIMESTAMP_FIELD.getPreferredName(), timestampField); } - if (timestampFormats != null && timestampFormats.isEmpty() == false) { - builder.field(TIMESTAMP_FORMATS.getPreferredName(), timestampFormats); + if (jodaTimestampFormats != null && jodaTimestampFormats.isEmpty() == false) { + builder.field(JODA_TIMESTAMP_FORMATS.getPreferredName(), jodaTimestampFormats); + } + if (javaTimestampFormats != null && javaTimestampFormats.isEmpty() == false) { + builder.field(JAVA_TIMESTAMP_FORMATS.getPreferredName(), javaTimestampFormats); } builder.field(NEED_CLIENT_TIMEZONE.getPreferredName(), needClientTimezone); builder.field(MAPPINGS.getPreferredName(), mappings); @@ -396,7 +417,7 @@ public class FileStructure implements ToXContentObject, Writeable { return Objects.hash(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format, multilineStartPattern, excludeLinesPattern, columnNames, hasHeaderRow, delimiter, quote, shouldTrimFields, grokPattern, - timestampField, timestampFormats, needClientTimezone, mappings, fieldStats, explanation); + timestampField, jodaTimestampFormats, javaTimestampFormats, needClientTimezone, mappings, fieldStats, explanation); } @Override @@ -413,7 +434,6 @@ public class FileStructure implements ToXContentObject, Writeable { FileStructure that = (FileStructure) other; return this.numLinesAnalyzed == that.numLinesAnalyzed && this.numMessagesAnalyzed == that.numMessagesAnalyzed && - this.needClientTimezone == that.needClientTimezone && Objects.equals(this.sampleStart, that.sampleStart) && Objects.equals(this.charset, 
that.charset) && Objects.equals(this.hasByteOrderMarker, that.hasByteOrderMarker) && @@ -427,7 +447,9 @@ public class FileStructure implements ToXContentObject, Writeable { Objects.equals(this.shouldTrimFields, that.shouldTrimFields) && Objects.equals(this.grokPattern, that.grokPattern) && Objects.equals(this.timestampField, that.timestampField) && - Objects.equals(this.timestampFormats, that.timestampFormats) && + Objects.equals(this.jodaTimestampFormats, that.jodaTimestampFormats) && + Objects.equals(this.javaTimestampFormats, that.javaTimestampFormats) && + this.needClientTimezone == that.needClientTimezone && Objects.equals(this.mappings, that.mappings) && Objects.equals(this.fieldStats, that.fieldStats) && Objects.equals(this.explanation, that.explanation); @@ -450,7 +472,8 @@ public class FileStructure implements ToXContentObject, Writeable { private Boolean shouldTrimFields; private String grokPattern; private String timestampField; - private List timestampFormats; + private List jodaTimestampFormats; + private List javaTimestampFormats; private boolean needClientTimezone; private Map mappings; private Map fieldStats = Collections.emptyMap(); @@ -539,8 +562,13 @@ public class FileStructure implements ToXContentObject, Writeable { return this; } - public Builder setTimestampFormats(List timestampFormats) { - this.timestampFormats = timestampFormats; + public Builder setJodaTimestampFormats(List jodaTimestampFormats) { + this.jodaTimestampFormats = jodaTimestampFormats; + return this; + } + + public Builder setJavaTimestampFormats(List javaTimestampFormats) { + this.javaTimestampFormats = javaTimestampFormats; return this; } @@ -652,11 +680,21 @@ public class FileStructure implements ToXContentObject, Writeable { throw new IllegalStateException("enum value [" + format + "] missing from switch."); } - if ((timestampField == null) != (timestampFormats == null || timestampFormats.isEmpty())) { - throw new IllegalArgumentException("Timestamp field and timestamp 
formats must both be specified or neither be specified."); + boolean isTimestampFieldSpecified = (timestampField != null); + boolean isJodaTimestampFormatsSpecified = (jodaTimestampFormats != null && jodaTimestampFormats.isEmpty() == false); + boolean isJavaTimestampFormatsSpecified = (javaTimestampFormats != null && javaTimestampFormats.isEmpty() == false); + + if (isTimestampFieldSpecified != isJodaTimestampFormatsSpecified) { + throw new IllegalArgumentException( + "Timestamp field and Joda timestamp formats must both be specified or neither be specified."); } - if (needClientTimezone && timestampField == null) { + if (isTimestampFieldSpecified != isJavaTimestampFormatsSpecified) { + throw new IllegalArgumentException( + "Timestamp field and Java timestamp formats must both be specified or neither be specified."); + } + + if (needClientTimezone && isTimestampFieldSpecified == false) { throw new IllegalArgumentException("Client timezone cannot be needed if there is no timestamp field."); } @@ -670,7 +708,7 @@ public class FileStructure implements ToXContentObject, Writeable { return new FileStructure(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format, multilineStartPattern, excludeLinesPattern, columnNames, hasHeaderRow, delimiter, quote, shouldTrimFields, grokPattern, - timestampField, timestampFormats, needClientTimezone, mappings, fieldStats, explanation); + timestampField, jodaTimestampFormats, javaTimestampFormats, needClientTimezone, mappings, fieldStats, explanation); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java index 57bc360d64e..d4ccc22d32a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/scheduler/Cron.java @@ -254,7 +254,6 @@ public class Cron implements 
ToXContentFragment { private transient boolean lastdayOfMonth = false; private transient boolean nearestWeekday = false; private transient int lastdayOffset = 0; - private transient boolean expressionParsed = false; public static final int MAX_YEAR = Calendar.getInstance(UTC, Locale.ROOT).get(Calendar.YEAR) + 100; @@ -802,7 +801,6 @@ public class Cron implements ToXContentFragment { //////////////////////////////////////////////////////////////////////////// private void buildExpression(String expression) { - expressionParsed = true; try { @@ -1214,32 +1212,6 @@ public class Cron implements ToXContentFragment { return buf.toString(); } - private static String expressionSetSummary(java.util.ArrayList list) { - - if (list.contains(NO_SPEC)) { - return "?"; - } - if (list.contains(ALL_SPEC)) { - return "*"; - } - - StringBuilder buf = new StringBuilder(); - - Iterator itr = list.iterator(); - boolean first = true; - while (itr.hasNext()) { - Integer iVal = itr.next(); - String val = iVal.toString(); - if (!first) { - buf.append(","); - } - buf.append(val); - first = false; - } - - return buf.toString(); - } - private static int skipWhiteSpace(int i, String s) { for (; i < s.length() && (s.charAt(i) == ' ' || s.charAt(i) == '\t'); i++) { ; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java index a481f880311..97f8eb5fa11 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java @@ -5,6 +5,9 @@ */ package org.elasticsearch.xpack.core.security; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.ClearScrollRequest; 
import org.elasticsearch.action.search.SearchRequest; @@ -12,7 +15,6 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.Client; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.search.SearchHit; @@ -25,6 +27,7 @@ import java.util.function.Function; public final class ScrollHelper { + private static final Logger LOGGER = LogManager.getLogger(ScrollHelper.class); private ScrollHelper() {} /** @@ -35,13 +38,15 @@ public final class ScrollHelper { Function hitParser) { final List results = new ArrayList<>(); if (request.scroll() == null) { // we do scroll by default lets see if we can get rid of this at some point. - request.scroll(TimeValue.timeValueSeconds(10L)); + throw new IllegalArgumentException("request must have scroll set"); } final Consumer clearScroll = (response) -> { if (response != null && response.getScrollId() != null) { ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); clearScrollRequest.addScrollId(response.getScrollId()); - client.clearScroll(clearScrollRequest, ActionListener.wrap((r) -> {}, (e) -> {})); + client.clearScroll(clearScrollRequest, ActionListener.wrap((r) -> {}, e -> + LOGGER.warn(new ParameterizedMessage("clear scroll failed for scroll id [{}]", response.getScrollId()), e) + )); } }; // This function is MADNESS! But it works, don't think about it too hard... 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index 144d2877463..53d6c328f5d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -23,7 +23,6 @@ import org.elasticsearch.xpack.core.security.support.Automatons; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; @@ -44,7 +43,6 @@ public final class FieldPermissions implements Accountable { private static final long BASE_FIELD_PERM_DEF_BYTES = RamUsageEstimator.shallowSizeOf(new FieldPermissionsDefinition(null, null)); private static final long BASE_FIELD_GROUP_BYTES = RamUsageEstimator.shallowSizeOf(new FieldGrantExcludeGroup(null, null)); - private static final long BASE_HASHSET_SIZE = RamUsageEstimator.shallowSizeOfInstance(HashSet.class); private static final long BASE_HASHSET_ENTRY_SIZE; static { HashMap map = new HashMap<>(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatchSourceBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatchSourceBuilder.java index b82e9b64109..001a430ddb1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatchSourceBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/client/WatchSourceBuilder.java @@ -185,7 +185,6 @@ public class WatchSourceBuilder implements ToXContentObject { static class TransformedAction implements ToXContentObject { - private final String id; private final Action action; @Nullable private final 
TimeValue throttlePeriod; @Nullable private final Condition condition; @@ -193,7 +192,6 @@ public class WatchSourceBuilder implements ToXContentObject { TransformedAction(String id, Action action, @Nullable TimeValue throttlePeriod, @Nullable Condition condition, @Nullable Transform transform) { - this.id = id; this.throttlePeriod = throttlePeriod; this.condition = condition; this.transform = transform; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java index 097d136c629..991f9ba3323 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherDateTimeUtils.java @@ -9,8 +9,8 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -26,7 +26,7 @@ import java.util.concurrent.TimeUnit; public class WatcherDateTimeUtils { public static final FormatDateTimeFormatter dateTimeFormatter = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; - public static final DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter); + public static final DateMathParser dateMathParser = dateTimeFormatter.toDateMathParser(); private WatcherDateTimeUtils() { } diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index 
8464f495371..2620fee9fd1 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -1008,6 +1008,38 @@ "type": "long" } } + }, + "ccr_auto_follow_stats" : { + "properties": { + "number_of_failed_follow_indices": { + "type": "long" + }, + "number_of_failed_remote_cluster_state_requests": { + "type": "long" + }, + "number_of_successful_follow_indices": { + "type": "long" + }, + "recent_auto_follow_errors": { + "type": "nested", + "properties": { + "leader_index": { + "type": "keyword" + }, + "auto_follow_exception": { + "type": "object", + "properties": { + "type": { + "type": "keyword" + }, + "reason": { + "type": "text" + } + } + } + } + } + } } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java index afa1a8d6796..0113634a882 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/TestUtils.java @@ -10,10 +10,10 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -50,7 +50,7 @@ import static org.junit.Assert.assertThat; public class TestUtils { private static final FormatDateTimeFormatter formatDateTimeFormatter = Joda.forPattern("yyyy-MM-dd"); - private static final DateMathParser dateMathParser = new 
DateMathParser(formatDateTimeFormatter); + private static final DateMathParser dateMathParser = formatDateTimeFormatter.toDateMathParser(); private static final DateTimeFormatter dateTimeFormatter = formatDateTimeFormatter.printer(); public static String dateMathString(String time, final long now) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/DatafeedParamsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/DatafeedParamsTests.java new file mode 100644 index 00000000000..24a6dbacfad --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/DatafeedParamsTests.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +public class DatafeedParamsTests extends AbstractSerializingTestCase { + @Override + protected StartDatafeedAction.DatafeedParams doParseInstance(XContentParser parser) throws IOException { + return StartDatafeedAction.DatafeedParams.parseRequest(null, parser); + } + + public static StartDatafeedAction.DatafeedParams createDatafeedParams() { + StartDatafeedAction.DatafeedParams params = + new StartDatafeedAction.DatafeedParams(randomAlphaOfLength(10), randomNonNegativeLong()); + if (randomBoolean()) { + params.setEndTime(randomNonNegativeLong()); + } + if (randomBoolean()) { + params.setTimeout(TimeValue.timeValueMillis(randomNonNegativeLong())); + } + return params; + } + + @Override + protected StartDatafeedAction.DatafeedParams 
createTestInstance() { + return createDatafeedParams(); + } + + @Override + protected Writeable.Reader instanceReader() { + return StartDatafeedAction.DatafeedParams::new; + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/JobParamsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/JobParamsTests.java new file mode 100644 index 00000000000..740b01abf0d --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/JobParamsTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; + +import java.io.IOException; + +public class JobParamsTests extends AbstractSerializingTestCase { + + @Override + protected OpenJobAction.JobParams doParseInstance(XContentParser parser) throws IOException { + return OpenJobAction.JobParams.parseRequest(null, parser); + } + + public static OpenJobAction.JobParams createJobParams() { + OpenJobAction.JobParams params = new OpenJobAction.JobParams(randomAlphaOfLengthBetween(1, 20)); + if (randomBoolean()) { + params.setTimeout(TimeValue.timeValueMillis(randomNonNegativeLong())); + } + return params; + } + + @Override + protected OpenJobAction.JobParams createTestInstance() { + return createJobParams(); + } + + @Override + protected Writeable.Reader instanceReader() { + return OpenJobAction.JobParams::new; + } + + @Override + protected boolean supportsUnknownFields() { + return 
true; + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/OpenJobActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/OpenJobActionRequestTests.java index de85907a83e..da1ce5fdd0f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/OpenJobActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/OpenJobActionRequestTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import org.elasticsearch.xpack.core.ml.action.OpenJobAction.Request; @@ -14,11 +13,7 @@ public class OpenJobActionRequestTests extends AbstractStreamableXContentTestCas @Override protected Request createTestInstance() { - OpenJobAction.JobParams params = new OpenJobAction.JobParams(randomAlphaOfLengthBetween(1, 20)); - if (randomBoolean()) { - params.setTimeout(TimeValue.timeValueMillis(randomNonNegativeLong())); - } - return new Request(params); + return new Request(JobParamsTests.createJobParams()); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventActionRequestTests.java index ce6a64be6c5..af94c180a1f 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventActionRequestTests.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.AbstractStreamableTestCase; 
-import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEventTests; @@ -63,7 +62,6 @@ public class PostCalendarEventActionRequestTests extends AbstractStreamableTestC public void testParseRequest_throwsIfCalendarIdsAreDifferent() throws IOException { PostCalendarEventsAction.Request sourceRequest = createTestInstance("foo"); - PostCalendarEventsAction.Request request = new PostCalendarEventsAction.Request("bar", sourceRequest.getScheduledEvents()); StringBuilder requestString = new StringBuilder(); requestString.append("{\"events\": ["); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedActionRequestTests.java index bae610c5e36..fe2bb5d6508 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedActionRequestTests.java @@ -6,10 +6,8 @@ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractStreamableXContentTestCase; -import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction.DatafeedParams; import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction.Request; import static org.hamcrest.Matchers.equalTo; @@ -18,14 +16,7 @@ public class StartDatafeedActionRequestTests extends AbstractStreamableXContentT @Override protected Request createTestInstance() { - DatafeedParams params = new DatafeedParams(randomAlphaOfLength(10), randomNonNegativeLong()); - if (randomBoolean()) { - params.setEndTime(randomNonNegativeLong()); - 
} - if (randomBoolean()) { - params.setTimeout(TimeValue.timeValueMillis(randomNonNegativeLong())); - } - return new Request(params); + return new Request(DatafeedParamsTests.createDatafeedParams()); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java index ac6c647136b..d008b31f9a6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java @@ -63,7 +63,8 @@ public class FileStructureTests extends AbstractSerializingTestCase iter = fieldValues.iterator(); TimestampMatch timestampMatch = TimestampFormatFinder.findFirstFullMatch(iter.next()); while (timestampMatch != null && iter.hasNext()) { - // To be mapped as type date all the values must match the same date format - it is + // To be mapped as type date all the values must match the same timestamp format - it is // not acceptable for all values to be dates, but with different formats if (timestampMatch.equals(TimestampFormatFinder.findFirstFullMatch(iter.next(), timestampMatch.candidateIndex)) == false) { timestampMatch = null; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java index b20658f872b..48c446c90f4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java @@ -55,7 +55,8 @@ public class JsonFileStructureFinder implements FileStructureFinder { Tuple timeField = 
FileStructureUtils.guessTimestampField(explanation, sampleRecords, overrides); if (timeField != null) { structureBuilder.setTimestampField(timeField.v1()) - .setTimestampFormats(timeField.v2().dateFormats) + .setJodaTimestampFormats(timeField.v2().jodaTimestampFormats) + .setJavaTimestampFormats(timeField.v2().javaTimestampFormats) .setNeedClientTimezone(timeField.v2().hasTimezoneDependentParsing()); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java index e6e445a3ff6..b9386fab72f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java @@ -112,7 +112,8 @@ public class TextLogFileStructureFinder implements FileStructureFinder { FileStructure structure = structureBuilder .setTimestampField(interimTimestampField) - .setTimestampFormats(bestTimestamp.v1().dateFormats) + .setJodaTimestampFormats(bestTimestamp.v1().jodaTimestampFormats) + .setJavaTimestampFormats(bestTimestamp.v1().javaTimestampFormats) .setNeedClientTimezone(bestTimestamp.v1().hasTimezoneDependentParsing()) .setGrokPattern(grokPattern) .setMappings(mappings) @@ -147,8 +148,8 @@ public class TextLogFileStructureFinder implements FileStructureFinder { for (String sampleLine : sampleLines) { TimestampMatch match = TimestampFormatFinder.findFirstMatch(sampleLine, overrides.getTimestampFormat()); if (match != null) { - TimestampMatch pureMatch = new TimestampMatch(match.candidateIndex, "", match.dateFormats, match.simplePattern, - match.grokPatternName, ""); + TimestampMatch pureMatch = new TimestampMatch(match.candidateIndex, "", match.jodaTimestampFormats, + match.javaTimestampFormats, match.simplePattern, match.grokPatternName, ""); 
timestampMatches.compute(pureMatch, (k, v) -> { if (v == null) { return new Tuple<>(weightForMatch(match.preface), new HashSet<>(Collections.singletonList(match.preface))); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java index 363b1352a54..7ed95f656b4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java @@ -12,7 +12,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.regex.Matcher; @@ -30,8 +29,12 @@ public final class TimestampFormatFinder { private static final String PREFACE = "preface"; private static final String EPILOGUE = "epilogue"; - private static final Pattern FRACTIONAL_SECOND_INTERPRETER = Pattern.compile("([:.,])(\\d{3,9})"); + private static final String FRACTIONAL_SECOND_SEPARATORS = ":.,"; + private static final Pattern FRACTIONAL_SECOND_INTERPRETER = Pattern.compile("([" + FRACTIONAL_SECOND_SEPARATORS + "])(\\d{3,9})"); private static final char DEFAULT_FRACTIONAL_SECOND_SEPARATOR = ','; + private static final Pattern FRACTIONAL_SECOND_TIMESTAMP_FORMAT_PATTERN = + Pattern.compile("([" + FRACTIONAL_SECOND_SEPARATORS + "]S{3,9})"); + private static final String DEFAULT_FRACTIONAL_SECOND_FORMAT = DEFAULT_FRACTIONAL_SECOND_SEPARATOR + "SSS"; /** * The timestamp patterns are complex and it can be slow to prove they do not @@ -48,7 +51,9 @@ public final class TimestampFormatFinder { // The end of some number (likely year or day) followed by a space then HH:mm Pattern.compile("\\d \\d{2}:\\d{2}\\b"), // HH:mm:ss surrounded by spaces - Pattern.compile(" 
\\d{2}:\\d{2}:\\d{2} ") + Pattern.compile(" \\d{2}:\\d{2}:\\d{2} "), + // Literal 'T' surrounded by numbers + Pattern.compile("\\dT\\d") ); /** @@ -59,81 +64,107 @@ public final class TimestampFormatFinder { // The TOMCAT_DATESTAMP format has to come before ISO8601 because it's basically ISO8601 but // with a space before the timezone, and because the timezone is optional in ISO8601 it will // be recognised as that with the timezone missed off if ISO8601 is checked first - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSS Z", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSS Z", "yyyy-MM-dd HH:mm:ss,SSS XX", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "\\b20\\d{2}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9} (?:Z|[+-]%{HOUR}%{MINUTE})\\b", "TOMCAT_DATESTAMP", Arrays.asList(0, 1)), // The Elasticsearch ISO8601 parser requires a literal T between the date and time, so // longhand formats are needed if there's a space instead - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSSZ", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSSZ", "yyyy-MM-dd HH:mm:ss,SSSXX", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}(?:Z|[+-]%{HOUR}%{MINUTE})\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSSZZ", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSSZZ", "yyyy-MM-dd HH:mm:ss,SSSXXX", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}[+-]%{HOUR}:%{MINUTE}\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSS", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", 
+ new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss,SSS", "yyyy-MM-dd HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ssZ", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ssZ", "yyyy-MM-dd HH:mm:ssXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)(?:Z|[+-]%{HOUR}%{MINUTE})\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ssZZ", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ssZZ", "yyyy-MM-dd HH:mm:ssXXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[+-]%{HOUR}:%{MINUTE}\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", + new CandidateTimestampFormat("YYYY-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)\\b", "TIMESTAMP_ISO8601", Arrays.asList(0, 1)), - new CandidateTimestampFormat("ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "\\b%{TIMESTAMP_ISO8601}\\b", - "TIMESTAMP_ISO8601"), - new CandidateTimestampFormat("EEE MMM dd YYYY HH:mm:ss zzz", + // When using Java time the Elasticsearch ISO8601 parser for fractional time requires that the fractional + // separator match the current JVM locale, which is too restrictive for arbitrary log file parsing + new CandidateTimestampFormat("ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSSXX", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", + 
"\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY}T%{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}(?:Z|[+-]%{HOUR}%{MINUTE})\\b", + "TIMESTAMP_ISO8601", Collections.singletonList(3)), + new CandidateTimestampFormat("ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSSXXX", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", + "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY}T%{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}[+-]%{HOUR}:%{MINUTE}\\b", + "TIMESTAMP_ISO8601", Collections.singletonList(3)), + new CandidateTimestampFormat("ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", + "\\b%{YEAR}-%{MONTHNUM}-%{MONTHDAY}T%{HOUR}:?%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}\\b", "TIMESTAMP_ISO8601", + Collections.singletonList(3)), + new CandidateTimestampFormat("ISO8601", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "\\b%{TIMESTAMP_ISO8601}\\b", + "TIMESTAMP_ISO8601", Collections.singletonList(3)), + new CandidateTimestampFormat("EEE MMM dd YYYY HH:mm:ss zzz", "EEE MMM dd yyyy HH:mm:ss zzz", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "\\b%{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) %{TZ}\\b", "DATESTAMP_RFC822", Arrays.asList(1, 2)), - new CandidateTimestampFormat("EEE MMM dd YYYY HH:mm zzz", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{4} \\d{2}:\\d{2} ", + new CandidateTimestampFormat("EEE MMM dd YYYY HH:mm zzz", "EEE MMM dd yyyy HH:mm zzz", + "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{4} \\d{2}:\\d{2} ", "\\b%{DAY} %{MONTH} %{MONTHDAY} %{YEAR} %{HOUR}:%{MINUTE} %{TZ}\\b", "DATESTAMP_RFC822", Collections.singletonList(1)), - new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm:ss ZZ", + new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm:ss ZZ", "EEE, dd MMM yyyy HH:mm:ss XXX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "\\b%{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) (?:Z|[+-]%{HOUR}:%{MINUTE})\\b", "DATESTAMP_RFC2822", 
Arrays.asList(1, 2)), - new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm:ss Z", + new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm:ss Z", "EEE, dd MMM yyyy HH:mm:ss XX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "\\b%{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) (?:Z|[+-]%{HOUR}%{MINUTE})\\b", "DATESTAMP_RFC2822", Arrays.asList(1, 2)), - new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm ZZ", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", + new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm ZZ", "EEE, dd MMM yyyy HH:mm XXX", + "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", "\\b%{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{HOUR}:%{MINUTE} (?:Z|[+-]%{HOUR}:%{MINUTE})\\b", "DATESTAMP_RFC2822", Collections.singletonList(1)), - new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm Z", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", + new CandidateTimestampFormat("EEE, dd MMM YYYY HH:mm Z", "EEE, dd MMM yyyy HH:mm XX", + "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", "\\b%{DAY}, %{MONTHDAY} %{MONTH} %{YEAR} %{HOUR}:%{MINUTE} (?:Z|[+-]%{HOUR}%{MINUTE})\\b", "DATESTAMP_RFC2822", Collections.singletonList(1)), - new CandidateTimestampFormat("EEE MMM dd HH:mm:ss zzz YYYY", + new CandidateTimestampFormat("EEE MMM dd HH:mm:ss zzz YYYY", "EEE MMM dd HH:mm:ss zzz yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2}:\\d{2} [A-Z]{3,4} \\d{4}\\b", "\\b%{DAY} %{MONTH} %{MONTHDAY} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) %{TZ} %{YEAR}\\b", "DATESTAMP_OTHER", Arrays.asList(1, 2)), - new CandidateTimestampFormat("EEE MMM dd HH:mm zzz YYYY", + new CandidateTimestampFormat("EEE MMM dd HH:mm zzz YYYY", "EEE MMM dd HH:mm zzz yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2} [A-Z]{3,4} \\d{4}\\b", "\\b%{DAY} %{MONTH} %{MONTHDAY} %{HOUR}:%{MINUTE} %{TZ} %{YEAR}\\b", "DATESTAMP_OTHER", Collections.singletonList(1)), - new 
CandidateTimestampFormat("YYYYMMddHHmmss", "\\b\\d{14}\\b", + new CandidateTimestampFormat("YYYYMMddHHmmss", "yyyyMMddHHmmss", "\\b\\d{14}\\b", "\\b20\\d{2}%{MONTHNUM2}(?:(?:0[1-9])|(?:[12][0-9])|(?:3[01]))(?:2[0123]|[01][0-9])%{MINUTE}(?:[0-5][0-9]|60)\\b", "DATESTAMP_EVENTLOG"), - new CandidateTimestampFormat("EEE MMM dd HH:mm:ss YYYY", + new CandidateTimestampFormat("EEE MMM dd HH:mm:ss YYYY", "EEE MMM dd HH:mm:ss yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2}:\\d{2} \\d{4}\\b", "\\b%{DAY} %{MONTH} %{MONTHDAY} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) %{YEAR}\\b", "HTTPDERROR_DATE", Arrays.asList(1, 2)), new CandidateTimestampFormat(Arrays.asList("MMM dd HH:mm:ss,SSS", "MMM d HH:mm:ss,SSS"), + Arrays.asList("MMM dd HH:mm:ss,SSS", "MMM d HH:mm:ss,SSS"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "%{MONTH} +%{MONTHDAY} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60)[:.,][0-9]{3,9}\\b", "SYSLOGTIMESTAMP", Collections.singletonList(1)), new CandidateTimestampFormat(Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\b", "%{MONTH} +%{MONTHDAY} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60)\\b", "SYSLOGTIMESTAMP", Collections.singletonList(1)), - new CandidateTimestampFormat("dd/MMM/YYYY:HH:mm:ss Z", "\\b\\d{2}/[A-Z]\\S{2}/\\d{4}:\\d{2}:\\d{2}:\\d{2} ", + new CandidateTimestampFormat("dd/MMM/YYYY:HH:mm:ss Z", "dd/MMM/yyyy:HH:mm:ss XX", + "\\b\\d{2}/[A-Z]\\S{2}/\\d{4}:\\d{2}:\\d{2}:\\d{2} ", "\\b%{MONTHDAY}/%{MONTH}/%{YEAR}:%{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) [+-]?%{HOUR}%{MINUTE}\\b", "HTTPDATE"), - new CandidateTimestampFormat("MMM dd, YYYY K:mm:ss a", "\\b[A-Z]\\S{2,8} \\d{1,2}, \\d{4} \\d{1,2}:\\d{2}:\\d{2} [AP]M\\b", + new CandidateTimestampFormat("MMM dd, YYYY h:mm:ss a", "MMM dd, yyyy h:mm:ss a", + "\\b[A-Z]\\S{2,8} \\d{1,2}, \\d{4} \\d{1,2}:\\d{2}:\\d{2} [AP]M\\b", "%{MONTH} %{MONTHDAY}, 20\\d{2} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60) (?:AM|PM)\\b", 
"CATALINA_DATESTAMP"), new CandidateTimestampFormat(Arrays.asList("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss"), + Arrays.asList("MMM dd yyyy HH:mm:ss", "MMM d yyyy HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{4} \\d{2}:\\d{2}:\\d{2}\\b", "%{MONTH} +%{MONTHDAY} %{YEAR} %{HOUR}:%{MINUTE}:(?:[0-5][0-9]|60)\\b", "CISCOTIMESTAMP", Collections.singletonList(1)), - new CandidateTimestampFormat("UNIX_MS", "\\b\\d{13}\\b", "\\b\\d{13}\\b", "POSINT"), - new CandidateTimestampFormat("UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "\\b\\d{10}\\.(?:\\d{3}){1,3}\\b", "NUMBER"), - new CandidateTimestampFormat("UNIX", "\\b\\d{10}\\b", "\\b\\d{10}\\b", "POSINT"), - new CandidateTimestampFormat("TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM") + new CandidateTimestampFormat("UNIX_MS", "UNIX_MS", "\\b\\d{13}\\b", "\\b\\d{13}\\b", "POSINT"), + new CandidateTimestampFormat("UNIX", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "\\b\\d{10}\\.(?:\\d{3}){1,3}\\b", "NUMBER"), + new CandidateTimestampFormat("UNIX", "UNIX", "\\b\\d{10}\\b", "\\b\\d{10}\\b", "POSINT"), + new CandidateTimestampFormat("TAI64N", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM") ); private TimestampFormatFinder() { @@ -151,7 +182,7 @@ public final class TimestampFormatFinder { /** * Find the first timestamp format that matches part of the supplied value. * @param text The value that the returned timestamp format must exist within. - * @param requiredFormat A date format that any returned match must support. + * @param requiredFormat A timestamp format that any returned match must support. * @return The timestamp format, or null if none matches. */ public static TimestampMatch findFirstMatch(String text, String requiredFormat) { @@ -174,14 +205,19 @@ public final class TimestampFormatFinder { * excluding a specified number of candidate formats. * @param text The value that the returned timestamp format must exist within. 
* @param ignoreCandidates The number of candidate formats to exclude from the search. - * @param requiredFormat A date format that any returned match must support. + * @param requiredFormat A timestamp format that any returned match must support. * @return The timestamp format, or null if none matches. */ public static TimestampMatch findFirstMatch(String text, int ignoreCandidates, String requiredFormat) { + if (ignoreCandidates >= ORDERED_CANDIDATE_FORMATS.size()) { + return null; + } Boolean[] quickRuleoutMatches = new Boolean[QUICK_RULE_OUT_PATTERNS.size()]; int index = ignoreCandidates; + String adjustedRequiredFormat = adjustRequiredFormat(requiredFormat); for (CandidateTimestampFormat candidate : ORDERED_CANDIDATE_FORMATS.subList(ignoreCandidates, ORDERED_CANDIDATE_FORMATS.size())) { - if (requiredFormat == null || candidate.dateFormats.contains(requiredFormat)) { + if (adjustedRequiredFormat == null || candidate.jodaTimestampFormats.contains(adjustedRequiredFormat) || + candidate.javaTimestampFormats.contains(adjustedRequiredFormat)) { boolean quicklyRuledOut = false; for (Integer quickRuleOutIndex : candidate.quickRuleOutIndices) { if (quickRuleoutMatches[quickRuleOutIndex] == null) { @@ -219,7 +255,7 @@ public final class TimestampFormatFinder { /** * Find the best timestamp format for matching an entire field value. * @param text The value that the returned timestamp format must match in its entirety. - * @param requiredFormat A date format that any returned match must support. + * @param requiredFormat A timestamp format that any returned match must support. * @return The timestamp format, or null if none matches. */ public static TimestampMatch findFirstFullMatch(String text, String requiredFormat) { @@ -242,13 +278,18 @@ public final class TimestampFormatFinder { * excluding a specified number of candidate formats. * @param text The value that the returned timestamp format must match in its entirety. 
* @param ignoreCandidates The number of candidate formats to exclude from the search. - * @param requiredFormat A date format that any returned match must support. + * @param requiredFormat A timestamp format that any returned match must support. * @return The timestamp format, or null if none matches. */ public static TimestampMatch findFirstFullMatch(String text, int ignoreCandidates, String requiredFormat) { + if (ignoreCandidates >= ORDERED_CANDIDATE_FORMATS.size()) { + return null; + } int index = ignoreCandidates; + String adjustedRequiredFormat = adjustRequiredFormat(requiredFormat); for (CandidateTimestampFormat candidate : ORDERED_CANDIDATE_FORMATS.subList(ignoreCandidates, ORDERED_CANDIDATE_FORMATS.size())) { - if (requiredFormat == null || candidate.dateFormats.contains(requiredFormat)) { + if (adjustedRequiredFormat == null || candidate.jodaTimestampFormats.contains(adjustedRequiredFormat) || + candidate.javaTimestampFormats.contains(adjustedRequiredFormat)) { Map captures = candidate.strictFullMatchGrok.captures(text); if (captures != null) { return makeTimestampMatch(candidate, index, "", text, ""); @@ -259,16 +300,32 @@ public final class TimestampFormatFinder { return null; } + /** + * If a required timestamp format contains a fractional seconds component, adjust it to the + * fractional seconds format that's in the candidate timestamp formats, i.e. ",SSS". So, for + * example, "YYYY-MM-dd HH:mm:ss.SSSSSSSSS Z" would get adjusted to "YYYY-MM-dd HH:mm:ss,SSS Z". + */ + static String adjustRequiredFormat(String requiredFormat) { + + return (requiredFormat == null) ? 
null : + FRACTIONAL_SECOND_TIMESTAMP_FORMAT_PATTERN.matcher(requiredFormat).replaceFirst(DEFAULT_FRACTIONAL_SECOND_FORMAT); + } + private static TimestampMatch makeTimestampMatch(CandidateTimestampFormat chosenTimestampFormat, int chosenIndex, String preface, String matchedDate, String epilogue) { Tuple fractionalSecondsInterpretation = interpretFractionalSeconds(matchedDate); - List dateFormats = chosenTimestampFormat.dateFormats; + List jodaTimestampFormats = chosenTimestampFormat.jodaTimestampFormats; + List javaTimestampFormats = chosenTimestampFormat.javaTimestampFormats; Pattern simplePattern = chosenTimestampFormat.simplePattern; char separator = fractionalSecondsInterpretation.v1(); if (separator != DEFAULT_FRACTIONAL_SECOND_SEPARATOR) { - dateFormats = dateFormats.stream().map(dateFormat -> dateFormat.replace(DEFAULT_FRACTIONAL_SECOND_SEPARATOR, separator)) + jodaTimestampFormats = jodaTimestampFormats.stream() + .map(jodaTimestampFormat -> jodaTimestampFormat.replace(DEFAULT_FRACTIONAL_SECOND_SEPARATOR, separator)) .collect(Collectors.toList()); - if (dateFormats.stream().noneMatch(dateFormat -> dateFormat.startsWith("UNIX"))) { + javaTimestampFormats = javaTimestampFormats.stream() + .map(javaTimestampFormat -> javaTimestampFormat.replace(DEFAULT_FRACTIONAL_SECOND_SEPARATOR, separator)) + .collect(Collectors.toList()); + if (jodaTimestampFormats.stream().noneMatch(jodaTimestampFormat -> jodaTimestampFormat.startsWith("UNIX"))) { String patternStr = simplePattern.pattern(); int separatorPos = patternStr.lastIndexOf(DEFAULT_FRACTIONAL_SECOND_SEPARATOR); if (separatorPos >= 0) { @@ -281,11 +338,15 @@ public final class TimestampFormatFinder { int numberOfDigitsInFractionalComponent = fractionalSecondsInterpretation.v2(); if (numberOfDigitsInFractionalComponent > 3) { String fractionalSecondsFormat = "SSSSSSSSS".substring(0, numberOfDigitsInFractionalComponent); - dateFormats = dateFormats.stream().map(dateFormat -> dateFormat.replace("SSS", 
fractionalSecondsFormat)) + jodaTimestampFormats = jodaTimestampFormats.stream() + .map(jodaTimestampFormat -> jodaTimestampFormat.replace("SSS", fractionalSecondsFormat)) + .collect(Collectors.toList()); + javaTimestampFormats = javaTimestampFormats.stream() + .map(javaTimestampFormat -> javaTimestampFormat.replace("SSS", fractionalSecondsFormat)) .collect(Collectors.toList()); } - return new TimestampMatch(chosenIndex, preface, dateFormats, simplePattern, chosenTimestampFormat.standardGrokPatternName, - epilogue); + return new TimestampMatch(chosenIndex, preface, jodaTimestampFormats, javaTimestampFormats, simplePattern, + chosenTimestampFormat.standardGrokPatternName, epilogue); } /** @@ -323,7 +384,12 @@ public final class TimestampFormatFinder { /** * Time format specifier(s) that will work with Logstash and Ingest pipeline date parsers. */ - public final List dateFormats; + public final List jodaTimestampFormats; + + /** + * Time format specifier(s) that will work with Logstash and Ingest pipeline date parsers. 
+ */ + public final List javaTimestampFormats; /** * A simple regex that will work in many languages to detect whether the timestamp format @@ -341,25 +407,25 @@ public final class TimestampFormatFinder { */ public final String epilogue; - TimestampMatch(int candidateIndex, String preface, String dateFormat, String simpleRegex, String grokPatternName, String epilogue) { - this(candidateIndex, preface, Collections.singletonList(dateFormat), simpleRegex, grokPatternName, epilogue); + TimestampMatch(int candidateIndex, String preface, String jodaTimestampFormat, String javaTimestampFormat, String simpleRegex, + String grokPatternName, String epilogue) { + this(candidateIndex, preface, Collections.singletonList(jodaTimestampFormat), Collections.singletonList(javaTimestampFormat), + simpleRegex, grokPatternName, epilogue); } - TimestampMatch(int candidateIndex, String preface, String dateFormat, String simpleRegex, String grokPatternName, String epilogue, - boolean hasFractionalComponentSmallerThanMillisecond) { - this(candidateIndex, preface, Collections.singletonList(dateFormat), simpleRegex, grokPatternName, epilogue); + TimestampMatch(int candidateIndex, String preface, List jodaTimestampFormats, List javaTimestampFormats, + String simpleRegex, String grokPatternName, String epilogue) { + this(candidateIndex, preface, jodaTimestampFormats, javaTimestampFormats, Pattern.compile(simpleRegex), grokPatternName, + epilogue); } - TimestampMatch(int candidateIndex, String preface, List dateFormats, String simpleRegex, String grokPatternName, - String epilogue) { - this(candidateIndex, preface, dateFormats, Pattern.compile(simpleRegex), grokPatternName, epilogue); - } - - TimestampMatch(int candidateIndex, String preface, List dateFormats, Pattern simplePattern, String grokPatternName, + TimestampMatch(int candidateIndex, String preface, List jodaTimestampFormats, List javaTimestampFormats, + Pattern simplePattern, String grokPatternName, String epilogue) { 
this.candidateIndex = candidateIndex; this.preface = preface; - this.dateFormats = dateFormats; + this.jodaTimestampFormats = Collections.unmodifiableList(jodaTimestampFormats); + this.javaTimestampFormats = Collections.unmodifiableList(javaTimestampFormats); this.simplePattern = simplePattern; this.grokPatternName = grokPatternName; this.epilogue = epilogue; @@ -370,8 +436,8 @@ public final class TimestampFormatFinder { * I.e., does the textual representation NOT define the timezone? */ public boolean hasTimezoneDependentParsing() { - return dateFormats.stream() - .anyMatch(dateFormat -> dateFormat.contains("HH") && dateFormat.toLowerCase(Locale.ROOT).indexOf('z') == -1); + return javaTimestampFormats.stream().anyMatch(javaTimestampFormat -> + javaTimestampFormat.indexOf('X') == -1 && javaTimestampFormat.indexOf('z') == -1 && javaTimestampFormat.contains("mm")); } /** @@ -380,13 +446,13 @@ public final class TimestampFormatFinder { * and possibly also a "format" setting. */ public Map getEsDateMappingTypeWithFormat() { - if (dateFormats.contains("TAI64N")) { - // There's no format for TAI64N in the date formats used in mappings + if (jodaTimestampFormats.contains("TAI64N")) { + // There's no format for TAI64N in the timestamp formats used in mappings return Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"); } Map mapping = new LinkedHashMap<>(); mapping.put(FileStructureUtils.MAPPING_TYPE_SETTING, "date"); - String formats = dateFormats.stream().flatMap(format -> { + String formats = jodaTimestampFormats.stream().flatMap(format -> { switch (format) { case "ISO8601": return Stream.empty(); @@ -406,7 +472,8 @@ public final class TimestampFormatFinder { @Override public int hashCode() { - return Objects.hash(candidateIndex, preface, dateFormats, simplePattern.pattern(), grokPatternName, epilogue); + return Objects.hash(candidateIndex, preface, jodaTimestampFormats, javaTimestampFormats, simplePattern.pattern(), + grokPatternName, epilogue); 
} @Override @@ -421,7 +488,8 @@ public final class TimestampFormatFinder { TimestampMatch that = (TimestampMatch) other; return this.candidateIndex == that.candidateIndex && Objects.equals(this.preface, that.preface) && - Objects.equals(this.dateFormats, that.dateFormats) && + Objects.equals(this.jodaTimestampFormats, that.jodaTimestampFormats) && + Objects.equals(this.javaTimestampFormats, that.javaTimestampFormats) && Objects.equals(this.simplePattern.pattern(), that.simplePattern.pattern()) && Objects.equals(this.grokPatternName, that.grokPatternName) && Objects.equals(this.epilogue, that.epilogue); @@ -430,7 +498,8 @@ public final class TimestampFormatFinder { @Override public String toString() { return "index = " + candidateIndex + (preface.isEmpty() ? "" : ", preface = '" + preface + "'") + - ", date formats = " + dateFormats.stream().collect(Collectors.joining("', '", "[ '", "' ]")) + + ", Joda timestamp formats = " + jodaTimestampFormats.stream().collect(Collectors.joining("', '", "[ '", "' ]")) + + ", Java timestamp formats = " + javaTimestampFormats.stream().collect(Collectors.joining("', '", "[ '", "' ]")) + ", simple pattern = '" + simplePattern.pattern() + "', grok pattern = '" + grokPatternName + "'" + (epilogue.isEmpty() ? 
"" : ", epilogue = '" + epilogue + "'"); } @@ -438,29 +507,36 @@ public final class TimestampFormatFinder { static final class CandidateTimestampFormat { - final List dateFormats; + final List jodaTimestampFormats; + final List javaTimestampFormats; final Pattern simplePattern; final Grok strictSearchGrok; final Grok strictFullMatchGrok; final String standardGrokPatternName; final List quickRuleOutIndices; - CandidateTimestampFormat(String dateFormat, String simpleRegex, String strictGrokPattern, String standardGrokPatternName) { - this(Collections.singletonList(dateFormat), simpleRegex, strictGrokPattern, standardGrokPatternName); + CandidateTimestampFormat(String jodaTimestampFormat, String javaTimestampFormat, String simpleRegex, String strictGrokPattern, + String standardGrokPatternName) { + this(Collections.singletonList(jodaTimestampFormat), Collections.singletonList(javaTimestampFormat), simpleRegex, + strictGrokPattern, standardGrokPatternName); } - CandidateTimestampFormat(String dateFormat, String simpleRegex, String strictGrokPattern, String standardGrokPatternName, - List quickRuleOutIndices) { - this(Collections.singletonList(dateFormat), simpleRegex, strictGrokPattern, standardGrokPatternName, quickRuleOutIndices); + CandidateTimestampFormat(String jodaTimestampFormat, String javaTimestampFormat, String simpleRegex, String strictGrokPattern, + String standardGrokPatternName, List quickRuleOutIndices) { + this(Collections.singletonList(jodaTimestampFormat), Collections.singletonList(javaTimestampFormat), simpleRegex, + strictGrokPattern, standardGrokPatternName, quickRuleOutIndices); } - CandidateTimestampFormat(List dateFormats, String simpleRegex, String strictGrokPattern, String standardGrokPatternName) { - this(dateFormats, simpleRegex, strictGrokPattern, standardGrokPatternName, Collections.emptyList()); + CandidateTimestampFormat(List jodaTimestampFormats, List javaTimestampFormats, String simpleRegex, + String strictGrokPattern, String 
standardGrokPatternName) { + this(jodaTimestampFormats, javaTimestampFormats, simpleRegex, strictGrokPattern, standardGrokPatternName, + Collections.emptyList()); } - CandidateTimestampFormat(List dateFormats, String simpleRegex, String strictGrokPattern, String standardGrokPatternName, - List quickRuleOutIndices) { - this.dateFormats = dateFormats; + CandidateTimestampFormat(List jodaTimestampFormats, List javaTimestampFormats, String simpleRegex, + String strictGrokPattern, String standardGrokPatternName, List quickRuleOutIndices) { + this.jodaTimestampFormats = jodaTimestampFormats; + this.javaTimestampFormats = javaTimestampFormats; this.simplePattern = Pattern.compile(simpleRegex, Pattern.MULTILINE); // The (?m) here has the Ruby meaning, which is equivalent to (?s) in Java this.strictSearchGrok = new Grok(Grok.getBuiltinPatterns(), "(?m)%{DATA:" + PREFACE + "}" + strictGrokPattern + diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java index d5e3fba34c9..66e6bbae886 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java @@ -93,7 +93,8 @@ public class XmlFileStructureFinder implements FileStructureFinder { Tuple timeField = FileStructureUtils.guessTimestampField(explanation, sampleRecords, overrides); if (timeField != null) { structureBuilder.setTimestampField(timeField.v1()) - .setTimestampFormats(timeField.v2().dateFormats) + .setJodaTimestampFormats(timeField.v2().jodaTimestampFormats) + .setJavaTimestampFormats(timeField.v2().javaTimestampFormats) .setNeedClientTimezone(timeField.v2().hasTimezoneDependentParsing()); } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java index d906ccf2f7a..5d7d84c14ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporter.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.ml.job.process; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; @@ -42,17 +42,28 @@ public class DataCountsReporter extends AbstractComponent { * The max percentage of date parse errors allowed before * an exception is thrown. */ + @Deprecated public static final Setting ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING = Setting.intSetting("max.percent.date.errors", 25, - Property.NodeScope); - + Property.NodeScope, Property.Deprecated); + public static final Setting MAX_ACCEPTABLE_PERCENT_OF_DATE_PARSE_ERRORS_SETTING = Setting.intSetting( + "xpack.ml.max_percent_date_errors", + ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING, + 0, + Property.Dynamic, + Property.NodeScope); /** * The max percentage of out of order records allowed before * an exception is thrown. 
*/ + @Deprecated public static final Setting ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING = Setting - .intSetting("max.percent.outoforder.errors", 25, Property.NodeScope); - - private static final TimeValue PERSIST_INTERVAL = TimeValue.timeValueMillis(10_000L); + .intSetting("max.percent.outoforder.errors", 25, Property.NodeScope, Property.Deprecated); + public static final Setting MAX_ACCEPTABLE_PERCENT_OF_OUT_OF_ORDER_ERRORS_SETTING = Setting.intSetting( + "xpack.ml.max_percent_out_of_order_errors", + ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING, + 0, + Property.Dynamic, + Property.NodeScope); private final Job job; private final JobDataCountsPersister dataCountsPersister; @@ -66,14 +77,15 @@ public class DataCountsReporter extends AbstractComponent { private long logEvery = 1; private long logCount = 0; - private final int acceptablePercentDateParseErrors; - private final int acceptablePercentOutOfOrderErrors; + private volatile int acceptablePercentDateParseErrors; + private volatile int acceptablePercentOutOfOrderErrors; private Function reportingBoundaryFunction; private DataStreamDiagnostics diagnostics; - public DataCountsReporter(Settings settings, Job job, DataCounts counts, JobDataCountsPersister dataCountsPersister) { + public DataCountsReporter(Settings settings, Job job, DataCounts counts, JobDataCountsPersister dataCountsPersister, + ClusterService clusterService) { super(settings); @@ -84,9 +96,12 @@ public class DataCountsReporter extends AbstractComponent { incrementalRecordStats = new DataCounts(job.getId()); diagnostics = new DataStreamDiagnostics(job, counts); - acceptablePercentDateParseErrors = ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING.get(settings); - acceptablePercentOutOfOrderErrors = ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING.get(settings); - + acceptablePercentDateParseErrors = MAX_ACCEPTABLE_PERCENT_OF_DATE_PARSE_ERRORS_SETTING.get(settings); + acceptablePercentOutOfOrderErrors = 
MAX_ACCEPTABLE_PERCENT_OF_OUT_OF_ORDER_ERRORS_SETTING.get(settings); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(MAX_ACCEPTABLE_PERCENT_OF_DATE_PARSE_ERRORS_SETTING, this::setAcceptablePercentDateParseErrors); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(MAX_ACCEPTABLE_PERCENT_OF_OUT_OF_ORDER_ERRORS_SETTING, this::setAcceptablePercentOutOfOrderErrors); reportingBoundaryFunction = this::reportEvery10000Records; } @@ -352,4 +367,17 @@ public class DataCountsReporter extends AbstractComponent { diagnostics.resetCounts(); } + + private void setAcceptablePercentDateParseErrors(int acceptablePercentDateParseErrors) { + logger.info("Changing [{}] from [{}] to [{}]", MAX_ACCEPTABLE_PERCENT_OF_DATE_PARSE_ERRORS_SETTING.getKey(), + this.acceptablePercentDateParseErrors, acceptablePercentDateParseErrors); + this.acceptablePercentDateParseErrors = acceptablePercentDateParseErrors; + } + + private void setAcceptablePercentOutOfOrderErrors(int acceptablePercentOutOfOrderErrors) { + logger.info("Changing [{}] from [{}] to [{}]", MAX_ACCEPTABLE_PERCENT_OF_OUT_OF_ORDER_ERRORS_SETTING.getKey(), + this.acceptablePercentOutOfOrderErrors, acceptablePercentOutOfOrderErrors); + this.acceptablePercentOutOfOrderErrors = acceptablePercentOutOfOrderErrors; + } + } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java index 0094eba97ce..4942200606d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectBuilder.java @@ -83,8 +83,16 @@ public class AutodetectBuilder { /** * The maximum number of anomaly records that will be written each bucket */ + @Deprecated public static final Setting MAX_ANOMALY_RECORDS_SETTING = 
Setting.intSetting("max.anomaly.records", DEFAULT_MAX_NUM_RECORDS, - Setting.Property.NodeScope); + Setting.Property.NodeScope, Setting.Property.Deprecated); + // Though this setting is dynamic, it is only set when a new job is opened. So, already running jobs will not get the updated value. + public static final Setting MAX_ANOMALY_RECORDS_SETTING_DYNAMIC = Setting.intSetting( + "xpack.ml.max_anomaly_records", + MAX_ANOMALY_RECORDS_SETTING, + 1, + Setting.Property.NodeScope, + Setting.Property.Dynamic); /** * Config setting storing the flag that disables model persistence @@ -244,9 +252,8 @@ public class AutodetectBuilder { return command; } - static String maxAnomalyRecordsArg(Settings settings) { - return "--maxAnomalyRecords=" + MAX_ANOMALY_RECORDS_SETTING.get(settings); + return "--maxAnomalyRecords=" + MAX_ANOMALY_RECORDS_SETTING_DYNAMIC.get(settings); } private static String getTimeFieldOrDefault(Job job) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index fa05c2e63ee..7e6d923bb51 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; @@ -130,12 +131,13 @@ public class AutodetectProcessManager extends AbstractComponent { private final NamedXContentRegistry 
xContentRegistry; private final Auditor auditor; + private final ClusterService clusterService; public AutodetectProcessManager(Environment environment, Settings settings, Client client, ThreadPool threadPool, JobManager jobManager, JobResultsProvider jobResultsProvider, JobResultsPersister jobResultsPersister, JobDataCountsPersister jobDataCountsPersister, AutodetectProcessFactory autodetectProcessFactory, NormalizerFactory normalizerFactory, - NamedXContentRegistry xContentRegistry, Auditor auditor) { + NamedXContentRegistry xContentRegistry, Auditor auditor, ClusterService clusterService) { super(settings); this.environment = environment; this.client = client; @@ -150,6 +152,7 @@ public class AutodetectProcessManager extends AbstractComponent { this.jobDataCountsPersister = jobDataCountsPersister; this.auditor = auditor; this.nativeStorageProvider = new NativeStorageProvider(environment, MIN_DISK_SPACE_OFF_HEAP.get(settings)); + this.clusterService = clusterService; } public void onNodeStartup() { @@ -493,8 +496,11 @@ public class AutodetectProcessManager extends AbstractComponent { Job job = jobManager.getJobOrThrowIfUnknown(jobId); // A TP with no queue, so that we fail immediately if there are no threads available ExecutorService autoDetectExecutorService = threadPool.executor(MachineLearning.AUTODETECT_THREAD_POOL_NAME); - DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, autodetectParams.dataCounts(), - jobDataCountsPersister); + DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, + job, + autodetectParams.dataCounts(), + jobDataCountsPersister, + clusterService); ScoresUpdater scoresUpdater = new ScoresUpdater(job, jobResultsProvider, new JobRenormalizedResultsPersister(job.getId(), settings, client), normalizerFactory); ExecutorService renormalizerExecutorService = threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java index 01ad0bec85a..06055476f76 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/NativeAutodetectProcessFactory.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.job.process.autodetect; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -40,12 +41,15 @@ public class NativeAutodetectProcessFactory implements AutodetectProcessFactory private final Environment env; private final Settings settings; private final NativeController nativeController; + private final ClusterService clusterService; - public NativeAutodetectProcessFactory(Environment env, Settings settings, NativeController nativeController, Client client) { + public NativeAutodetectProcessFactory(Environment env, Settings settings, NativeController nativeController, Client client, + ClusterService clusterService) { this.env = Objects.requireNonNull(env); this.settings = Objects.requireNonNull(settings); this.nativeController = Objects.requireNonNull(nativeController); this.client = client; + this.clusterService = clusterService; } @Override @@ -85,8 +89,15 @@ public class NativeAutodetectProcessFactory implements AutodetectProcessFactory private void createNativeProcess(Job job, AutodetectParams autodetectParams, ProcessPipes processPipes, List filesToDelete) { try { + + Settings updatedSettings = Settings.builder() + .put(settings) + 
.put(AutodetectBuilder.MAX_ANOMALY_RECORDS_SETTING_DYNAMIC.getKey(), + clusterService.getClusterSettings().get(AutodetectBuilder.MAX_ANOMALY_RECORDS_SETTING_DYNAMIC)) + .build(); + AutodetectBuilder autodetectBuilder = new AutodetectBuilder(job, filesToDelete, LOGGER, env, - settings, nativeController, processPipes) + updatedSettings, nativeController, processPipes) .referencedFilters(autodetectParams.filters()) .scheduledEvents(autodetectParams.scheduledEvents()); @@ -95,7 +106,6 @@ public class NativeAutodetectProcessFactory implements AutodetectProcessFactory if (autodetectParams.quantiles() != null) { autodetectBuilder.quantiles(autodetectParams.quantiles()); } - autodetectBuilder.build(); processPipes.connectStreams(PROCESS_STARTUP_TIMEOUT); } catch (IOException e) { @@ -104,5 +114,6 @@ public class NativeAutodetectProcessFactory implements AutodetectProcessFactory throw ExceptionsHelper.serverError(msg, e); } } + } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java index 22e7d3ba995..74eb01987c5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/normalizer/Normalizer.java @@ -190,23 +190,34 @@ public class Normalizer { * Encapsulate the logic for deciding whether a change to a normalized score * is "big". *

- * Current logic is that a big change is a change of at least 1 or more than - * than 50% of the higher of the two values. + * Current logic is that a change is considered big if any of the following criteria are met: + *

    + *
  • the change would result in a change of colour in the UI + * (e.g. severity would be changed from WARNING to MINOR)
  • + *
  • the change is at least 1.5
  • + *
  • the change in values is greater than 67% of the higher of the two values.
  • + *
+ * These values have been chosen through a process of experimentation, in particular it was desired to reduce + * the number of updates written to the results index due to renormalization events for performance reasons + * while not changing the normalized scores greatly * * @param oldVal The old value of the normalized score * @param newVal The new value of the normalized score * @return true if the update is considered "big" */ private static boolean isBigUpdate(double oldVal, double newVal) { - if (Math.abs(oldVal - newVal) >= 1.0) { + if ((int) (oldVal / 25.0) != (int) (newVal / 25.0)) { + return true; + } + if (Math.abs(oldVal - newVal) >= 1.5) { return true; } if (oldVal > newVal) { - if (oldVal * 0.5 > newVal) { + if (oldVal * 0.33 > newVal) { return true; } } else { - if (newVal * 0.5 > oldVal) { + if (newVal * 0.33 > oldVal) { return true; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java index ceeb55442e5..5b4895f6bbb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/modelsnapshots/RestGetModelSnapshotsAction.java @@ -12,11 +12,11 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; -import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction; import org.elasticsearch.xpack.core.ml.action.GetModelSnapshotsAction.Request; import org.elasticsearch.xpack.core.ml.action.util.PageParams; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ 
-30,7 +30,6 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler { private final String DEFAULT_SORT = null; private final String DEFAULT_START = null; private final String DEFAULT_END = null; - private final String DEFAULT_DESCRIPTION = null; private final boolean DEFAULT_DESC_ORDER = true; public RestGetModelSnapshotsAction(Settings settings, RestController controller) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java index 903ab4af115..93bdc125890 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationDataExtractorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.Term; import org.junit.Before; import java.io.BufferedReader; @@ -33,7 +34,6 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.Term; import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createHistogramBucket; import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createMax; import static org.elasticsearch.xpack.ml.datafeed.extractor.aggregation.AggregationTestUtils.createTerms; @@ -254,7 +254,7 @@ public class AggregationDataExtractorTests extends ESTestCase { 
extractor.setNextResponse(createResponseWithShardFailures()); assertThat(extractor.hasNext(), is(true)); - IOException e = expectThrows(IOException.class, extractor::next); + expectThrows(IOException.class, extractor::next); } public void testExtractionGivenInitSearchResponseEncounteredUnavailableShards() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java index decc61a5397..9f6699fe71b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java @@ -50,7 +50,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { assertEquals(Arrays.asList("time", "message"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("time", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCompleteCsvAndColumnNamesOverride() throws Exception { @@ -84,7 +84,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { assertEquals(Arrays.asList("my_time", "my_message"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("my_time", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCompleteCsvAndHasHeaderRowOverride() throws Exception { @@ -120,7 +120,7 @@ public class DelimitedFileStructureFinderTests extends 
FileStructureTestCase { assertEquals(Arrays.asList("column1", "column2"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertNull(structure.getTimestampField()); - assertNull(structure.getTimestampFormats()); + assertNull(structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithIncompleteLastRecord() throws Exception { @@ -153,7 +153,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { assertEquals(Arrays.asList("message", "time", "count"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("time", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithTrailingNulls() throws Exception { @@ -193,7 +193,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount", "column18", "column19"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("tpep_pickup_datetime", structure.getTimestampField()); - assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithTrailingNullsAndOverriddenTimeField() throws Exception { @@ -237,7 +237,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount", "column18", "column19"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("tpep_dropoff_datetime", structure.getTimestampField()); - assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getTimestampFormats()); + 
assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithTrailingNullsExceptHeader() throws Exception { @@ -277,7 +277,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("tpep_pickup_datetime", structure.getTimestampField()); - assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithTrailingNullsExceptHeaderAndColumnNamesOverride() throws Exception { @@ -324,7 +324,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("my_tpep_pickup_datetime", structure.getTimestampField()); - assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenCsvWithTimeLastColumn() throws Exception { @@ -357,7 +357,7 @@ public class DelimitedFileStructureFinderTests extends FileStructureTestCase { assertEquals(Arrays.asList("pos_id", "trip_id", "latitude", "longitude", "altitude", "timestamp"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); - assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss.SSSSSS"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss.SSSSSS"), structure.getJodaTimestampFormats()); } public void testFindHeaderFromSampleGivenHeaderInSample() throws IOException { diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java index 901fca83ffd..698b53cf7c7 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java @@ -39,7 +39,7 @@ public class FileStructureUtilsTests extends FileStructureTestCase { FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), EMPTY_OVERRIDES); assertNotNull(match); assertEquals("field1", match.v1()); - assertThat(match.v2().dateFormats, contains("ISO8601")); + assertThat(match.v2().jodaTimestampFormats, contains("ISO8601")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -52,7 +52,7 @@ public class FileStructureUtilsTests extends FileStructureTestCase { FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), overrides); assertNotNull(match); assertEquals("field1", match.v1()); - assertThat(match.v2().dateFormats, contains("ISO8601")); + assertThat(match.v2().jodaTimestampFormats, contains("ISO8601")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -76,7 +76,7 @@ public class FileStructureUtilsTests extends FileStructureTestCase { FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), overrides); assertNotNull(match); assertEquals("field1", match.v1()); - assertThat(match.v2().dateFormats, contains("ISO8601")); + assertThat(match.v2().jodaTimestampFormats, contains("ISO8601")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -99,7 +99,7 @@ public class FileStructureUtilsTests extends FileStructureTestCase { FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); 
assertNotNull(match); assertEquals("field1", match.v1()); - assertThat(match.v2().dateFormats, contains("ISO8601")); + assertThat(match.v2().jodaTimestampFormats, contains("ISO8601")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -128,7 +128,7 @@ public class FileStructureUtilsTests extends FileStructureTestCase { FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample), EMPTY_OVERRIDES); assertNotNull(match); assertEquals("time", match.v1()); - assertThat(match.v2().dateFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); + assertThat(match.v2().jodaTimestampFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -145,7 +145,7 @@ public class FileStructureUtilsTests extends FileStructureTestCase { FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); assertNotNull(match); assertEquals("time", match.v1()); - assertThat(match.v2().dateFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); + assertThat(match.v2().jodaTimestampFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -176,7 +176,7 @@ public class FileStructureUtilsTests extends FileStructureTestCase { FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); assertNotNull(match); assertEquals("time", match.v1()); - assertThat(match.v2().dateFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); + assertThat(match.v2().jodaTimestampFormats, contains("YYYY-MM-dd HH:mm:ss,SSS")); assertEquals("TIMESTAMP_ISO8601", match.v2().grokPatternName); } @@ -193,7 +193,7 @@ public class FileStructureUtilsTests extends FileStructureTestCase { FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); assertNotNull(match); assertEquals("time", match.v1()); - assertThat(match.v2().dateFormats, contains("MMM dd YYYY HH:mm:ss", "MMM d YYYY 
HH:mm:ss")); + assertThat(match.v2().jodaTimestampFormats, contains("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss")); assertEquals("CISCOTIMESTAMP", match.v2().grokPatternName); } @@ -226,7 +226,7 @@ public class FileStructureUtilsTests extends FileStructureTestCase { FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2), EMPTY_OVERRIDES); assertNotNull(match); assertEquals("time2", match.v1()); - assertThat(match.v2().dateFormats, contains("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss")); + assertThat(match.v2().jodaTimestampFormats, contains("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss")); assertEquals("CISCOTIMESTAMP", match.v2().grokPatternName); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java index 6856e9a6021..ce401c182ea 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java @@ -38,6 +38,6 @@ public class JsonFileStructureFinderTests extends FileStructureTestCase { assertNull(structure.getShouldTrimFields()); assertNull(structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); - assertEquals(Collections.singletonList("UNIX_MS"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("UNIX_MS"), structure.getJodaTimestampFormats()); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java index 5bc40a16511..b4c28eda149 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java @@ -119,14 +119,14 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker()); } assertNull(structure.getExcludeLinesPattern()); - assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); + assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", structure.getMultilineStartPattern()); assertNull(structure.getDelimiter()); assertNull(structure.getQuote()); assertNull(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertEquals("\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel} \\]\\[.*", structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenElasticsearchLogAndTimestampFieldOverride() throws Exception { @@ -149,14 +149,14 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker()); } assertNull(structure.getExcludeLinesPattern()); - assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); + assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", structure.getMultilineStartPattern()); assertNull(structure.getDelimiter()); assertNull(structure.getQuote()); assertNull(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); assertEquals("\\[%{TIMESTAMP_ISO8601:my_time}\\]\\[%{LOGLEVEL:loglevel} \\]\\[.*", structure.getGrokPattern()); 
assertEquals("my_time", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenElasticsearchLogAndGrokPatternOverride() throws Exception { @@ -180,7 +180,7 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { assertEquals(hasByteOrderMarker, structure.getHasByteOrderMarker()); } assertNull(structure.getExcludeLinesPattern()); - assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", structure.getMultilineStartPattern()); + assertEquals("^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", structure.getMultilineStartPattern()); assertNull(structure.getDelimiter()); assertNull(structure.getQuote()); assertNull(structure.getHasHeaderRow()); @@ -188,7 +188,7 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { assertEquals("\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel} *\\]" + "\\[%{JAVACLASS:class} *\\] \\[%{HOSTNAME:node}\\] %{JAVALOGMESSAGE:message}", structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); - assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("ISO8601"), structure.getJodaTimestampFormats()); } public void testCreateConfigsGivenElasticsearchLogAndImpossibleGrokPatternOverride() { @@ -312,8 +312,8 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { Tuple> mostLikelyMatch = TextLogFileStructureFinder.mostLikelyTimestamp(sample.split("\n"), FileStructureOverrides.EMPTY_OVERRIDES); assertNotNull(mostLikelyMatch); - assertEquals(new TimestampMatch(7, "", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), - mostLikelyMatch.v1()); + assertEquals(new TimestampMatch(9, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS", + 
"\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), mostLikelyMatch.v1()); } public void testMostLikelyTimestampGivenExceptionTrace() { @@ -325,8 +325,8 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { // Even though many lines have a timestamp near the end (in the Lucene version information), // these are so far along the lines that the weight of the timestamp near the beginning of the // first line should take precedence - assertEquals(new TimestampMatch(7, "", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), - mostLikelyMatch.v1()); + assertEquals(new TimestampMatch(9, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), mostLikelyMatch.v1()); } public void testMostLikelyTimestampGivenExceptionTraceAndTimestampFormatOverride() { @@ -338,8 +338,8 @@ public class TextLogFileStructureFinderTests extends FileStructureTestCase { assertNotNull(mostLikelyMatch); // The override should force the seemingly inferior choice of timestamp - assertEquals(new TimestampMatch(6, "", "YYYY-MM-dd HH:mm:ss", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", - ""), mostLikelyMatch.v1()); + assertEquals(new TimestampMatch(6, "", "YYYY-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", + "TIMESTAMP_ISO8601", ""), mostLikelyMatch.v1()); } public void testMostLikelyTimestampGivenExceptionTraceAndImpossibleTimestampFormatOverride() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java index bf27912b9db..4b9b0b36c34 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java @@ -6,14 +6,11 @@ package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; -import org.joda.time.format.ISODateTimeFormat; import java.util.Arrays; +import java.util.List; import java.util.Locale; public class TimestampFormatFinderTests extends FileStructureTestCase { @@ -28,199 +25,206 @@ public class TimestampFormatFinderTests extends FileStructureTestCase { public void testFindFirstMatchGivenOnlyIso8601() { - TimestampMatch expected = new TimestampMatch(7, "", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", - ""); + validateTimestampMatch(new TimestampMatch(7, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSSXX", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15T16:14:56,374Z", + 1526400896374L); + validateTimestampMatch(new TimestampMatch(7, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSSXX", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15T17:14:56,374+0100", + 1526400896374L); + validateTimestampMatch(new TimestampMatch(8, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSSXXX", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15T17:14:56,374+01:00", + 1526400896374L); + validateTimestampMatch(new TimestampMatch(9, "", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15T17:14:56,374", 1526400896374L); - checkAndValidateDateFormat(expected, "2018-05-15T16:14:56,374Z", 
1526400896374L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56,374+0100", 1526400896374L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56,374+01:00", 1526400896374L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56,374", 1526400896374L); - checkAndValidateDateFormat(expected, "2018-05-15T16:14:56Z", 1526400896000L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56+0100", 1526400896000L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56+01:00", 1526400896000L); - checkAndValidateDateFormat(expected, "2018-05-15T17:14:56", 1526400896000L); + TimestampMatch pureIso8601Expected = new TimestampMatch(10, "", "ISO8601", "ISO8601", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""); - checkAndValidateDateFormat(new TimestampMatch(1, "", "YYYY-MM-dd HH:mm:ss,SSSZ", + validateTimestampMatch(pureIso8601Expected, "2018-05-15T16:14:56Z", 1526400896000L); + validateTimestampMatch(pureIso8601Expected, "2018-05-15T17:14:56+0100", 1526400896000L); + validateTimestampMatch(pureIso8601Expected, "2018-05-15T17:14:56+01:00", 1526400896000L); + validateTimestampMatch(pureIso8601Expected, "2018-05-15T17:14:56", 1526400896000L); + + validateTimestampMatch(new TimestampMatch(1, "", "YYYY-MM-dd HH:mm:ss,SSSZ", "yyyy-MM-dd HH:mm:ss,SSSXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15 16:14:56,374Z", 1526400896374L); - checkAndValidateDateFormat(new TimestampMatch(1, "", "YYYY-MM-dd HH:mm:ss,SSSZ", + validateTimestampMatch(new TimestampMatch(1, "", "YYYY-MM-dd HH:mm:ss,SSSZ", "yyyy-MM-dd HH:mm:ss,SSSXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56,374+0100", 1526400896374L); - checkAndValidateDateFormat(new TimestampMatch(2, "", "YYYY-MM-dd HH:mm:ss,SSSZZ", + validateTimestampMatch(new TimestampMatch(2, "", "YYYY-MM-dd HH:mm:ss,SSSZZ", "yyyy-MM-dd HH:mm:ss,SSSXXX", "\\b\\d{4}-\\d{2}-\\d{2} 
\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56,374+01:00", 1526400896374L); - checkAndValidateDateFormat(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss,SSS", + validateTimestampMatch(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss,SSS", "yyyy-MM-dd HH:mm:ss,SSS", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56,374", 1526400896374L); - checkAndValidateDateFormat(new TimestampMatch(4, "", "YYYY-MM-dd HH:mm:ssZ", + validateTimestampMatch(new TimestampMatch(4, "", "YYYY-MM-dd HH:mm:ssZ", "yyyy-MM-dd HH:mm:ssXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), "2018-05-15 16:14:56Z", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(4, "", "YYYY-MM-dd HH:mm:ssZ", + validateTimestampMatch(new TimestampMatch(4, "", "YYYY-MM-dd HH:mm:ssZ", "yyyy-MM-dd HH:mm:ssXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56+0100", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(5, "", "YYYY-MM-dd HH:mm:ssZZ", + validateTimestampMatch(new TimestampMatch(5, "", "YYYY-MM-dd HH:mm:ssZZ", "yyyy-MM-dd HH:mm:ssXXX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56+01:00", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(6, "", "YYYY-MM-dd HH:mm:ss", + validateTimestampMatch(new TimestampMatch(6, "", "YYYY-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""), "2018-05-15 17:14:56", 1526400896000L); } - public void testFindFirstMatchGivenOnlyKnownDateFormat() { + public void testFindFirstMatchGivenOnlyKnownTimestampFormat() { // Note: some of the time formats give millisecond accuracy, some second accuracy and some minute accuracy - checkAndValidateDateFormat(new TimestampMatch(0, "", "YYYY-MM-dd HH:mm:ss,SSS Z", + validateTimestampMatch(new TimestampMatch(0, "", "YYYY-MM-dd HH:mm:ss,SSS 
Z", "yyyy-MM-dd HH:mm:ss,SSS XX", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2},\\d{3}", "TOMCAT_DATESTAMP", ""), "2018-05-15 17:14:56,374 +0100", 1526400896374L); - checkAndValidateDateFormat(new TimestampMatch(8, "", "EEE MMM dd YYYY HH:mm:ss zzz", + validateTimestampMatch(new TimestampMatch(11, "", "EEE MMM dd YYYY HH:mm:ss zzz", "EEE MMM dd yyyy HH:mm:ss zzz", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "DATESTAMP_RFC822", ""), "Tue May 15 2018 16:14:56 UTC", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(9, "", "EEE MMM dd YYYY HH:mm zzz", + validateTimestampMatch(new TimestampMatch(12, "", "EEE MMM dd YYYY HH:mm zzz", "EEE MMM dd yyyy HH:mm zzz", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{4} \\d{2}:\\d{2} ", "DATESTAMP_RFC822", ""), "Tue May 15 2018 16:14 UTC", 1526400840000L); - checkAndValidateDateFormat(new TimestampMatch(10, "", "EEE, dd MMM YYYY HH:mm:ss ZZ", + validateTimestampMatch(new TimestampMatch(13, "", "EEE, dd MMM YYYY HH:mm:ss ZZ", "EEE, dd MMM yyyy HH:mm:ss XXX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "DATESTAMP_RFC2822", ""), "Tue, 15 May 2018 17:14:56 +01:00", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(11, "", "EEE, dd MMM YYYY HH:mm:ss Z", + validateTimestampMatch(new TimestampMatch(14, "", "EEE, dd MMM YYYY HH:mm:ss Z", "EEE, dd MMM yyyy HH:mm:ss XX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2}:\\d{2} ", "DATESTAMP_RFC2822", ""), "Tue, 15 May 2018 17:14:56 +0100", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(12, "", "EEE, dd MMM YYYY HH:mm ZZ", + validateTimestampMatch(new TimestampMatch(15, "", "EEE, dd MMM YYYY HH:mm ZZ", "EEE, dd MMM yyyy HH:mm XXX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", "DATESTAMP_RFC2822", ""), "Tue, 15 May 2018 17:14 +01:00", 1526400840000L); - checkAndValidateDateFormat(new TimestampMatch(13, "", "EEE, dd MMM YYYY HH:mm Z", + 
validateTimestampMatch(new TimestampMatch(16, "", "EEE, dd MMM YYYY HH:mm Z", "EEE, dd MMM yyyy HH:mm XX", "\\b[A-Z]\\S{2,8}, \\d{1,2} [A-Z]\\S{2,8} \\d{4} \\d{2}:\\d{2} ", "DATESTAMP_RFC2822", ""), "Tue, 15 May 2018 17:14 +0100", 1526400840000L); - checkAndValidateDateFormat(new TimestampMatch(14, "", "EEE MMM dd HH:mm:ss zzz YYYY", + validateTimestampMatch(new TimestampMatch(17, "", "EEE MMM dd HH:mm:ss zzz YYYY", "EEE MMM dd HH:mm:ss zzz yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2}:\\d{2} [A-Z]{3,4} \\d{4}\\b", "DATESTAMP_OTHER", ""), "Tue May 15 16:14:56 UTC 2018", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(15, "", "EEE MMM dd HH:mm zzz YYYY", + validateTimestampMatch(new TimestampMatch(18, "", "EEE MMM dd HH:mm zzz YYYY", "EEE MMM dd HH:mm zzz yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2} [A-Z]{3,4} \\d{4}\\b", "DATESTAMP_OTHER", ""), "Tue May 15 16:14 UTC 2018", 1526400840000L); - checkAndValidateDateFormat(new TimestampMatch(16, "", "YYYYMMddHHmmss", "\\b\\d{14}\\b", "DATESTAMP_EVENTLOG", ""), + validateTimestampMatch(new TimestampMatch(19, "", "YYYYMMddHHmmss", "yyyyMMddHHmmss", "\\b\\d{14}\\b", + "DATESTAMP_EVENTLOG", ""), "20180515171456", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(17, "", "EEE MMM dd HH:mm:ss YYYY", + validateTimestampMatch(new TimestampMatch(20, "", "EEE MMM dd HH:mm:ss YYYY", "EEE MMM dd HH:mm:ss yyyy", "\\b[A-Z]\\S{2,8} [A-Z]\\S{2,8} \\d{1,2} \\d{2}:\\d{2}:\\d{2} \\d{4}\\b", "HTTPDERROR_DATE", ""), "Tue May 15 17:14:56 2018", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(18, "", Arrays.asList("MMM dd HH:mm:ss.SSS", "MMM d HH:mm:ss.SSS"), + validateTimestampMatch(new TimestampMatch(21, "", Arrays.asList("MMM dd HH:mm:ss.SSS", "MMM d HH:mm:ss.SSS"), + Arrays.asList("MMM dd HH:mm:ss.SSS", "MMM d HH:mm:ss.SSS"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}", "SYSLOGTIMESTAMP", ""), "May 15 17:14:56.725", 1526400896725L); - 
checkAndValidateDateFormat(new TimestampMatch(19, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + validateTimestampMatch(new TimestampMatch(22, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\b", "SYSLOGTIMESTAMP", ""), "May 15 17:14:56", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(20, "", "dd/MMM/YYYY:HH:mm:ss Z", + validateTimestampMatch(new TimestampMatch(23, "", "dd/MMM/YYYY:HH:mm:ss Z", "dd/MMM/yyyy:HH:mm:ss XX", "\\b\\d{2}/[A-Z]\\S{2}/\\d{4}:\\d{2}:\\d{2}:\\d{2} ", "HTTPDATE", ""), "15/May/2018:17:14:56 +0100", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(21, "", "MMM dd, YYYY K:mm:ss a", + validateTimestampMatch(new TimestampMatch(24, "", "MMM dd, YYYY h:mm:ss a", "MMM dd, yyyy h:mm:ss a", "\\b[A-Z]\\S{2,8} \\d{1,2}, \\d{4} \\d{1,2}:\\d{2}:\\d{2} [AP]M\\b", "CATALINA_DATESTAMP", ""), "May 15, 2018 5:14:56 PM", 1526400896000L); - checkAndValidateDateFormat(new TimestampMatch(22, "", Arrays.asList("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss"), + validateTimestampMatch(new TimestampMatch(25, "", Arrays.asList("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss"), + Arrays.asList("MMM dd yyyy HH:mm:ss", "MMM d yyyy HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{4} \\d{2}:\\d{2}:\\d{2}\\b", "CISCOTIMESTAMP", ""), "May 15 2018 17:14:56", 1526400896000L); } public void testFindFirstMatchGivenOnlySystemDate() { - assertEquals(new TimestampMatch(23, "", "UNIX_MS", "\\b\\d{13}\\b", "POSINT", ""), + assertEquals(new TimestampMatch(26, "", "UNIX_MS", "UNIX_MS", "\\b\\d{13}\\b", "POSINT", ""), TimestampFormatFinder.findFirstMatch("1526400896374")); - assertEquals(new TimestampMatch(23, "", "UNIX_MS", "\\b\\d{13}\\b", "POSINT", ""), + assertEquals(new TimestampMatch(26, "", "UNIX_MS", "UNIX_MS", "\\b\\d{13}\\b", "POSINT", ""), TimestampFormatFinder.findFirstFullMatch("1526400896374")); - assertEquals(new 
TimestampMatch(24, "", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "NUMBER", ""), + assertEquals(new TimestampMatch(27, "", "UNIX", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "NUMBER", ""), TimestampFormatFinder.findFirstMatch("1526400896.736")); - assertEquals(new TimestampMatch(24, "", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "NUMBER", ""), + assertEquals(new TimestampMatch(27, "", "UNIX", "UNIX", "\\b\\d{10}\\.\\d{3,9}\\b", "NUMBER", ""), TimestampFormatFinder.findFirstFullMatch("1526400896.736")); - assertEquals(new TimestampMatch(25, "", "UNIX", "\\b\\d{10}\\b", "POSINT", ""), + assertEquals(new TimestampMatch(28, "", "UNIX", "UNIX", "\\b\\d{10}\\b", "POSINT", ""), TimestampFormatFinder.findFirstMatch("1526400896")); - assertEquals(new TimestampMatch(25, "", "UNIX", "\\b\\d{10}\\b", "POSINT", ""), + assertEquals(new TimestampMatch(28, "", "UNIX", "UNIX", "\\b\\d{10}\\b", "POSINT", ""), TimestampFormatFinder.findFirstFullMatch("1526400896")); - assertEquals(new TimestampMatch(26, "", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM", ""), + assertEquals(new TimestampMatch(29, "", "TAI64N", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM", ""), TimestampFormatFinder.findFirstMatch("400000005afb159a164ac980")); - assertEquals(new TimestampMatch(26, "", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM", ""), + assertEquals(new TimestampMatch(29, "", "TAI64N", "TAI64N", "\\b[0-9A-Fa-f]{24}\\b", "BASE16NUM", ""), TimestampFormatFinder.findFirstFullMatch("400000005afb159a164ac980")); } - private void checkAndValidateDateFormat(TimestampMatch expected, String text, long expectedEpochMs) { - - assertEquals(expected, TimestampFormatFinder.findFirstMatch(text)); - assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text)); - - // All the test times are for Tue May 15 2018 16:14:56 UTC, which is 17:14:56 in London - DateTimeZone zone = DateTimeZone.forID("Europe/London"); - DateTime parsed; - for (int i = 0; i < expected.dateFormats.size(); ++i) { - try { - String dateFormat = 
expected.dateFormats.get(i); - switch (dateFormat) { - case "ISO8601": - parsed = ISODateTimeFormat.dateTimeParser().withZone(zone).withDefaultYear(2018).parseDateTime(text); - break; - default: - DateTimeFormatter parser = DateTimeFormat.forPattern(dateFormat).withZone(zone).withLocale(Locale.UK); - parsed = parser.withDefaultYear(2018).parseDateTime(text); - break; - } - if (expectedEpochMs == parsed.getMillis()) { - break; - } - // If the last one isn't right then propagate - if (i == expected.dateFormats.size() - 1) { - assertEquals(expectedEpochMs, parsed.getMillis()); - } - } catch (RuntimeException e) { - // If the last one throws then propagate - if (i == expected.dateFormats.size() - 1) { - throw e; - } - } - } - assertTrue(expected.simplePattern.matcher(text).find()); - } - public void testFindFirstMatchGivenRealLogMessages() { - assertEquals(new TimestampMatch(7, "[", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", + assertEquals(new TimestampMatch(9, "[", "ISO8601", "yyyy-MM-dd'T'HH:mm:ss,SSS", + "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", "TIMESTAMP_ISO8601", "][INFO ][o.e.e.NodeEnvironment ] [node-0] heap size [3.9gb], compressed ordinary object pointers [true]"), TimestampFormatFinder.findFirstMatch("[2018-05-11T17:07:29,553][INFO ][o.e.e.NodeEnvironment ] [node-0] " + "heap size [3.9gb], compressed ordinary object pointers [true]")); - assertEquals(new TimestampMatch(20, "192.168.62.101 - - [", "dd/MMM/YYYY:HH:mm:ss Z", + assertEquals(new TimestampMatch(23, "192.168.62.101 - - [", "dd/MMM/YYYY:HH:mm:ss Z", "dd/MMM/yyyy:HH:mm:ss XX", "\\b\\d{2}/[A-Z]\\S{2}/\\d{4}:\\d{2}:\\d{2}:\\d{2} ", "HTTPDATE", "] \"POST //apiserv:8080/engine/v2/jobs HTTP/1.1\" 201 42 \"-\" \"curl/7.46.0\" 384"), TimestampFormatFinder.findFirstMatch("192.168.62.101 - - [29/Jun/2016:12:11:31 +0000] " + "\"POST //apiserv:8080/engine/v2/jobs HTTP/1.1\" 201 42 \"-\" \"curl/7.46.0\" 384")); - assertEquals(new TimestampMatch(21, "", "MMM dd, 
YYYY K:mm:ss a", + assertEquals(new TimestampMatch(24, "", "MMM dd, YYYY h:mm:ss a", "MMM dd, yyyy h:mm:ss a", "\\b[A-Z]\\S{2,8} \\d{1,2}, \\d{4} \\d{1,2}:\\d{2}:\\d{2} [AP]M\\b", "CATALINA_DATESTAMP", " org.apache.tomcat.util.http.Parameters processParameters"), TimestampFormatFinder.findFirstMatch("Aug 29, 2009 12:03:57 AM org.apache.tomcat.util.http.Parameters processParameters")); - assertEquals(new TimestampMatch(19, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + assertEquals(new TimestampMatch(22, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\b", "SYSLOGTIMESTAMP", " esxi1.acme.com Vpxa: " + "[3CB3FB90 verbose 'vpxavpxaInvtVm' opID=WFU-33d82c31] [VpxaInvtVmChangeListener] Guest DiskInfo Changed"), TimestampFormatFinder.findFirstMatch("Oct 19 17:04:44 esxi1.acme.com Vpxa: [3CB3FB90 verbose 'vpxavpxaInvtVm' " + "opID=WFU-33d82c31] [VpxaInvtVmChangeListener] Guest DiskInfo Changed")); - assertEquals(new TimestampMatch(7, "559550912540598297\t", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", + assertEquals(new TimestampMatch(10, "559550912540598297\t", "ISO8601", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", "\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\tInfo\tsshd\tsubsystem request for sftp"), TimestampFormatFinder.findFirstMatch("559550912540598297\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t" + "192.168.114.28\tAuthpriv\tInfo\tsshd\tsubsystem request for sftp")); - assertEquals(new TimestampMatch(19, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + assertEquals(new TimestampMatch(22, "", Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), + Arrays.asList("MMM dd HH:mm:ss", "MMM d HH:mm:ss"), "\\b[A-Z]\\S{2,8} {1,2}\\d{1,2} \\d{2}:\\d{2}:\\d{2}\\b", "SYSLOGTIMESTAMP", " dnsserv named[22529]: error (unexpected RCODE REFUSED) 
resolving 'www.elastic.co/A/IN': 95.110.68.206#53"), TimestampFormatFinder.findFirstMatch("Sep 8 11:55:35 dnsserv named[22529]: error (unexpected RCODE REFUSED) resolving " + "'www.elastic.co/A/IN': 95.110.68.206#53")); - assertEquals(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss.SSSSSS", "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}", - "TIMESTAMP_ISO8601", + assertEquals(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss.SSSSSS", "yyyy-MM-dd HH:mm:ss.SSSSSS", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}", "TIMESTAMP_ISO8601", "|INFO |VirtualServer |1 |client 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client " + "'User1'(id:2) in channel '3er Instanz'(id:2)"), TimestampFormatFinder.findFirstMatch("2018-01-06 19:22:20.106822|INFO |VirtualServer |1 |client " + " 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel '3er Instanz'(id:2)")); + + // Differs from the above as the required format is specified + assertEquals(new TimestampMatch(3, "", "YYYY-MM-dd HH:mm:ss.SSSSSS", "yyyy-MM-dd HH:mm:ss.SSSSSS", + "\\b\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d{3}", "TIMESTAMP_ISO8601", + "|INFO |VirtualServer |1 |client 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client " + + "'User1'(id:2) in channel '3er Instanz'(id:2)"), + TimestampFormatFinder.findFirstMatch("2018-01-06 19:22:20.106822|INFO |VirtualServer |1 |client " + + " 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel '3er Instanz'(id:2)", + randomFrom("YYYY-MM-dd HH:mm:ss.SSSSSS", "yyyy-MM-dd HH:mm:ss.SSSSSS"))); + + // Non-matching required format specified + assertNull(TimestampFormatFinder.findFirstMatch("2018-01-06 19:22:20.106822|INFO |VirtualServer |1 |client " + + " 'User1'(id:2) was added to channelgroup 'Channel Admin'(id:5) by client 'User1'(id:2) in channel '3er Instanz'(id:2)", + randomFrom("UNIX", "EEE MMM dd YYYY HH:mm zzz"))); + } + + public void 
testAdjustRequiredFormat() { + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSSSSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSSSSSSSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSSSSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS Z", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSSSSSSSS Z")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSS")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSSSSS")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss,SSSSSSSSS")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSS")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSSSSS")); + assertEquals("YYYY-MM-dd HH:mm:ss,SSS", TimestampFormatFinder.adjustRequiredFormat("YYYY-MM-dd HH:mm:ss.SSSSSSSSS")); } public void testInterpretFractionalSeconds() { @@ -239,4 +243,112 @@ public class TimestampFormatFinderTests extends FileStructureTestCase { assertEquals(new Tuple<>(',', 3), TimestampFormatFinder.interpretFractionalSeconds("2018-01-06T17:21:25,764 Z")); assertEquals(new Tuple<>('.', 3), TimestampFormatFinder.interpretFractionalSeconds("2018-01-06T17:21:25.764 Z")); } + + private void validateTimestampMatch(TimestampMatch expected, String text, long expectedEpochMs) { + + assertEquals(expected, 
TimestampFormatFinder.findFirstMatch(text)); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text)); + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, expected.candidateIndex)); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, expected.candidateIndex)); + assertNull(TimestampFormatFinder.findFirstMatch(text, Integer.MAX_VALUE)); + assertNull(TimestampFormatFinder.findFirstFullMatch(text, Integer.MAX_VALUE)); + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, randomFrom(expected.jodaTimestampFormats))); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, randomFrom(expected.jodaTimestampFormats))); + assertEquals(expected, TimestampFormatFinder.findFirstMatch(text, randomFrom(expected.javaTimestampFormats))); + assertEquals(expected, TimestampFormatFinder.findFirstFullMatch(text, randomFrom(expected.javaTimestampFormats))); + assertNull(TimestampFormatFinder.findFirstMatch(text, "wrong format")); + assertNull(TimestampFormatFinder.findFirstFullMatch(text, "wrong format")); + + validateJodaTimestampFormats(expected.jodaTimestampFormats, text, expectedEpochMs); + validateJavaTimestampFormats(expected.javaTimestampFormats, text, expectedEpochMs); + + assertTrue(expected.simplePattern.matcher(text).find()); + } + + private void validateJodaTimestampFormats(List jodaTimestampFormats, String text, long expectedEpochMs) { + + // All the test times are for Tue May 15 2018 16:14:56 UTC, which is 17:14:56 in London. + // This is the timezone that will be used for any text representations that don't include it. 
+ org.joda.time.DateTimeZone defaultZone = org.joda.time.DateTimeZone.forID("Europe/London"); + org.joda.time.DateTime parsed; + for (int i = 0; i < jodaTimestampFormats.size(); ++i) { + try { + String timestampFormat = jodaTimestampFormats.get(i); + switch (timestampFormat) { + case "ISO8601": + parsed = org.joda.time.format.ISODateTimeFormat.dateTimeParser() + .withZone(defaultZone).withDefaultYear(2018).parseDateTime(text); + break; + default: + org.joda.time.format.DateTimeFormatter parser = + org.joda.time.format.DateTimeFormat.forPattern(timestampFormat).withZone(defaultZone).withLocale(Locale.ROOT); + parsed = parser.withDefaultYear(2018).parseDateTime(text); + break; + } + if (expectedEpochMs == parsed.getMillis()) { + break; + } + // If the last one isn't right then propagate + if (i == jodaTimestampFormats.size() - 1) { + assertEquals(expectedEpochMs, parsed.getMillis()); + } + } catch (RuntimeException e) { + // If the last one throws then propagate + if (i == jodaTimestampFormats.size() - 1) { + throw e; + } + } + } + } + + private void validateJavaTimestampFormats(List javaTimestampFormats, String text, long expectedEpochMs) { + + // All the test times are for Tue May 15 2018 16:14:56 UTC, which is 17:14:56 in London. + // This is the timezone that will be used for any text representations that don't include it. 
+ java.time.ZoneId defaultZone = java.time.ZoneId.of("Europe/London"); + java.time.temporal.TemporalAccessor parsed; + for (int i = 0; i < javaTimestampFormats.size(); ++i) { + try { + String timestampFormat = javaTimestampFormats.get(i); + switch (timestampFormat) { + case "ISO8601": + parsed = DateFormatters.forPattern("strict_date_optional_time_nanos").withZone(defaultZone).parse(text); + break; + default: + java.time.format.DateTimeFormatter parser = new java.time.format.DateTimeFormatterBuilder() + .appendPattern(timestampFormat).parseDefaulting(java.time.temporal.ChronoField.YEAR_OF_ERA, 2018) + .toFormatter(Locale.ROOT); + // This next line parses the textual date without any default timezone, so if + // the text doesn't contain the timezone then the resulting temporal accessor + // will be incomplete (i.e. impossible to convert to an Instant). You would + // hope that it would be possible to specify a timezone to be used only in this + // case, and in Java 9 and 10 it is, by adding withZone(zone) before the + // parse(text) call. However, with Java 8 this overrides any timezone parsed + // from the text. The solution is to parse twice, once without a default + // timezone and then again with a default timezone if the first parse didn't + // find one in the text. 
+ parsed = parser.parse(text); + if (parsed.query(java.time.temporal.TemporalQueries.zone()) == null) { + // TODO: when Java 8 is no longer supported remove the two + // lines and comment above and the closing brace below + parsed = parser.withZone(defaultZone).parse(text); + } + break; + } + long actualEpochMs = java.time.Instant.from(parsed).toEpochMilli(); + if (expectedEpochMs == actualEpochMs) { + break; + } + // If the last one isn't right then propagate + if (i == javaTimestampFormats.size() - 1) { + assertEquals(expectedEpochMs, actualEpochMs); + } + } catch (RuntimeException e) { + // If the last one throws then propagate + if (i == javaTimestampFormats.size() - 1) { + throw e; + } + } + } + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java index 01c44147b04..2429da09018 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java @@ -38,6 +38,6 @@ public class XmlFileStructureFinderTests extends FileStructureTestCase { assertNull(structure.getShouldTrimFields()); assertNull(structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); - assertEquals(Collections.singletonList("UNIX_MS"), structure.getTimestampFormats()); + assertEquals(Collections.singletonList("UNIX_MS"), structure.getJodaTimestampFormats()); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/CountingInputStreamTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/CountingInputStreamTests.java index 45a5e57af5f..b867a6bbe1e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/CountingInputStreamTests.java 
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/CountingInputStreamTests.java @@ -5,18 +5,44 @@ */ package org.elasticsearch.xpack.ml.job.process; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; +import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.mock.orig.Mockito.when; +import static org.mockito.Mockito.mock; public class CountingInputStreamTests extends ESTestCase { + private ClusterService clusterService; + + @Before + public void setUpMocks() { + Settings settings = Settings.builder().put(DataCountsReporter.MAX_ACCEPTABLE_PERCENT_OF_DATE_PARSE_ERRORS_SETTING.getKey(), 10) + .put(DataCountsReporter.MAX_ACCEPTABLE_PERCENT_OF_OUT_OF_ORDER_ERRORS_SETTING.getKey(), 10) + .build(); + Set> setOfSettings = new HashSet<>(); + setOfSettings.add(DataCountsReporter.MAX_ACCEPTABLE_PERCENT_OF_DATE_PARSE_ERRORS_SETTING); + setOfSettings.add(DataCountsReporter.MAX_ACCEPTABLE_PERCENT_OF_OUT_OF_ORDER_ERRORS_SETTING); + ClusterSettings clusterSettings = new ClusterSettings(settings, setOfSettings); + + clusterService = mock(ClusterService.class); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + } + public void testRead_OneByteAtATime() throws IOException { - DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(); + DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(clusterService); final String TEXT = "123"; InputStream source = new ByteArrayInputStream(TEXT.getBytes(StandardCharsets.UTF_8)); @@ -30,7 +56,7 @@ public class CountingInputStreamTests extends ESTestCase { public void 
testRead_WithBuffer() throws IOException { final String TEXT = "To the man who only has a hammer, everything he encounters begins to look like a nail."; - DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(); + DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(clusterService); InputStream source = new ByteArrayInputStream(TEXT.getBytes(StandardCharsets.UTF_8)); @@ -44,7 +70,7 @@ public class CountingInputStreamTests extends ESTestCase { public void testRead_WithTinyBuffer() throws IOException { final String TEXT = "To the man who only has a hammer, everything he encounters begins to look like a nail."; - DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(); + DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(clusterService); InputStream source = new ByteArrayInputStream(TEXT.getBytes(StandardCharsets.UTF_8)); @@ -57,7 +83,7 @@ public class CountingInputStreamTests extends ESTestCase { public void testRead_WithResets() throws IOException { - DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(); + DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(clusterService); final String TEXT = "To the man who only has a hammer, everything he encounters begins to look like a nail."; InputStream source = new ByteArrayInputStream(TEXT.getBytes(StandardCharsets.UTF_8)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java index d3afb732418..5f56db164a3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DataCountsReporterTests.java @@ -6,6 +6,9 @@ package org.elasticsearch.xpack.ml.job.process; import org.elasticsearch.action.ActionListener; +import 
org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; @@ -22,11 +25,15 @@ import org.mockito.Mockito; import java.io.IOException; import java.util.Arrays; import java.util.Date; +import java.util.HashSet; +import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.mock.orig.Mockito.when; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; @@ -38,12 +45,13 @@ public class DataCountsReporterTests extends ESTestCase { private JobDataCountsPersister jobDataCountsPersister; private Settings settings; private TimeValue bucketSpan = TimeValue.timeValueSeconds(300); + private ClusterService clusterService; @Before public void setUpMocks() { settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(DataCountsReporter.ACCEPTABLE_PERCENTAGE_DATE_PARSE_ERRORS_SETTING.getKey(), MAX_PERCENT_DATE_PARSE_ERRORS) - .put(DataCountsReporter.ACCEPTABLE_PERCENTAGE_OUT_OF_ORDER_ERRORS_SETTING.getKey(), MAX_PERCENT_OUT_OF_ORDER_ERRORS) + .put(DataCountsReporter.MAX_ACCEPTABLE_PERCENT_OF_DATE_PARSE_ERRORS_SETTING.getKey(), MAX_PERCENT_DATE_PARSE_ERRORS) + .put(DataCountsReporter.MAX_ACCEPTABLE_PERCENT_OF_OUT_OF_ORDER_ERRORS_SETTING.getKey(), MAX_PERCENT_OUT_OF_ORDER_ERRORS) .build(); AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(Arrays.asList(new Detector.Builder("metric", "field").build())); @@ -51,6 +59,15 @@ public class DataCountsReporterTests extends ESTestCase { acBuilder.setLatency(TimeValue.ZERO); acBuilder.setDetectors(Arrays.asList(new 
Detector.Builder("metric", "field").build())); + + Set> setOfSettings = new HashSet<>(); + setOfSettings.add(DataCountsReporter.MAX_ACCEPTABLE_PERCENT_OF_DATE_PARSE_ERRORS_SETTING); + setOfSettings.add(DataCountsReporter.MAX_ACCEPTABLE_PERCENT_OF_OUT_OF_ORDER_ERRORS_SETTING); + ClusterSettings clusterSettings = new ClusterSettings(settings, setOfSettings); + + clusterService = mock(ClusterService.class); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + Job.Builder builder = new Job.Builder("sr"); builder.setAnalysisConfig(acBuilder); builder.setDataDescription(new DataDescription.Builder()); @@ -61,14 +78,14 @@ public class DataCountsReporterTests extends ESTestCase { public void testSettingAcceptablePercentages() throws IOException { DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()), - jobDataCountsPersister); + jobDataCountsPersister, clusterService); assertEquals(dataCountsReporter.getAcceptablePercentDateParseErrors(), MAX_PERCENT_DATE_PARSE_ERRORS); assertEquals(dataCountsReporter.getAcceptablePercentOutOfOrderErrors(), MAX_PERCENT_OUT_OF_ORDER_ERRORS); } public void testSimpleConstructor() throws Exception { DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()), - jobDataCountsPersister); + jobDataCountsPersister, clusterService); DataCounts stats = dataCountsReporter.incrementalStats(); assertNotNull(stats); assertAllCountFieldsEqualZero(stats); @@ -79,7 +96,7 @@ public class DataCountsReporterTests extends ESTestCase { new Date(), new Date(), new Date(), new Date(), new Date()); DataCountsReporter dataCountsReporter = - new DataCountsReporter(settings, job, counts, jobDataCountsPersister); + new DataCountsReporter(settings, job, counts, jobDataCountsPersister, clusterService); DataCounts stats = dataCountsReporter.incrementalStats(); assertNotNull(stats); assertAllCountFieldsEqualZero(stats); @@ -97,7 +114,7 @@ public class 
DataCountsReporterTests extends ESTestCase { public void testResetIncrementalCounts() throws Exception { DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()), - jobDataCountsPersister); + jobDataCountsPersister, clusterService); DataCounts stats = dataCountsReporter.incrementalStats(); assertNotNull(stats); assertAllCountFieldsEqualZero(stats); @@ -150,7 +167,7 @@ public class DataCountsReporterTests extends ESTestCase { public void testReportLatestTimeIncrementalStats() throws IOException { DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()), - jobDataCountsPersister); + jobDataCountsPersister, clusterService); dataCountsReporter.startNewIncrementalCount(); dataCountsReporter.reportLatestTimeIncrementalStats(5001L); assertEquals(5001L, dataCountsReporter.incrementalStats().getLatestRecordTimeStamp().getTime()); @@ -158,7 +175,7 @@ public class DataCountsReporterTests extends ESTestCase { public void testReportRecordsWritten() { DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()), - jobDataCountsPersister); + jobDataCountsPersister, clusterService); dataCountsReporter.setAnalysedFieldsPerRecord(3); dataCountsReporter.reportRecordWritten(5, 2000); @@ -182,7 +199,7 @@ public class DataCountsReporterTests extends ESTestCase { } public void testReportRecordsWritten_Given9999Records() { - DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(); + DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(clusterService); dataCountsReporter.setAnalysedFieldsPerRecord(3); for (int i = 1; i <= 9999; i++) { @@ -199,7 +216,7 @@ public class DataCountsReporterTests extends ESTestCase { } public void testReportRecordsWritten_Given30000Records() { - DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(); + DummyDataCountsReporter dataCountsReporter = new 
DummyDataCountsReporter(clusterService); dataCountsReporter.setAnalysedFieldsPerRecord(3); for (int i = 1; i <= 30001; i++) { @@ -216,7 +233,7 @@ public class DataCountsReporterTests extends ESTestCase { } public void testReportRecordsWritten_Given100_000Records() { - DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(); + DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(clusterService); dataCountsReporter.setAnalysedFieldsPerRecord(3); for (int i = 1; i <= 100000; i++) { @@ -233,7 +250,7 @@ public class DataCountsReporterTests extends ESTestCase { } public void testReportRecordsWritten_Given1_000_000Records() { - DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(); + DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(clusterService); dataCountsReporter.setAnalysedFieldsPerRecord(3); for (int i = 1; i <= 1_000_000; i++) { @@ -250,7 +267,7 @@ public class DataCountsReporterTests extends ESTestCase { } public void testReportRecordsWritten_Given2_000_000Records() { - DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(); + DummyDataCountsReporter dataCountsReporter = new DummyDataCountsReporter(clusterService); dataCountsReporter.setAnalysedFieldsPerRecord(3); for (int i = 1; i <= 2_000_000; i++) { @@ -269,7 +286,7 @@ public class DataCountsReporterTests extends ESTestCase { public void testFinishReporting() { DataCountsReporter dataCountsReporter = new DataCountsReporter(settings, job, new DataCounts(job.getId()), - jobDataCountsPersister); + jobDataCountsPersister, clusterService); dataCountsReporter.setAnalysedFieldsPerRecord(3); Date now = new Date(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java index bcf41a994b9..6b4c68e1f30 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/DummyDataCountsReporter.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ml.job.process; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; @@ -26,9 +27,9 @@ class DummyDataCountsReporter extends DataCountsReporter { int logStatusCallCount = 0; - DummyDataCountsReporter() { + DummyDataCountsReporter(ClusterService clusterService) { super(Settings.EMPTY, createJob(), new DataCounts("DummyJobId"), - mock(JobDataCountsPersister.class)); + mock(JobDataCountsPersister.class), clusterService); } /** diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index 43cc909e392..dd67de41996 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -8,7 +8,10 @@ package org.elasticsearch.xpack.ml.job.process.autodetect; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import 
org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -41,6 +44,7 @@ import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; +import org.elasticsearch.xpack.ml.job.process.DataCountsReporter; import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; @@ -106,6 +110,7 @@ public class AutodetectProcessManagerTests extends ESTestCase { private JobDataCountsPersister jobDataCountsPersister; private NormalizerFactory normalizerFactory; private Auditor auditor; + private ClusterService clusterService; private DataCounts dataCounts = new DataCounts("foo"); private ModelSizeStats modelSizeStats = new ModelSizeStats.Builder("foo").build(); @@ -126,6 +131,15 @@ public class AutodetectProcessManagerTests extends ESTestCase { normalizerFactory = mock(NormalizerFactory.class); auditor = mock(Auditor.class); + + Set> setOfSettings = new HashSet<>(); + setOfSettings.add(DataCountsReporter.MAX_ACCEPTABLE_PERCENT_OF_DATE_PARSE_ERRORS_SETTING); + setOfSettings.add(DataCountsReporter.MAX_ACCEPTABLE_PERCENT_OF_OUT_OF_ORDER_ERRORS_SETTING); + ClusterSettings clusterSettings = new ClusterSettings(settings, setOfSettings); + + clusterService = mock(ClusterService.class); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + when(jobManager.getJobOrThrowIfUnknown("foo")).thenReturn(createJobDetails("foo")); doAnswer(invocationOnMock -> { @SuppressWarnings("unchecked") @@ -228,7 +242,7 @@ public class AutodetectProcessManagerTests extends ESTestCase { settings.put(AutodetectProcessManager.MAX_OPEN_JOBS_PER_NODE.getKey(), 3); 
AutodetectProcessManager manager = spy(new AutodetectProcessManager(environment, settings.build(), client, threadPool, jobManager, jobResultsProvider, jobResultsPersister, jobDataCountsPersister, autodetectProcessFactory, - normalizerFactory, new NamedXContentRegistry(Collections.emptyList()), auditor)); + normalizerFactory, new NamedXContentRegistry(Collections.emptyList()), auditor, clusterService)); doReturn(executorService).when(manager).createAutodetectExecutorService(any()); doAnswer(invocationOnMock -> { @@ -583,7 +597,7 @@ public class AutodetectProcessManagerTests extends ESTestCase { (j, autodetectParams, e, onProcessCrash) -> autodetectProcess; AutodetectProcessManager manager = new AutodetectProcessManager(environment, Settings.EMPTY, client, threadPool, jobManager, jobResultsProvider, jobResultsPersister, jobDataCountsPersister, autodetectProcessFactory, - normalizerFactory, new NamedXContentRegistry(Collections.emptyList()), auditor); + normalizerFactory, new NamedXContentRegistry(Collections.emptyList()), auditor, clusterService); JobTask jobTask = mock(JobTask.class); when(jobTask.getJobId()).thenReturn("my_id"); @@ -656,7 +670,7 @@ public class AutodetectProcessManagerTests extends ESTestCase { (j, autodetectParams, e, onProcessCrash) -> autodetectProcess; return new AutodetectProcessManager(environment, Settings.EMPTY, client, threadPool, jobManager, jobResultsProvider, jobResultsPersister, jobDataCountsPersister, autodetectProcessFactory, - normalizerFactory, new NamedXContentRegistry(Collections.emptyList()), auditor); + normalizerFactory, new NamedXContentRegistry(Collections.emptyList()), auditor, clusterService); } private AutodetectParams buildAutodetectParams() { @@ -682,7 +696,7 @@ public class AutodetectProcessManagerTests extends ESTestCase { AutodetectProcessManager manager = new AutodetectProcessManager(environment, Settings.EMPTY, client, threadPool, jobManager, jobResultsProvider, jobResultsPersister, jobDataCountsPersister, 
autodetectProcessFactory, normalizerFactory, - new NamedXContentRegistry(Collections.emptyList()), auditor); + new NamedXContentRegistry(Collections.emptyList()), auditor, clusterService); manager = spy(manager); doReturn(communicator).when(manager).create(any(), eq(buildAutodetectParams()), any()); return manager; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParamsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParamsTests.java index 84d9e6ceabd..b3467a3d405 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParamsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/params/ForecastParamsTests.java @@ -16,7 +16,6 @@ import static org.hamcrest.Matchers.equalTo; public class ForecastParamsTests extends ESTestCase { - private static ParseField END = new ParseField("end"); private static ParseField DURATION = new ParseField("duration"); public void testForecastIdsAreUnique() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerTests.java index d06146ad53f..661eeca98db 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/normalizer/NormalizerTests.java @@ -29,7 +29,7 @@ public class NormalizerTests extends ESTestCase { private static final String INDEX_NAME = "foo-index"; private static final String QUANTILES_STATE = "someState"; private static final int BUCKET_SPAN = 600; - private static final double INITIAL_SCORE = 2.0; + private static final double INITIAL_SCORE = 3.0; private static final double FACTOR = 2.0; private Bucket generateBucket(Date 
timestamp) throws IOException { diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index 027cb7de937..d18286a9db5 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.monitoring.action.TransportMonitoringBulkAction; import org.elasticsearch.xpack.monitoring.cleaner.CleanerService; import org.elasticsearch.xpack.monitoring.collector.Collector; +import org.elasticsearch.xpack.monitoring.collector.ccr.CcrAutoFollowStatsCollector; import org.elasticsearch.xpack.monitoring.collector.ccr.CcrStatsCollector; import org.elasticsearch.xpack.monitoring.collector.cluster.ClusterStatsCollector; import org.elasticsearch.xpack.monitoring.collector.indices.IndexRecoveryCollector; @@ -144,6 +145,7 @@ public class Monitoring extends Plugin implements ActionPlugin { collectors.add(new IndexRecoveryCollector(settings, clusterService, getLicenseState(), client)); collectors.add(new JobStatsCollector(settings, clusterService, getLicenseState(), client)); collectors.add(new CcrStatsCollector(settings, clusterService, getLicenseState(), client)); + collectors.add(new CcrAutoFollowStatsCollector(settings, clusterService, getLicenseState(), client)); final MonitoringService monitoringService = new MonitoringService(settings, clusterService, threadPool, collectors, exporters); @@ -183,6 +185,7 @@ public class Monitoring extends Plugin implements ActionPlugin { settings.add(IndexStatsCollector.INDEX_STATS_TIMEOUT); settings.add(JobStatsCollector.JOB_STATS_TIMEOUT); settings.add(CcrStatsCollector.CCR_STATS_TIMEOUT); + settings.add(CcrAutoFollowStatsCollector.CCR_AUTO_FOLLOW_STATS_TIMEOUT); 
settings.add(NodeStatsCollector.NODE_STATS_TIMEOUT); settings.addAll(Exporters.getSettings()); return Collections.unmodifiableList(settings); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollector.java new file mode 100644 index 00000000000..f6b124d6df5 --- /dev/null +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AbstractCcrCollector.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.monitoring.collector.ccr; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.ccr.client.CcrClient; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; +import org.elasticsearch.xpack.monitoring.collector.Collector; + +import java.util.Collection; + +import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; +import static org.elasticsearch.xpack.monitoring.collector.ccr.CcrStatsMonitoringDoc.TYPE; + +public abstract class AbstractCcrCollector extends Collector { + + private final ThreadContext threadContext; + final CcrClient ccrClient; + + AbstractCcrCollector( + final Settings settings, + final 
ClusterService clusterService, + final Setting timeoutSetting, + final XPackLicenseState licenseState, + final CcrClient ccrClient, + final ThreadContext threadContext) { + super(settings, TYPE, clusterService, timeoutSetting, licenseState); + this.ccrClient = ccrClient; + this.threadContext = threadContext; + } + + @Override + protected boolean shouldCollect(final boolean isElectedMaster) { + // this can only run when monitoring is allowed and CCR is enabled and allowed, but also only on the elected master node + return isElectedMaster + && super.shouldCollect(isElectedMaster) + && XPackSettings.CCR_ENABLED_SETTING.get(settings) + && licenseState.isCcrAllowed(); + } + + + @Override + protected Collection doCollect( + final MonitoringDoc.Node node, + final long interval, + final ClusterState clusterState) throws Exception { + try (ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, MONITORING_ORIGIN)) { + final long timestamp = timestamp(); + final String clusterUuid = clusterUuid(clusterState); + return innerDoCollect(timestamp, clusterUuid, interval, node); + } + } + + abstract Collection innerDoCollect( + long timestamp, + String clusterUuid, + long interval, + MonitoringDoc.Node node) throws Exception; +} diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDoc.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDoc.java new file mode 100644 index 00000000000..82312203fd8 --- /dev/null +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/AutoFollowStatsMonitoringDoc.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.monitoring.collector.ccr; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ccr.AutoFollowStats; +import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; + +import java.io.IOException; +import java.util.Objects; + +public class AutoFollowStatsMonitoringDoc extends MonitoringDoc { + + public static final String TYPE = "ccr_auto_follow_stats"; + + private final AutoFollowStats stats; + + public AutoFollowStats stats() { + return stats; + } + + public AutoFollowStatsMonitoringDoc( + final String cluster, + final long timestamp, + final long intervalMillis, + final Node node, + final AutoFollowStats stats) { + super(cluster, timestamp, intervalMillis, node, MonitoredSystem.ES, TYPE, null); + this.stats = Objects.requireNonNull(stats, "stats"); + } + + + @Override + protected void innerToXContent(final XContentBuilder builder, final Params params) throws IOException { + builder.startObject(TYPE); + { + stats.toXContentFragment(builder, params); + } + builder.endObject(); + } + +} diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollector.java new file mode 100644 index 00000000000..e179c204416 --- /dev/null +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrAutoFollowStatsCollector.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +package org.elasticsearch.xpack.monitoring.collector.ccr; + +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.XPackClient; +import org.elasticsearch.xpack.core.ccr.action.AutoFollowStatsAction; +import org.elasticsearch.xpack.core.ccr.client.CcrClient; +import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; + +import java.util.Collection; +import java.util.Collections; + +public final class CcrAutoFollowStatsCollector extends AbstractCcrCollector { + + public static final Setting CCR_AUTO_FOLLOW_STATS_TIMEOUT = collectionTimeoutSetting("ccr.auto_follow.stats.timeout"); + + public CcrAutoFollowStatsCollector( + final Settings settings, + final ClusterService clusterService, + final XPackLicenseState licenseState, + final Client client) { + super(settings, clusterService, CCR_AUTO_FOLLOW_STATS_TIMEOUT, licenseState, new XPackClient(client).ccr(), + client.threadPool().getThreadContext()); + } + + CcrAutoFollowStatsCollector( + final Settings settings, + final ClusterService clusterService, + final XPackLicenseState licenseState, + final CcrClient ccrClient, + final ThreadContext threadContext) { + super(settings, clusterService, CCR_AUTO_FOLLOW_STATS_TIMEOUT, licenseState, ccrClient, threadContext); + } + + @Override + Collection innerDoCollect( + long timestamp, + String clusterUuid, + long interval, + MonitoringDoc.Node node) throws Exception { + + final AutoFollowStatsAction.Request request = new AutoFollowStatsAction.Request(); + final AutoFollowStatsAction.Response response = ccrClient.autoFollowStats(request).actionGet(getCollectionTimeout()); + + final AutoFollowStatsMonitoringDoc doc = + 
new AutoFollowStatsMonitoringDoc(clusterUuid, timestamp, interval, node, response.getStats()); + return Collections.singletonList(doc); + } + +} diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java index 510f430d196..45a8ddc0f1a 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsCollector.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.monitoring.collector.ccr; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -16,32 +15,24 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.xpack.core.XPackClient; -import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; import org.elasticsearch.xpack.core.ccr.client.CcrClient; import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; -import org.elasticsearch.xpack.monitoring.collector.Collector; import java.util.Collection; import java.util.stream.Collectors; -import static org.elasticsearch.xpack.core.ClientHelper.MONITORING_ORIGIN; -import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; -import static org.elasticsearch.xpack.monitoring.collector.ccr.CcrStatsMonitoringDoc.TYPE; - -public class CcrStatsCollector extends Collector { +public final class CcrStatsCollector extends AbstractCcrCollector { 
public static final Setting CCR_STATS_TIMEOUT = collectionTimeoutSetting("ccr.stats.timeout"); - private final ThreadContext threadContext; - private final CcrClient ccrClient; - public CcrStatsCollector( final Settings settings, final ClusterService clusterService, final XPackLicenseState licenseState, final Client client) { - this(settings, clusterService, licenseState, new XPackClient(client).ccr(), client.threadPool().getThreadContext()); + super(settings, clusterService, CCR_STATS_TIMEOUT, licenseState, new XPackClient(client).ccr(), + client.threadPool().getThreadContext()); } CcrStatsCollector( @@ -50,41 +41,26 @@ public class CcrStatsCollector extends Collector { final XPackLicenseState licenseState, final CcrClient ccrClient, final ThreadContext threadContext) { - super(settings, TYPE, clusterService, CCR_STATS_TIMEOUT, licenseState); - this.ccrClient = ccrClient; - this.threadContext = threadContext; + super(settings, clusterService, CCR_STATS_TIMEOUT, licenseState, ccrClient, threadContext); } @Override - protected boolean shouldCollect(final boolean isElectedMaster) { - // this can only run when monitoring is allowed and CCR is enabled and allowed, but also only on the elected master node - return isElectedMaster - && super.shouldCollect(isElectedMaster) - && XPackSettings.CCR_ENABLED_SETTING.get(settings) - && licenseState.isCcrAllowed(); - } + Collection innerDoCollect( + long timestamp, + String clusterUuid, + long interval, + MonitoringDoc.Node node) throws Exception { + final CcrStatsAction.StatsRequest request = new CcrStatsAction.StatsRequest(); + request.setIndices(getCollectionIndices()); + request.setIndicesOptions(IndicesOptions.lenientExpandOpen()); + final CcrStatsAction.StatsResponses responses = ccrClient.stats(request).actionGet(getCollectionTimeout()); - @Override - protected Collection doCollect( - final MonitoringDoc.Node node, - final long interval, - final ClusterState clusterState) throws Exception { - try 
(ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, MONITORING_ORIGIN)) { - final CcrStatsAction.StatsRequest request = new CcrStatsAction.StatsRequest(); - request.setIndices(getCollectionIndices()); - request.setIndicesOptions(IndicesOptions.lenientExpandOpen()); - final CcrStatsAction.StatsResponses responses = ccrClient.stats(request).actionGet(getCollectionTimeout()); - - final long timestamp = timestamp(); - final String clusterUuid = clusterUuid(clusterState); - - return responses - .getStatsResponses() - .stream() - .map(stats -> new CcrStatsMonitoringDoc(clusterUuid, timestamp, interval, node, stats.status())) - .collect(Collectors.toList()); - } + return responses + .getStatsResponses() + .stream() + .map(stats -> new CcrStatsMonitoringDoc(clusterUuid, timestamp, interval, node, stats.status())) + .collect(Collectors.toList()); } } diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java index 1a05b034364..23fe4d46543 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollector.java @@ -112,7 +112,8 @@ public class ClusterStatsCollector extends Collector { // Adds a cluster stats document return Collections.singleton( new ClusterStatsMonitoringDoc(clusterUuid, timestamp(), interval, node, clusterName, version, clusterStats.getStatus(), - license, apmIndicesExist, xpackUsage, clusterStats, clusterState, clusterNeedsTLSEnabled)); + license, apmIndicesExist, xpackUsage, clusterStats, clusterState, + clusterNeedsTLSEnabled)); } boolean doAPMIndicesExist(final ClusterState clusterState) { diff --git 
a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDoc.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDoc.java index 414945c2a15..75c0ba6b81c 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDoc.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDoc.java @@ -8,10 +8,12 @@ package org.elasticsearch.xpack.monitoring.collector.cluster; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.license.License; @@ -45,6 +47,7 @@ public class ClusterStatsMonitoringDoc extends MonitoringDoc { ClusterState.Metric.NODES)); public static final String TYPE = "cluster_stats"; + protected static final String SETTING_CLUSTER_METADATA = "cluster.metadata"; private final String clusterName; private final String version; @@ -118,6 +121,14 @@ public class ClusterStatsMonitoringDoc extends MonitoringDoc { return clusterNeedsTLSEnabled; } + Settings getClusterMetaDataSettings() { + MetaData metaData = this.clusterState.getMetaData(); + if (metaData == null) { + return Settings.EMPTY; + } + return metaData.settings().getAsSettings(SETTING_CLUSTER_METADATA); + } + @Override protected void innerToXContent(XContentBuilder builder, Params params) 
throws IOException { builder.field("cluster_name", clusterName); @@ -156,6 +167,25 @@ public class ClusterStatsMonitoringDoc extends MonitoringDoc { builder.endObject(); } + Settings clusterMetaDataSettings = getClusterMetaDataSettings(); + if (clusterMetaDataSettings != null) { + builder.startObject("cluster_settings"); + { + if (clusterMetaDataSettings.size() > 0) { + builder.startObject("cluster"); + { + builder.startObject("metadata"); + { + clusterMetaDataSettings.toXContent(builder, params); + } + builder.endObject(); + } + builder.endObject(); + } + } + builder.endObject(); + } + builder.startObject("stack_stats"); { // in the future, it may be useful to pass in an object that represents APM (and others), but for now this diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index 61224ac0fd7..bc531bb2d6b 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -203,7 +203,12 @@ public class ClusterStatsMonitoringDocTests extends BaseMonitoringDocTestCase meta = ImmutableOpenMap.builder().build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> TransportRollupSearchAction.separateIndices(indices, meta)); + expectThrows(IllegalArgumentException.class, () -> TransportRollupSearchAction.separateIndices(indices, meta)); } public void testSeparateAll() { @@ -774,6 +774,7 @@ public class SearchActionTests extends ESTestCase { MultiSearchResponse msearchResponse = new MultiSearchResponse(new MultiSearchResponse.Item[]{unrolledResponse, rolledResponse}, 123); + SearchResponse response = 
TransportRollupSearchAction.processResponses(separateIndices, msearchResponse, mock(InternalAggregation.ReduceContext.class)); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index f5d335ca6f1..bee43bce471 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -200,7 +200,6 @@ public class IndexerUtilsTests extends AggregatorTestCase { String indexName = randomAlphaOfLengthBetween(1, 10); RollupIndexerJobStats stats= new RollupIndexerJobStats(0, 0, 0, 0); - String timestampField = "the_histo"; String valueField = "the_avg"; Directory directory = newDirectory(); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 55f1cfbdbb2..2e52160a6fa 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; -import org.elasticsearch.common.joda.DateMathParser; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.rounding.Rounding; import org.elasticsearch.common.unit.TimeValue; @@ -47,13 +46,13 @@ import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation; import 
org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; import org.elasticsearch.xpack.core.rollup.job.MetricConfig; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; -import org.elasticsearch.xpack.core.indexing.IndexerState; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.junit.Before; @@ -601,7 +600,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase { RangeQueryBuilder range = (RangeQueryBuilder) request.source().query(); final DateTimeZone timeZone = range.timeZone() != null ? DateTimeZone.forID(range.timeZone()) : null; Query query = timestampField.rangeQuery(range.from(), range.to(), range.includeLower(), range.includeUpper(), - null, timeZone, new DateMathParser(Joda.forPattern(range.format())), queryShardContext); + null, timeZone, Joda.forPattern(range.format()).toDateMathParser(), queryShardContext); // extract composite agg assertThat(request.source().aggregations().getAggregatorFactories().size(), equalTo(1)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 937bd22d982..4f1ec4ad8c0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -118,6 +118,7 @@ import java.util.function.Supplier; import static 
org.elasticsearch.action.support.TransportActions.isShardNotAvailableException; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; +import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -846,7 +847,7 @@ public final class TokenService extends AbstractComponent { ); final SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setQuery(boolQuery) .setVersion(false) .setSize(1000) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index d923a029804..620c3817ebb 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; @@ -62,6 +61,7 @@ import java.util.Map; import java.util.function.Consumer; import java.util.function.Supplier; +import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static 
org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; @@ -139,7 +139,7 @@ public class NativeUsersStore extends AbstractComponent { final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setQuery(query) .setSize(1000) .setFetchSource(true) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 677d13082ca..b45de8184d6 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -56,6 +55,7 @@ import java.util.stream.Stream; import static org.elasticsearch.action.DocWriteResponse.Result.CREATED; import static org.elasticsearch.action.DocWriteResponse.Result.DELETED; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static 
org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; @@ -129,7 +129,7 @@ public class NativeRoleMappingStore extends AbstractComponent implements UserRol final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setTypes(SECURITY_GENERIC_TYPE) .setQuery(query) .setSize(1000) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index beb2ca60fb2..7e1cc49e2c0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -97,9 +97,7 @@ public class CompositeRolesStore extends AbstractComponent { ThreadContext threadContext, XPackLicenseState licenseState) { super(settings); this.fileRolesStore = fileRolesStore; - // invalidating all on a file based role update is heavy handed to say the least, but in general this should be infrequent so the - // impact isn't really worth the added complexity of only clearing the changed values - fileRolesStore.addListener(this::invalidateAll); + fileRolesStore.addListener(this::invalidate); this.nativeRolesStore = nativeRolesStore; this.reservedRolesStore = reservedRolesStore; this.privilegeStore = privilegeStore; @@ -356,6 +354,23 @@ public 
class CompositeRolesStore extends AbstractComponent { negativeLookupCache.remove(role); } + public void invalidate(Set roles) { + numInvalidation.incrementAndGet(); + + // the cache cannot be modified while doing this operation per the terms of the cache iterator + try (ReleasableLock ignored = writeLock.acquire()) { + Iterator> keyIter = roleCache.keys().iterator(); + while (keyIter.hasNext()) { + Set key = keyIter.next(); + if (Sets.haveEmptyIntersection(key, roles) == false) { + keyIter.remove(); + } + } + } + + negativeLookupCache.removeAll(roles); + } + public void usageStats(ActionListener> listener) { final Map usage = new HashMap<>(2); usage.put("file", fileRolesStore.usageStats()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index 59bc8042fba..868a7076b8b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -12,6 +12,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; @@ -34,13 +35,16 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Consumer; 
import java.util.regex.Pattern; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; @@ -52,16 +56,16 @@ public class FileRolesStore extends AbstractComponent { private final Path file; private final XPackLicenseState licenseState; - private final List listeners = new ArrayList<>(); + private final List>> listeners = new ArrayList<>(); private volatile Map permissions; public FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, XPackLicenseState licenseState) throws IOException { - this(settings, env, watcherService, () -> {}, licenseState); + this(settings, env, watcherService, null, licenseState); } - FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Runnable listener, + FileRolesStore(Settings settings, Environment env, ResourceWatcherService watcherService, Consumer> listener, XPackLicenseState licenseState) throws IOException { super(settings); this.file = resolveFile(env); @@ -76,9 +80,10 @@ public class FileRolesStore extends AbstractComponent { } public Set roleDescriptors(Set roleNames) { + final Map localPermissions = permissions; Set descriptors = new HashSet<>(); roleNames.forEach((name) -> { - RoleDescriptor descriptor = permissions.get(name); + RoleDescriptor descriptor = localPermissions.get(name); if (descriptor != null) { descriptors.add(descriptor); } @@ -87,12 +92,13 @@ public class FileRolesStore extends AbstractComponent { } public Map usageStats() { + final Map localPermissions = permissions; Map usageStats = new HashMap<>(3); - usageStats.put("size", permissions.size()); + usageStats.put("size", localPermissions.size()); boolean dls = false; boolean fls = false; - for (RoleDescriptor descriptor : permissions.values()) { + for (RoleDescriptor descriptor : localPermissions.values()) { for (IndicesPrivileges indicesPrivileges : descriptor.getIndicesPrivileges()) { fls = fls || 
indicesPrivileges.getGrantedFields() != null || indicesPrivileges.getDeniedFields() != null; dls = dls || indicesPrivileges.getQuery() != null; @@ -107,10 +113,10 @@ public class FileRolesStore extends AbstractComponent { return usageStats; } - public void addListener(Runnable runnable) { - Objects.requireNonNull(runnable); + public void addListener(Consumer> consumer) { + Objects.requireNonNull(consumer); synchronized (this) { - listeners.add(runnable); + listeners.add(consumer); } } @@ -118,6 +124,11 @@ public class FileRolesStore extends AbstractComponent { return file; } + // package private for testing + Set getAllRoleNames() { + return permissions.keySet(); + } + public static Path resolveFile(Environment env) { return XPackPlugin.resolveConfigFile(env, "roles.yml"); } @@ -319,11 +330,13 @@ public class FileRolesStore extends AbstractComponent { } @Override - public void onFileChanged(Path file) { + public synchronized void onFileChanged(Path file) { if (file.equals(FileRolesStore.this.file)) { + final Map previousPermissions = permissions; try { permissions = parseFile(file, logger, settings, licenseState); - logger.info("updated roles (roles file [{}] {})", file.toAbsolutePath(), Files.exists(file) ? "changed" : "removed"); + logger.info("updated roles (roles file [{}] {})", file.toAbsolutePath(), + Files.exists(file) ? 
"changed" : "removed"); } catch (Exception e) { logger.error( (Supplier) () -> new ParameterizedMessage( @@ -331,9 +344,13 @@ public class FileRolesStore extends AbstractComponent { return; } - synchronized (FileRolesStore.this) { - listeners.forEach(Runnable::run); - } + final Set changedOrMissingRoles = Sets.difference(previousPermissions.entrySet(), permissions.entrySet()) + .stream() + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); + final Set addedRoles = Sets.difference(permissions.keySet(), previousPermissions.keySet()); + final Set changedRoles = Collections.unmodifiableSet(Sets.union(changedOrMissingRoles, addedRoles)); + listeners.forEach(c -> c.accept(changedRoles)); } } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java index 807cfff6c2c..2cfa89b647c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStore.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.iterable.Iterables; @@ -56,6 +55,7 @@ import java.util.stream.Collector; import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static 
org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; @@ -115,7 +115,7 @@ public class NativePrivilegeStore extends AbstractComponent { final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { SearchRequest request = client.prepareSearch(SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setQuery(query) .setSize(1000) .setFetchSource(true) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index e578a4005c4..e032d524038 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -59,6 +59,7 @@ import java.util.function.Supplier; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; +import static org.elasticsearch.search.SearchService.DEFAULT_KEEPALIVE_SETTING; import static org.elasticsearch.xpack.core.ClientHelper.SECURITY_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; @@ -120,7 +121,7 @@ public class NativeRolesStore extends AbstractComponent { final Supplier supplier = client.threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN)) { SearchRequest request = 
client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) - .setScroll(TimeValue.timeValueSeconds(10L)) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) .setQuery(query) .setSize(1000) .setFetchSource(true) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java index 7ab26b0c33f..3d623f343c3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java @@ -79,6 +79,7 @@ public class ScrollHelperIntegTests extends ESSingleNodeTestCase { when(client.threadPool()).thenReturn(threadPool); when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); SearchRequest request = new SearchRequest(); + request.scroll(TimeValue.timeValueHours(10L)); String scrollId = randomAlphaOfLength(5); SearchHit[] hits = new SearchHit[] {new SearchHit(1), new SearchHit(2)}; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java index 94856f701fa..612a0ea83c0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java @@ -126,7 +126,6 @@ public class TransportDeleteRoleActionTests extends ESTestCase { DeleteRoleRequest request = new DeleteRoleRequest(); request.name(roleName); - final boolean found = randomBoolean(); doAnswer(new Answer() { @Override public Void answer(InvocationOnMock invocation) throws Throwable { diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java index ef3c6aa56ae..022328f426f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Requests; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.SecurityIntegTestCase; @@ -161,6 +162,7 @@ public class AuditTrailTests extends SecurityIntegTestCase { client.admin().indices().refresh(Requests.refreshRequest(indexName)).get(); final SearchRequest request = client.prepareSearch(indexName) + .setScroll(TimeValue.timeValueMinutes(10L)) .setTypes(IndexAuditTrail.DOC_TYPE) .setQuery(QueryBuilders.matchAllQuery()) .setSize(1000) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java index e239c8706b9..f2c91437c3e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java @@ -33,7 +33,6 @@ import org.opensaml.xmlsec.signature.Signature; import org.opensaml.xmlsec.signature.X509Certificate; import org.opensaml.xmlsec.signature.X509Data; import org.opensaml.xmlsec.signature.support.SignatureValidator; -import org.w3c.dom.Element; 
import java.io.OutputStream; import java.nio.file.Files; @@ -385,7 +384,7 @@ public class SamlMetadataCommandTests extends SamlTestCase { final MockTerminal terminal = new MockTerminal(); final EntityDescriptor descriptor = command.buildEntityDescriptor(terminal, options, env); - Element e = command.possiblySignDescriptor(terminal, options, descriptor, env); + command.possiblySignDescriptor(terminal, options, descriptor, env); assertThat(descriptor, notNullValue()); // Verify generated signature assertThat(descriptor.getSignature(), notNullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java index 036f1667e14..dca113b6e42 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java @@ -362,8 +362,7 @@ public class IndexAliasesTests extends SecurityIntegTestCase { IndicesAliasesAction.NAME, "create_test_aliases_alias"); //fails: user doesn't have manage_aliases on test_*, wildcards can't get replaced - IndexNotFoundException indexNotFoundException = expectThrows(IndexNotFoundException.class, - client.admin().indices().prepareAliases().removeAlias("test_*", "alias_1")::get); + expectThrows(IndexNotFoundException.class, client.admin().indices().prepareAliases().removeAlias("test_*", "alias_1")::get); } public void testGetAliasesCreateAndAliasesPermission2() { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index 0c2ab1ecc76..9f1490856d6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java 
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -53,6 +53,7 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; @@ -213,7 +214,7 @@ public class CompositeRolesStoreTests extends ESTestCase { new CompositeRolesStore(SECURITY_ENABLED_SETTINGS, fileRolesStore, nativeRolesStore, reservedRolesStore, mock(NativePrivilegeStore.class), Collections.emptyList(), new ThreadContext(SECURITY_ENABLED_SETTINGS), new XPackLicenseState(SECURITY_ENABLED_SETTINGS)); - verify(fileRolesStore).addListener(any(Runnable.class)); // adds a listener in ctor + verify(fileRolesStore).addListener(any(Consumer.class)); // adds a listener in ctor final String roleName = randomAlphaOfLengthBetween(1, 10); PlainActionFuture future = new PlainActionFuture<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 5cb93b898ba..0763ff65ec5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -37,6 +37,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -319,8 +320,11 @@ public class FileRolesStoreTests extends ESTestCase { threadPool = new TestThreadPool("test"); watcherService = new ResourceWatcherService(settings, threadPool); final CountDownLatch latch = new CountDownLatch(1); - 
FileRolesStore store = new FileRolesStore(settings, env, watcherService, latch::countDown, - new XPackLicenseState(Settings.EMPTY)); + final Set modifiedRoles = new HashSet<>(); + FileRolesStore store = new FileRolesStore(settings, env, watcherService, roleSet -> { + modifiedRoles.addAll(roleSet); + latch.countDown(); + }, new XPackLicenseState(Settings.EMPTY)); Set descriptors = store.roleDescriptors(Collections.singleton("role1")); assertThat(descriptors, notNullValue()); @@ -344,6 +348,8 @@ public class FileRolesStoreTests extends ESTestCase { fail("Waited too long for the updated file to be picked up"); } + assertEquals(1, modifiedRoles.size()); + assertTrue(modifiedRoles.contains("role5")); final TransportRequest request = mock(TransportRequest.class); descriptors = store.roleDescriptors(Collections.singleton("role5")); assertThat(descriptors, notNullValue()); @@ -354,6 +360,49 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(role.cluster().check("cluster:monitor/foo/bar", request), is(true)); assertThat(role.cluster().check("cluster:admin/foo/bar", request), is(false)); + // truncate to remove some + final Set truncatedFileRolesModified = new HashSet<>(); + final CountDownLatch truncateLatch = new CountDownLatch(1); + store = new FileRolesStore(settings, env, watcherService, roleSet -> { + truncatedFileRolesModified.addAll(roleSet); + truncateLatch.countDown(); + }, new XPackLicenseState(Settings.EMPTY)); + + final Set allRolesPreTruncate = store.getAllRoleNames(); + try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) { + writer.append("role5:").append(System.lineSeparator()); + writer.append(" cluster:").append(System.lineSeparator()); + writer.append(" - 'MONITOR'"); + } + + truncateLatch.await(); + assertEquals(allRolesPreTruncate.size() - 1, truncatedFileRolesModified.size()); + assertTrue(allRolesPreTruncate.contains("role5")); + 
assertFalse(truncatedFileRolesModified.contains("role5")); + descriptors = store.roleDescriptors(Collections.singleton("role5")); + assertThat(descriptors, notNullValue()); + assertEquals(1, descriptors.size()); + + // modify + final Set modifiedFileRolesModified = new HashSet<>(); + final CountDownLatch modifyLatch = new CountDownLatch(1); + store = new FileRolesStore(settings, env, watcherService, roleSet -> { + modifiedFileRolesModified.addAll(roleSet); + modifyLatch.countDown(); + }, new XPackLicenseState(Settings.EMPTY)); + + try (BufferedWriter writer = Files.newBufferedWriter(tmp, StandardCharsets.UTF_8, StandardOpenOption.TRUNCATE_EXISTING)) { + writer.append("role5:").append(System.lineSeparator()); + writer.append(" cluster:").append(System.lineSeparator()); + writer.append(" - 'ALL'"); + } + + modifyLatch.await(); + assertEquals(1, modifiedFileRolesModified.size()); + assertTrue(modifiedFileRolesModified.contains("role5")); + descriptors = store.roleDescriptors(Collections.singleton("role5")); + assertThat(descriptors, notNullValue()); + assertEquals(1, descriptors.size()); } finally { if (watcherService != null) { watcherService.stop(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java index 05eb4bc5564..2e1a423d5fd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/AbstractSimpleSecurityTransportTestCase.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.security.transport; +import java.util.concurrent.atomic.AtomicBoolean; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.SuppressForbidden; @@ 
-38,7 +39,6 @@ import java.net.SocketTimeoutException; import java.net.UnknownHostException; import java.nio.file.Path; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -123,7 +123,6 @@ public abstract class AbstractSimpleSecurityTransportTestCase extends AbstractSi } @SuppressForbidden(reason = "Need to open socket connection") - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33772") public void testRenegotiation() throws Exception { SSLService sslService = createSSLService(); final SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.ssl"); @@ -148,27 +147,24 @@ public abstract class AbstractSimpleSecurityTransportTestCase extends AbstractSi HandshakeCompletedListener secondListener = event -> renegotiationLatch.countDown(); socket.addHandshakeCompletedListener(secondListener); socket.startHandshake(); - - AtomicReference error = new AtomicReference<>(); - CountDownLatch catchReadErrorsLatch = new CountDownLatch(1); - Thread renegotiationThread = new Thread(() -> { - try { - socket.setSoTimeout(50); - socket.getInputStream().read(); - } catch (SocketTimeoutException e) { - // Ignore. We expect a timeout. - } catch (IOException e) { - error.set(e); - } finally { - catchReadErrorsLatch.countDown(); + AtomicBoolean stopped = new AtomicBoolean(false); + socket.setSoTimeout(10); + Thread emptyReader = new Thread(() -> { + while (stopped.get() == false) { + try { + socket.getInputStream().read(); + } catch (SocketTimeoutException e) { + // Ignore. We expect a timeout. 
+ } catch (IOException e) { + throw new AssertionError(e); + } } }); - renegotiationThread.start(); + emptyReader.start(); renegotiationLatch.await(); + stopped.set(true); + emptyReader.join(); socket.removeHandshakeCompletedListener(secondListener); - catchReadErrorsLatch.await(); - - assertNull(error.get()); stream.writeByte((byte) 'E'); stream.writeByte((byte) 'S'); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java index 541e6606912..16bdc705a43 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java @@ -32,10 +32,6 @@ import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.LocalStateSecurity; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLHandshakeException; -import javax.net.ssl.TrustManagerFactory; - import java.io.InputStreamReader; import java.net.InetSocketAddress; import java.nio.charset.StandardCharsets; @@ -48,10 +44,13 @@ import java.util.List; import java.util.Locale; import java.util.Set; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLHandshakeException; +import javax.net.ssl.TrustManagerFactory; + import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; public class SslIntegrationTests extends SecurityIntegTestCase { @@ -150,10 +149,7 @@ public class SslIntegrationTests extends SecurityIntegTestCase { SSLConnectionSocketFactory sf = new SSLConnectionSocketFactory(sslContext, new String[]{ "SSLv3" }, null, 
NoopHostnameVerifier.INSTANCE); try (CloseableHttpClient client = HttpClients.custom().setSSLSocketFactory(sf).build()) { - CloseableHttpResponse result = SocketAccess.doPrivileged(() -> client.execute(new HttpGet(getNodeUrl()))); - fail("Expected a connection error due to SSLv3 not being supported by default"); - } catch (Exception e) { - assertThat(e, is(instanceOf(SSLHandshakeException.class))); + expectThrows(SSLHandshakeException.class, () -> SocketAccess.doPrivileged(() -> client.execute(new HttpGet(getNodeUrl())))); } } diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java index a2ab9060b5a..ca35504b2c8 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcConfiguration.java @@ -66,7 +66,6 @@ public class JdbcConfiguration extends ConnectionConfiguration { } // immutable properties - private final String originalUrl; private final boolean debug; private final String debugOut; @@ -147,8 +146,6 @@ public class JdbcConfiguration extends ConnectionConfiguration { private JdbcConfiguration(URI baseURI, String u, Properties props) throws JdbcSQLException { super(baseURI, u, props); - this.originalUrl = u; - this.debug = parseValue(DEBUG, props.getProperty(DEBUG, DEBUG_DEFAULT), Boolean::parseBoolean); this.debugOut = props.getProperty(DEBUG_OUTPUT, DEBUG_OUTPUT_DEFAULT); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index f45368afc06..ec56e2c2f11 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -112,10 +112,6 @@ public class Analyzer extends RuleExecutor { new ResolveAggsInHaving() //new ImplicitCasting() ); - // TODO: this might be removed since the deduplication happens already in ResolveFunctions - Batch deduplication = new Batch("Deduplication", - new PruneDuplicateFunctions()); - return Arrays.asList(substitution, resolution); } @@ -196,7 +192,7 @@ public class Analyzer extends RuleExecutor { .collect(toList()) ); } - + private static boolean hasStar(List exprs) { for (Expression expression : exprs) { if (expression instanceof UnresolvedStar) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java index 58d858c4241..23d2c20d305 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java @@ -18,7 +18,6 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FromClauseContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.GroupByContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinCriteriaContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinRelationContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinTypeContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LimitClauseContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedQueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryContext; @@ -33,7 +32,6 @@ import org.elasticsearch.xpack.sql.plan.logical.Aggregate; import org.elasticsearch.xpack.sql.plan.logical.Distinct; import org.elasticsearch.xpack.sql.plan.logical.Filter; import org.elasticsearch.xpack.sql.plan.logical.Join; -import 
org.elasticsearch.xpack.sql.plan.logical.Join.JoinType; import org.elasticsearch.xpack.sql.plan.logical.Limit; import org.elasticsearch.xpack.sql.plan.logical.LocalRelation; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; @@ -168,39 +166,20 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder { LogicalPlan result = plan(ctx.relationPrimary()); for (JoinRelationContext j : ctx.joinRelation()) { - result = doJoin(result, j); + result = doJoin(j); } return result; } - private Join doJoin(LogicalPlan left, JoinRelationContext ctx) { - JoinTypeContext joinType = ctx.joinType(); + private Join doJoin(JoinRelationContext ctx) { - Join.JoinType type = JoinType.INNER; - if (joinType != null) { - if (joinType.FULL() != null) { - type = JoinType.FULL; - } - if (joinType.LEFT() != null) { - type = JoinType.LEFT; - } - if (joinType.RIGHT() != null) { - type = JoinType.RIGHT; - } - } - - Expression condition = null; JoinCriteriaContext criteria = ctx.joinCriteria(); if (criteria != null) { if (criteria.USING() != null) { throw new UnsupportedOperationException(); } - if (criteria.booleanExpression() != null) { - condition = expression(criteria.booleanExpression()); - } } - // We would return this if we actually supported JOINs, but we don't yet. 
// new Join(source(ctx), left, plan(ctx.right), type, condition); throw new ParsingException(source(ctx), "Queries with JOIN are not yet supported"); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java index c1f47564a4d..2aee552907b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.parser; +import com.carrotsearch.hppc.ObjectShortHashMap; import org.antlr.v4.runtime.BaseErrorListener; import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CommonToken; @@ -22,8 +23,8 @@ import org.antlr.v4.runtime.atn.PredictionMode; import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.misc.Pair; import org.antlr.v4.runtime.tree.TerminalNode; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; @@ -41,7 +42,8 @@ import java.util.function.Function; import static java.lang.String.format; public class SqlParser { - private static final Logger log = Loggers.getLogger(SqlParser.class); + + private static final Logger log = LogManager.getLogger(); private final boolean DEBUG = false; @@ -83,7 +85,9 @@ public class SqlParser { return invokeParser(expression, params, SqlBaseParser::singleExpression, AstBuilder::expression); } - private T invokeParser(String sql, List params, Function parseFunction, + private T invokeParser(String sql, + List params, Function parseFunction, BiFunction visitor) { SqlBaseLexer lexer = new SqlBaseLexer(new CaseInsensitiveStream(sql)); @@ -96,6 +100,7 @@ public class 
SqlParser { CommonTokenStream tokenStream = new CommonTokenStream(tokenSource); SqlBaseParser parser = new SqlBaseParser(tokenStream); + parser.addParseListener(new CircuitBreakerListener()); parser.addParseListener(new PostProcessor(Arrays.asList(parser.getRuleNames()))); parser.removeErrorListeners(); @@ -125,7 +130,7 @@ public class SqlParser { return visitor.apply(new AstBuilder(paramTokens), tree); } - private void debug(SqlBaseParser parser) { + private static void debug(SqlBaseParser parser) { // when debugging, use the exact prediction mode (needed for diagnostics as well) parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION); @@ -154,7 +159,7 @@ public class SqlParser { public void exitBackQuotedIdentifier(SqlBaseParser.BackQuotedIdentifierContext context) { Token token = context.BACKQUOTED_IDENTIFIER().getSymbol(); throw new ParsingException( - "backquoted indetifiers not supported; please use double quotes instead", + "backquoted identifiers not supported; please use double quotes instead", null, token.getLine(), token.getCharPositionInLine()); @@ -205,6 +210,35 @@ public class SqlParser { } } + /** + * Used to catch large expressions that can lead to stack overflows + */ + private class CircuitBreakerListener extends SqlBaseBaseListener { + + private static final short MAX_RULE_DEPTH = 100; + + // Keep current depth for every rule visited. + // The totalDepth alone cannot be used as expressions like: e1 OR e2 OR e3 OR ... + // are processed as e1 OR (e2 OR (e3 OR (... and this results in the totalDepth not growing + // while the stack call depth is, leading to a StackOverflowError. 
+ private ObjectShortHashMap depthCounts = new ObjectShortHashMap<>(); + + @Override + public void enterEveryRule(ParserRuleContext ctx) { + short currentDepth = depthCounts.putOrAdd(ctx.getClass().getSimpleName(), (short) 1, (short) 1); + if (currentDepth > MAX_RULE_DEPTH) { + throw new ParsingException("expression is too large to parse, (tree's depth exceeds {})", MAX_RULE_DEPTH); + } + super.enterEveryRule(ctx); + } + + @Override + public void exitEveryRule(ParserRuleContext ctx) { + depthCounts.putOrAdd(ctx.getClass().getSimpleName(), (short) 0, (short) -1); + super.exitEveryRule(ctx); + } + } + private static final BaseErrorListener ERROR_LISTENER = new BaseErrorListener() { @Override public void syntaxError(Recognizer recognizer, Object offendingSymbol, int line, diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java index 60f621b38a3..1b466c7852f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/AggFilter.java @@ -18,11 +18,13 @@ import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorB public class AggFilter extends PipelineAgg { + private static final String BUCKET_SELECTOR_ID_PREFIX = "having"; + private final ScriptTemplate scriptTemplate; private final Map aggPaths; public AggFilter(String name, ScriptTemplate scriptTemplate) { - super(name); + super(BUCKET_SELECTOR_ID_PREFIX + name); Check.isTrue(scriptTemplate != null, "a valid script is required"); this.scriptTemplate = scriptTemplate; this.aggPaths = scriptTemplate.aggPaths(); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java index ccd2bf934ab..0953517d5f9 100644 
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByScriptKey.java @@ -39,8 +39,12 @@ public class GroupByScriptKey extends GroupByKey { .order(direction().asOrder()) .missingBucket(true); - if (script.outputType().isNumeric()) { - builder.valueType(ValueType.NUMBER); + if (script.outputType().isInteger) { + builder.valueType(ValueType.LONG); + } else if (script.outputType().isRational) { + builder.valueType(ValueType.DOUBLE); + } else if (script.outputType().isString()) { + builder.valueType(ValueType.STRING); } return builder; diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java index 816b6651335..43c72b5583d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java @@ -61,12 +61,12 @@ public class SourceGeneratorTests extends ESTestCase { public void testLimit() { QueryContainer container = new QueryContainer().withLimit(10).addGroups(singletonList(new GroupByColumnKey("1", "field"))); - SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); + int size = randomIntBetween(1, 10); + SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, size); Builder aggBuilder = sourceBuilder.aggregations(); assertEquals(1, aggBuilder.count()); CompositeAggregationBuilder composite = (CompositeAggregationBuilder) aggBuilder.getAggregatorFactories().get(0); - // TODO: cannot access size - //assertEquals(10, composite.size()); + assertEquals(size, composite.size()); } public void testSortNoneSpecified() { diff --git 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java index ceb9611a62c..2f23ea7cb73 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java @@ -70,7 +70,7 @@ public class QuotingTests extends ESTestCase { String name = "@timestamp"; ParsingException ex = expectThrows(ParsingException.class, () -> new SqlParser().createExpression(quote + name + quote)); - assertThat(ex.getMessage(), equalTo("line 1:1: backquoted indetifiers not supported; please use double quotes instead")); + assertThat(ex.getMessage(), equalTo("line 1:1: backquoted identifiers not supported; please use double quotes instead")); } public void testQuotedAttributeAndQualifier() { @@ -92,7 +92,7 @@ public class QuotingTests extends ESTestCase { String name = "@timestamp"; ParsingException ex = expectThrows(ParsingException.class, () -> new SqlParser().createExpression(quote + qualifier + quote + "." 
+ quote + name + quote)); - assertThat(ex.getMessage(), equalTo("line 1:1: backquoted indetifiers not supported; please use double quotes instead")); + assertThat(ex.getMessage(), equalTo("line 1:1: backquoted identifiers not supported; please use double quotes instead")); } public void testGreedyQuoting() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java index 3e7e562e599..aab25349a1d 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.sql.parser; +import com.google.common.base.Joiner; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.NamedExpression; import org.elasticsearch.xpack.sql.expression.Order; @@ -22,6 +23,7 @@ import org.elasticsearch.xpack.sql.plan.logical.Project; import java.util.ArrayList; import java.util.List; +import static java.util.Collections.nCopies; import static java.util.stream.Collectors.toList; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasSize; @@ -136,6 +138,88 @@ public class SqlParserTests extends ESTestCase { assertThat(mmqp.optionMap(), hasEntry("fuzzy_rewrite", "scoring_boolean")); } + public void testLimitToPreventStackOverflowFromLargeUnaryBooleanExpression() { + // Create expression in the form of NOT(NOT(NOT ... (b) ...) 
+ + // 40 elements is ok + new SqlParser().createExpression( + Joiner.on("").join(nCopies(40, "NOT(")).concat("b").concat(Joiner.on("").join(nCopies(40, ")")))); + + // 100 elements parser's "circuit breaker" is triggered + ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createExpression( + Joiner.on("").join(nCopies(100, "NOT(")).concat("b").concat(Joiner.on("").join(nCopies(100, ")"))))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + + public void testLimitToPreventStackOverflowFromLargeBinaryBooleanExpression() { + // Create expression in the form of a = b OR a = b OR ... a = b + + // 50 elements is ok + new SqlParser().createExpression(Joiner.on(" OR ").join(nCopies(50, "a = b"))); + + // 100 elements parser's "circuit breaker" is triggered + ParsingException e = expectThrows(ParsingException.class, () -> + new SqlParser().createExpression(Joiner.on(" OR ").join(nCopies(100, "a = b")))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + + public void testLimitToPreventStackOverflowFromLargeUnaryArithmeticExpression() { + // Create expression in the form of abs(abs(abs ... (i) ...) + + // 50 elements is ok + new SqlParser().createExpression( + Joiner.on("").join(nCopies(50, "abs(")).concat("i").concat(Joiner.on("").join(nCopies(50, ")")))); + + // 101 elements parser's "circuit breaker" is triggered + ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createExpression( + Joiner.on("").join(nCopies(101, "abs(")).concat("i").concat(Joiner.on("").join(nCopies(101, ")"))))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + + public void testLimitToPreventStackOverflowFromLargeBinaryArithmeticExpression() { + // Create expression in the form of a + a + a + ... 
+ a + + // 100 elements is ok + new SqlParser().createExpression(Joiner.on(" + ").join(nCopies(100, "a"))); + + // 101 elements parser's "circuit breaker" is triggered + ParsingException e = expectThrows(ParsingException.class, () -> + new SqlParser().createExpression(Joiner.on(" + ").join(nCopies(101, "a")))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + + public void testLimitToPreventStackOverflowFromLargeSubselectTree() { + // Test with queries in the form of `SELECT * FROM (SELECT * FROM (... t) ...) + + // 100 elements is ok + new SqlParser().createStatement( + Joiner.on(" (").join(nCopies(100, "SELECT * FROM")) + .concat("t") + .concat(Joiner.on("").join(nCopies(99, ")")))); + + // 101 elements parser's "circuit breaker" is triggered + ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createStatement( + Joiner.on(" (").join(nCopies(101, "SELECT * FROM")) + .concat("t") + .concat(Joiner.on("").join(nCopies(100, ")"))))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + + public void testLimitToPreventStackOverflowFromLargeComplexSubselectTree() { + // Test with queries in the form of `SELECT true OR true OR .. FROM (SELECT true OR true OR... FROM (... t) ...) + + new SqlParser().createStatement( + Joiner.on(" (").join(nCopies(20, "SELECT ")). + concat(Joiner.on(" OR ").join(nCopies(50, "true"))).concat(" FROM") + .concat("t").concat(Joiner.on("").join(nCopies(19, ")")))); + + ParsingException e = expectThrows(ParsingException.class, () -> new SqlParser().createStatement( + Joiner.on(" (").join(nCopies(20, "SELECT ")). 
+ concat(Joiner.on(" OR ").join(nCopies(100, "true"))).concat(" FROM") + .concat("t").concat(Joiner.on("").join(nCopies(19, ")"))))); + assertEquals("expression is too large to parse, (tree's depth exceeds 100)", e.getErrorMessage()); + } + private LogicalPlan parseStatement(String sql) { return new SqlParser().createStatement(sql); } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json new file mode 100644 index 00000000000..c8826dc5f1b --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.get_auto_follow_pattern.json @@ -0,0 +1,16 @@ +{ + "ccr.get_auto_follow_pattern": { + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current", + "methods": [ "GET" ], + "url": { + "path": "/_ccr/auto_follow/{leader_cluster_alias}", + "paths": [ "/_ccr/auto_follow", "/_ccr/auto_follow/{leader_cluster_alias}" ], + "parts": { + "leader_cluster_alias": { + "type": "string", + "description": "The name of the leader cluster alias." 
+ } + } + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json index 20a5c8e0c2a..e630b34ddf7 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json @@ -49,7 +49,7 @@ }, "timestamp_format": { "type": "string", - "description": "Optional parameter to specify the timestamp format in the file" + "description": "Optional parameter to specify the timestamp format in the file - may be either a Joda or Java time format" }, "explain": { "type": "boolean", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml index fcacdb9cdc7..8f1d9a4dc9c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml @@ -27,7 +27,8 @@ - match: { has_byte_order_marker: false } - match: { format: json } - match: { timestamp_field: time } - - match: { timestamp_formats.0: UNIX } + - match: { joda_timestamp_formats.0: UNIX } + - match: { java_timestamp_formats.0: UNIX } - match: { need_client_timezone: false } - match: { mappings.airline.type: keyword } - match: { mappings.responsetime.type: double } @@ -83,7 +84,8 @@ - match: { has_byte_order_marker: false } - match: { format: json } - match: { timestamp_field: time } - - match: { timestamp_formats.0: UNIX } + - match: { joda_timestamp_formats.0: UNIX } + - match: { java_timestamp_formats.0: UNIX } - match: { need_client_timezone: false } - match: { mappings.airline.type: keyword } - match: { mappings.responsetime.type: double } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java 
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java index 7b87a9e87a5..d2159fd572f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateEngine.java @@ -35,6 +35,11 @@ public class TextTemplateEngine extends AbstractComponent { String mediaType = compileParams(detectContentType(template)); template = trimContentType(textTemplate); + int indexStartMustacheExpression = template.indexOf("{{"); + if (indexStartMustacheExpression == -1) { + return template; + } + Map mergedModel = new HashMap<>(); if (textTemplate.getParams() != null) { mergedModel.putAll(textTemplate.getParams()); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java index e2befe9a24e..b65eca086c4 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/condition/ScriptCondition.java @@ -29,20 +29,17 @@ public final class ScriptCondition implements ExecutableCondition { private static final Result MET = new Result(null, TYPE, true); private static final Result UNMET = new Result(null, TYPE, false); - private final ScriptService scriptService; private final Script script; private final ExecutableScript.Factory scriptFactory; public ScriptCondition(Script script) { this.script = script; - scriptService = null; scriptFactory = null; } - ScriptCondition(Script script, ScriptService scriptService) { - this.scriptService = scriptService; + ScriptCondition(Script script, ExecutableScript.Factory scriptFactory) { this.script = script; - scriptFactory = scriptService.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT); 
+ this.scriptFactory = scriptFactory; } public Script getScript() { @@ -52,7 +49,7 @@ public final class ScriptCondition implements ExecutableCondition { public static ScriptCondition parse(ScriptService scriptService, String watchId, XContentParser parser) throws IOException { try { Script script = Script.parse(parser); - return new ScriptCondition(script, scriptService); + return new ScriptCondition(script, scriptService.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)); } catch (ElasticsearchParseException pe) { throw new ElasticsearchParseException("could not parse [{}] condition for watch [{}]. failed to parse script", pe, TYPE, watchId); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java index b2498a749d7..fdc5ca07b84 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyAccount.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.notification.pagerduty; -import org.apache.logging.log4j.Logger; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; @@ -29,15 +28,13 @@ public class PagerDutyAccount { private final String serviceKey; private final HttpClient httpClient; private final IncidentEventDefaults eventDefaults; - private final Logger logger; - PagerDutyAccount(String name, Settings accountSettings, Settings serviceSettings, HttpClient httpClient, Logger logger) { + PagerDutyAccount(String name, Settings accountSettings, Settings serviceSettings, HttpClient httpClient) { this.name = name; this.serviceKey = getServiceKey(name, accountSettings, serviceSettings); this.httpClient 
= httpClient; this.eventDefaults = new IncidentEventDefaults(accountSettings.getAsSettings(TRIGGER_DEFAULTS_SETTING)); - this.logger = logger; } public String getName() { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java index 32a6dcb91aa..c10bcf4782f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java @@ -51,7 +51,7 @@ public class PagerDutyService extends NotificationService { @Override protected PagerDutyAccount createAccount(String name, Settings accountSettings) { - return new PagerDutyAccount(name, accountSettings, accountSettings, httpClient, logger); + return new PagerDutyAccount(name, accountSettings, accountSettings, httpClient); } public static List> getSettings() { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java index 0e084af23e1..002d833c209 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java @@ -21,6 +21,7 @@ import org.junit.Before; import java.util.Collections; import java.util.HashMap; +import java.util.Locale; import java.util.Map; import static java.util.Collections.singletonMap; @@ -31,7 +32,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Matchers.any; import static 
org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; public class TextTemplateTests extends ESTestCase { @@ -47,7 +51,7 @@ public class TextTemplateTests extends ESTestCase { } public void testRender() throws Exception { - String templateText = "_template"; + String templateText = "{{_template}}"; Map params = singletonMap("param_key", "param_val"); Map model = singletonMap("model_key", "model_val"); Map merged = new HashMap<>(params); @@ -72,7 +76,7 @@ public class TextTemplateTests extends ESTestCase { } public void testRenderOverridingModel() throws Exception { - String templateText = "_template"; + String templateText = "{{_template}}"; Map params = singletonMap("key", "param_val"); Map model = singletonMap("key", "model_val"); ScriptType type = randomFrom(ScriptType.values()); @@ -94,7 +98,7 @@ public class TextTemplateTests extends ESTestCase { } public void testRenderDefaults() throws Exception { - String templateText = "_template"; + String templateText = "{{_template}}"; Map model = singletonMap("key", "model_val"); TemplateScript.Factory compiledTemplate = templateParams -> @@ -113,6 +117,39 @@ public class TextTemplateTests extends ESTestCase { assertThat(engine.render(template, model), is("rendered_text")); } + public void testDontInvokeScriptServiceOnNonMustacheText() { + assertNoCompilation("this is my text"); + assertScriptServiceInvoked("}}{{"); + assertScriptServiceInvoked("}}{{ctx.payload}}"); + } + + private void assertNoCompilation(String input) { + String output = engine.render(new TextTemplate(input), Collections.emptyMap()); + assertThat(input, is(output)); + verifyZeroInteractions(service); + } + + private void assertScriptServiceInvoked(final String input) { + ScriptService scriptService = mock(ScriptService.class); + TextTemplateEngine e = new TextTemplateEngine(Settings.EMPTY, scriptService); + + TemplateScript.Factory 
compiledTemplate = templateParams -> + new TemplateScript(templateParams) { + @Override + public String execute() { + return input.toUpperCase(Locale.ROOT); + } + }; + + when(scriptService.compile(new Script(ScriptType.INLINE, lang, input, + Collections.singletonMap("content_type", "text/plain"), Collections.emptyMap()), Watcher.SCRIPT_TEMPLATE_CONTEXT)) + .thenReturn(compiledTemplate); + + String output = e.render(new TextTemplate(input), Collections.emptyMap()); + verify(scriptService).compile(any(), any()); + assertThat(output, is(input.toUpperCase(Locale.ROOT))); + } + public void testParser() throws Exception { ScriptType type = randomScriptType(); TextTemplate template = diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java index 8ae0025066e..c29452302c0 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/AlwaysConditionTests.java @@ -8,9 +8,11 @@ package org.elasticsearch.xpack.watcher.condition; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.condition.ExecutableCondition; +import org.elasticsearch.xpack.watcher.Watcher; import java.time.Clock; @@ -53,7 +55,8 @@ public class AlwaysConditionTests extends ESTestCase { String type = randomFrom(ScriptCondition.TYPE, InternalAlwaysCondition.TYPE, CompareCondition.TYPE, ArrayCompareCondition.TYPE); switch (type) { case ScriptCondition.TYPE: - return new ScriptCondition(mockScript("_script"), scriptService); + 
Script mockScript = mockScript("_script"); + return new ScriptCondition(mockScript, scriptService.compile(mockScript, Watcher.SCRIPT_EXECUTABLE_CONTEXT)); case CompareCondition.TYPE: return new CompareCondition("_path", randomFrom(CompareCondition.Op.values()), randomFrom(5, "3"), Clock.systemUTC()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java index c7b7f2c63cd..bef38a27b6b 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java @@ -94,7 +94,8 @@ public class ScriptConditionTests extends ESTestCase { } public void testExecute() throws Exception { - ScriptCondition condition = new ScriptCondition(mockScript("ctx.payload.hits.total > 1"), scriptService); + Script script = mockScript("ctx.payload.hits.total > 1"); + ScriptCondition condition = new ScriptCondition(script, scriptService.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)); SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response)); @@ -103,7 +104,7 @@ public class ScriptConditionTests extends ESTestCase { public void testExecuteMergedParams() throws Exception { Script script = new Script(ScriptType.INLINE, "mockscript", "ctx.payload.hits.total > threshold", singletonMap("threshold", 1)); - ScriptCondition executable = new ScriptCondition(script, scriptService); + ScriptCondition executable = new ScriptCondition(script, scriptService.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)); SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 
500L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response)); @@ -181,8 +182,9 @@ public class ScriptConditionTests extends ESTestCase { } public void testScriptConditionThrowException() throws Exception { + Script script = mockScript("null.foo"); ScriptCondition condition = new ScriptCondition( - mockScript("null.foo"), scriptService); + script, scriptService.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)); SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response)); @@ -191,7 +193,8 @@ public class ScriptConditionTests extends ESTestCase { } public void testScriptConditionReturnObjectThrowsException() throws Exception { - ScriptCondition condition = new ScriptCondition(mockScript("return new Object()"), scriptService); + Script script = mockScript("return new Object()"); + ScriptCondition condition = new ScriptCondition(script, scriptService.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)); SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response)); @@ -201,8 +204,8 @@ public class ScriptConditionTests extends ESTestCase { } public void testScriptConditionAccessCtx() throws Exception { - ScriptCondition condition = new ScriptCondition(mockScript("ctx.trigger.scheduled_time.getMillis() < new Date().time"), - scriptService); + Script script = mockScript("ctx.trigger.scheduled_time.getMillis() < new Date().time"); + ScriptCondition condition = new ScriptCondition(script, scriptService.compile(script, Watcher.SCRIPT_EXECUTABLE_CONTEXT)); SearchResponse response = new 
SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); WatchExecutionContext ctx = mockExecutionContext("_name", new DateTime(DateTimeZone.UTC), new Payload.XContent(response)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 3461c530b44..8cce6fd6663 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.test; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; @@ -181,7 +180,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase public void _setup() throws Exception { if (timeWarped()) { timeWarp = new TimeWarp(internalCluster().getInstances(ScheduleTriggerEngineMock.class), - (ClockMock)getInstanceFromMaster(Clock.class), logger); + (ClockMock)getInstanceFromMaster(Clock.class)); } if (internalCluster().size() > 0) { @@ -541,12 +540,10 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase private final List schedulers; private final ClockMock clock; - private final Logger logger; - TimeWarp(Iterable schedulers, ClockMock clock, Logger logger) { + TimeWarp(Iterable schedulers, ClockMock clock) { this.schedulers = StreamSupport.stream(schedulers.spliterator(), false).collect(Collectors.toList()); this.clock = clock; - this.logger = logger; } public void trigger(String jobName) { diff 
--git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java index ae3066a3ee6..2e8190da42f 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/watch/WatchTests.java @@ -236,8 +236,7 @@ public class WatchTests extends ESTestCase { TriggerService triggerService = new TriggerService(Settings.EMPTY, Collections.emptySet()) { @Override public Trigger parseTrigger(String jobName, XContentParser parser) throws IOException { - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { } return new ScheduleTrigger(randomSchedule()); diff --git a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java b/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java index 4ac927c6646..3581bf2fda7 100644 --- a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java +++ b/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolIT.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.security; import joptsimple.OptionParser; import joptsimple.OptionSet; -import org.elasticsearch.action.search.SearchResponse; + import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; @@ -125,6 +125,6 @@ public class MigrateToolIT extends MigrateToolTestCase { .waitForEvents(Priority.LANGUID) .waitForNoRelocatingShards(true)) .actionGet(); - SearchResponse searchResp = client.filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("index1").get(); + 
client.filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("index1").get(); } } diff --git a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java b/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java index 2987c1afc8d..0111aeff4cc 100644 --- a/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java +++ b/x-pack/qa/security-migrate-tests/src/test/java/org/elasticsearch/xpack/security/MigrateToolTestCase.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.security; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.xpack.client.PreBuiltXPackTransportClient; @@ -58,7 +58,7 @@ public abstract class MigrateToolTestCase extends LuceneTestCase { */ public static final String TESTS_CLUSTER_DEFAULT = "localhost:9300"; - protected static final Logger logger = ESLoggerFactory.getLogger(MigrateToolTestCase.class.getName()); + protected static final Logger logger = LogManager.getLogger(MigrateToolTestCase.class); private static final AtomicInteger counter = new AtomicInteger(); private static Client client; @@ -129,7 +129,6 @@ public abstract class MigrateToolTestCase extends LuceneTestCase { @BeforeClass public static void initializeSettings() throws UnknownHostException { - String port = System.getProperty("integ.http.port"); clusterAddresses = System.getProperty(TESTS_CLUSTER); clusterHttpAddresses = System.getProperty(TESTS_HTTP_CLUSTER); if 
(clusterAddresses == null || clusterAddresses.isEmpty()) { diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java index 133006c66a8..b0a0d36fba4 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java @@ -14,10 +14,8 @@ import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Types; import java.util.ArrayList; -import java.util.Calendar; import java.util.List; import java.util.Locale; -import java.util.TimeZone; import static java.lang.String.format; import static java.sql.Types.BIGINT; @@ -35,8 +33,6 @@ import static org.junit.Assert.fail; * Utility class for doing JUnit-style asserts over JDBC. */ public class JdbcAssert { - private static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT); - public static void assertResultSets(ResultSet expected, ResultSet actual) throws SQLException { assertResultSets(expected, actual, null); } @@ -133,7 +129,7 @@ public class JdbcAssert { doAssertResultSetData(ex, ac, logger, lenient); } } - + private static void doAssertResultSetData(ResultSet expected, ResultSet actual, Logger logger, boolean lenient) throws SQLException { ResultSetMetaData metaData = expected.getMetaData(); int columns = metaData.getColumnCount(); @@ -172,7 +168,7 @@ public class JdbcAssert { } catch (ClassNotFoundException cnfe) { throw new SQLException(cnfe); } - + Object expectedObject = expected.getObject(column); Object actualObject = lenient ? 
actual.getObject(column, expectedColumnClass) : actual.getObject(column); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java index 7403bee5448..60605fb4a0d 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/rest/RestSqlTestCase.java @@ -28,7 +28,9 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.sql.JDBCType; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -415,6 +417,109 @@ public abstract class RestSqlTestCase extends ESRestTestCase implements ErrorsTe assertEquals("foo", matchQuery.get("query")); } + public void testTranslateQueryWithGroupByAndHaving() throws IOException { + index("{\"salary\":100}", + "{\"age\":20}"); + + Map response = runSql("", + new StringEntity("{\"query\":\"SELECT avg(salary) FROM test GROUP BY abs(age) HAVING avg(salary) > 50 LIMIT 10\"}", + ContentType.APPLICATION_JSON), "/translate/" + ); + + assertEquals(response.get("size"), 0); + assertEquals(false, response.get("_source")); + assertEquals("_none_", response.get("stored_fields")); + + @SuppressWarnings("unchecked") + Map aggregations = (Map) response.get("aggregations"); + assertEquals(1, aggregations.size()); + assertNotNull(aggregations); + + @SuppressWarnings("unchecked") + Map groupby = (Map) aggregations.get("groupby"); + assertEquals(2, groupby.size()); + + @SuppressWarnings("unchecked") + Map composite = (Map) groupby.get("composite"); + assertEquals(2, composite.size()); + assertEquals(10, composite.get("size")); + + @SuppressWarnings("unchecked") + List sources = (List) composite.get("sources"); + assertEquals(1, sources.size()); + + 
@SuppressWarnings("unchecked") + Map sourcesListMap = + (Map) ((Map) sources.get(0)).values().iterator().next(); + assertEquals(1, sourcesListMap.size()); + + @SuppressWarnings("unchecked") + Map terms = (Map) sourcesListMap.get("terms"); + assertEquals(4, terms.size()); + assertEquals("long", terms.get("value_type")); + assertEquals(true, terms.get("missing_bucket")); + assertEquals("asc", terms.get("order")); + + @SuppressWarnings("unchecked") + Map termsScript = (Map) terms.get("script"); + assertEquals(3, termsScript.size()); + assertEquals("Math.abs(doc[params.v0].value)", termsScript.get("source")); + assertEquals("painless", termsScript.get("lang")); + + @SuppressWarnings("unchecked") + Map termsScriptParams = (Map) termsScript.get("params"); + assertEquals(1, termsScriptParams.size()); + assertEquals("age", termsScriptParams.get("v0")); + + @SuppressWarnings("unchecked") + Map aggregations2 = (Map) groupby.get("aggregations"); + assertEquals(3, aggregations2.size()); + + List aggKeys = new ArrayList<>(2); + String aggFilterKey = null; + for (Map.Entry entry : aggregations2.entrySet()) { + String key = entry.getKey(); + if (key.startsWith("having")) { + aggFilterKey = key; + } else { + aggKeys.add(Integer.valueOf(key)); + @SuppressWarnings("unchecked") + Map aggr = (Map) entry.getValue(); + assertEquals(1, aggr.size()); + @SuppressWarnings("unchecked") + Map avg = (Map) aggr.get("avg"); + assertEquals(1, avg.size()); + assertEquals("salary", avg.get("field")); + } + } + Collections.sort(aggKeys); + assertEquals("having" + aggKeys.get(1), aggFilterKey); + + @SuppressWarnings("unchecked") + Map having = (Map) aggregations2.get(aggFilterKey); + assertEquals(1, having.size()); + + @SuppressWarnings("unchecked") + Map bucketSelector = (Map) having.get("bucket_selector"); + assertEquals(3, bucketSelector.size()); + assertEquals("skip", bucketSelector.get("gap_policy")); + + @SuppressWarnings("unchecked") + Map bucketsPath = (Map) 
bucketSelector.get("buckets_path"); + assertEquals(1, bucketsPath.size()); + assertEquals(aggKeys.get(1).toString(), bucketsPath.get("a0")); + + @SuppressWarnings("unchecked") + Map filterScript = (Map) bucketSelector.get("script"); + assertEquals(3, filterScript.size()); + assertEquals("params.a0 > params.v0", filterScript.get("source")); + assertEquals("painless", filterScript.get("lang")); + @SuppressWarnings("unchecked") + Map filterScriptParams = (Map) filterScript.get("params"); + assertEquals(1, filterScriptParams.size()); + assertEquals(50, filterScriptParams.get("v0")); + } + public void testBasicQueryText() throws IOException { index("{\"test\":\"test\"}", "{\"test\":\"test\"}"); diff --git a/x-pack/qa/sql/src/main/resources/docs.csv-spec b/x-pack/qa/sql/src/main/resources/docs.csv-spec index dbc1b67a3e8..481831b2ad8 100644 --- a/x-pack/qa/sql/src/main/resources/docs.csv-spec +++ b/x-pack/qa/sql/src/main/resources/docs.csv-spec @@ -1126,6 +1126,222 @@ SELECT YEAR(CAST('2018-05-19T11:23:45Z' AS TIMESTAMP)) AS year; // end::conversionStringToDateCast ; +/////////////////////////////// +// +// Math +// +/////////////////////////////// + +mathInlineAbs +// tag::abs +SELECT ABS(-123.5), ABS(55); + + ABS(-123.5) | ABS(55) +---------------+--------------- +123.5 |55 +// end::abs +; + +mathInlineAcos +// tag::mathInlineAcos +SELECT ACOS(COS(PI())), PI(); + + ACOS(COS(PI)) | PI +-----------------+----------------- +3.141592653589793|3.141592653589793 +// end::mathInlineAcos +; + +mathInlineAsin +// tag::mathInlineAsin +SELECT ROUND(DEGREES(ASIN(0.7071067811865475))) AS "ASIN(0.707)", ROUND(SIN(RADIANS(45)), 3) AS "SIN(45)"; + + ASIN(0.707) | SIN(45) +---------------+--------------- +45.0 |0.707 +// end::mathInlineAsin +; + +mathInlineAtan +// tag::mathInlineAtan +SELECT DEGREES(ATAN(TAN(RADIANS(90)))); + +DEGREES(ATAN(TAN(RADIANS(90)))) +------------------------------- +90.0 +// end::mathInlineAtan +; + +mathInlineAtan2 +// tag::mathInlineAtan2 +SELECT ATAN2(5 * 
SIN(RADIANS(45)), 5 * COS(RADIANS(45))) AS "ATAN2(5*SIN(45), 5*COS(45))", RADIANS(45); + +ATAN2(5*SIN(45), 5*COS(45))| RADIANS(45) +---------------------------+------------------ +0.7853981633974483 |0.7853981633974483 +// end::mathInlineAtan2 +; + +mathInlineCbrtWithNegativeValue +// tag::mathInlineCbrtWithNegativeValue +SELECT CBRT(-125.5); + + CBRT(-125.5) +------------------- +-5.0066577974783435 +// end::mathInlineCbrtWithNegativeValue +; + +mathInlineCeiling +// tag::mathInlineCeiling +SELECT CEIL(125.01), CEILING(-125.99); + + CEIL(125.01) | CEIL(-125.99) +---------------+--------------- +126 |-125 +// end::mathInlineCeiling +; + +mathInlineCosine +// tag::mathInlineCosine +SELECT COS(RADIANS(180)), POWER(SIN(RADIANS(54)), 2) + POWER(COS(RADIANS(54)), 2) AS pythagorean_identity; + +COS(RADIANS(180))|pythagorean_identity +-----------------+-------------------- +-1.0 |1.0 +// end::mathInlineCosine +; + +mathInlineCosh +// tag::mathInlineCosh +SELECT COSH(5), (POWER(E(), 5) + POWER(E(), -5)) / 2 AS "(e^5 + e^-5)/2"; + + COSH(5) | (e^5 + e^-5)/2 +-----------------+----------------- +74.20994852478785|74.20994852478783 +// end::mathInlineCosh +; + +mathInlineCotangent +// tag::mathInlineCotangent +SELECT COT(RADIANS(30)) AS "COT(30)", COS(RADIANS(30)) / SIN(RADIANS(30)) AS "COS(30)/SIN(30)"; + + COT(30) | COS(30)/SIN(30) +------------------+------------------ +1.7320508075688774|1.7320508075688776 +// end::mathInlineCotangent +; + +mathInlineDegrees +// tag::mathInlineDegrees +SELECT DEGREES(PI() * 2), DEGREES(PI()); + +DEGREES(((PI) * 2))| DEGREES(PI) +-------------------+--------------- +360.0 |180.0 +// end::mathInlineDegrees +; + +mathEulersNumber +// tag::mathEulersNumber +SELECT E(), CEIL(E()); + + E | CEIL(E) +-----------------+--------------- +2.718281828459045|3 +// end::mathEulersNumber +; + +mathExpInline +// tag::mathExpInline +SELECT EXP(1), E(), EXP(2), E() * E(); + + EXP(1) | E | EXP(2) | ((E) * (E)) 
+-----------------+-----------------+----------------+------------------ +2.718281828459045|2.718281828459045|7.38905609893065|7.3890560989306495 +// end::mathExpInline +; + +mathExpm1Inline +// tag::mathExpm1Inline +SELECT E(), EXP(2), EXPM1(2); + + E | EXP(2) | EXPM1(2) +-----------------+----------------+---------------- +2.718281828459045|7.38905609893065|6.38905609893065 +// end::mathExpm1Inline +; + +mathInlineFloor +// tag::mathInlineFloor +SELECT FLOOR(125.01), FLOOR(-125.99); + + FLOOR(125.01) |FLOOR(-125.99) +---------------+--------------- +125 |-126 +// end::mathInlineFloor +; + +mathInlineLog +// tag::mathInlineLog +SELECT EXP(3), LOG(20.085536923187668); + + EXP(3) |LOG(20.085536923187668) +------------------+----------------------- +20.085536923187668|3.0 +// end::mathInlineLog +; + +mathInlineLog10 +// tag::mathInlineLog10 +SELECT LOG10(5), LOG(5)/LOG(10); + + LOG10(5) |((LOG(5)) / (LOG(10))) +------------------+---------------------- +0.6989700043360189|0.6989700043360187 +// end::mathInlineLog10 +; + +mathPINumber +// tag::mathPINumber +SELECT PI(); + + PI +----------------- +3.141592653589793 +// end::mathPINumber +; + +mathInlinePowerPositive +// tag::mathInlinePowerPositive +SELECT POWER(3, 2), POWER(3, 3); + + POWER(3,2) | POWER(3,3) +---------------+--------------- +9.0 |27.0 +// end::mathInlinePowerPositive +; + +mathInlinePowerNegative +// tag::mathInlinePowerNegative +SELECT POWER(5, -1), POWER(5, -2); + + POWER(5,-1) | POWER(5,-2) +---------------+--------------- +0.2 |0.04 +// end::mathInlinePowerNegative +; + +mathInlineRadians +// tag::mathInlineRadians +SELECT RADIANS(90), PI()/2; + + RADIANS(90) | ((PI) / 2) +------------------+------------------ +1.5707963267948966|1.5707963267948966 +// end::mathInlineRadians +; + mathRoundWithNegativeParameter // tag::mathRoundWithNegativeParameter SELECT ROUND(-345.153, -1) AS rounded; @@ -1146,6 +1362,56 @@ SELECT ROUND(-345.153, 1) AS rounded; // end::mathRoundWithPositiveParameter ; 
+mathInlineSign +// tag::mathInlineSign +SELECT SIGN(-123), SIGN(0), SIGN(415); + + SIGN(-123) | SIGN(0) | SIGN(415) +---------------+---------------+--------------- +-1 |0 |1 +// end::mathInlineSign +; + +mathInlineSine +// tag::mathInlineSine +SELECT SIN(RADIANS(90)), POWER(SIN(RADIANS(67)), 2) + POWER(COS(RADIANS(67)), 2) AS pythagorean_identity; + +SIN(RADIANS(90))|pythagorean_identity +----------------+-------------------- +1.0 |1.0 +// end::mathInlineSine +; + +mathInlineSinh +// tag::mathInlineSinh +SELECT SINH(5), (POWER(E(), 5) - POWER(E(), -5)) / 2 AS "(e^5 - e^-5)/2"; + + SINH(5) | (e^5 - e^-5)/2 +-----------------+----------------- +74.20321057778875|74.20321057778874 +// end::mathInlineSinh +; + +mathInlineSqrt +// tag::mathInlineSqrt +SELECT SQRT(EXP(2)), E(), SQRT(25); + + SQRT(EXP(2)) | E | SQRT(25) +-----------------+-----------------+--------------- +2.718281828459045|2.718281828459045|5.0 +// end::mathInlineSqrt +; + +mathInlineTanget +// tag::mathInlineTanget +SELECT TAN(RADIANS(66)) AS "TAN(66)", SIN(RADIANS(66))/COS(RADIANS(66)) AS "SIN(66)/COS(66)=TAN(66)"; + + TAN(66) |SIN(66)/COS(66)=TAN(66) +------------------+----------------------- +2.2460367739042164|2.246036773904216 +// end::mathInlineTanget +; + mathTruncateWithNegativeParameter // tag::mathTruncateWithNegativeParameter SELECT TRUNCATE(-345.153, -1) AS trimmed; @@ -1164,4 +1430,5 @@ SELECT TRUNCATE(-345.153, 1) AS trimmed; --------------- -345.1 // end::mathTruncateWithPositiveParameter -; \ No newline at end of file +; + diff --git a/x-pack/qa/sql/src/main/resources/math.sql-spec b/x-pack/qa/sql/src/main/resources/math.sql-spec index 74855b00b11..a8ab87b087b 100644 --- a/x-pack/qa/sql/src/main/resources/math.sql-spec +++ b/x-pack/qa/sql/src/main/resources/math.sql-spec @@ -3,86 +3,50 @@ // mathAbs -// tag::abs SELECT ABS(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::abs mathACos -// tag::acos SELECT ACOS(emp_no) m, first_name FROM "test_emp" 
WHERE emp_no < 10010 ORDER BY emp_no; -// end::acos mathASin -// tag::asin SELECT ASIN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::asin mathATan -// tag::atan SELECT ATAN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::atan //mathCbrt //SELECT CBRT(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; mathCeil // H2 returns CEIL as a double despite the value being an integer; we return a long as the other DBs SELECT CAST(CEIL(emp_no) AS INT) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; mathCos -// tag::cos SELECT COS(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::cos mathCosh -// tag::cosh SELECT COSH(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::cosh mathCot -// tag::cot SELECT COT(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::cot mathDegrees -// tag::degrees SELECT DEGREES(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::degrees mathExp -// tag::exp SELECT EXP(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::exp mathExpm1 -// tag::expm1 SELECT EXP(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::expm1 mathFloor SELECT CAST(FLOOR(emp_no) AS INT) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; mathLog -// tag::log SELECT LOG(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::log mathLog10 -// tag::log10 SELECT LOG10(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::log10 mathRadians -// tag::radians SELECT RADIANS(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::radians mathRound SELECT CAST(ROUND(emp_no, 0) AS INT) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; mathSign -// 
tag::sign SELECT SIGN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::sign mathSin -// tag::sin SELECT SIN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::sin mathSinH -// tag::sinh SELECT SINH(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::sinh mathSqrt -// tag::sqrt SELECT SQRT(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::sqrt mathTan -// tag::tan SELECT TAN(emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::tan // // Combined methods @@ -125,15 +89,11 @@ SELECT 5 + 2 * 3 / 2 % 2 AS c, PI() as e, first_name FROM "test_emp" WHERE emp_n // binary functions // mathATan2 -// tag::atan2 SELECT ATAN2(emp_no, emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::atan2 -// tag::power mathPowerPositive SELECT POWER(emp_no, 2) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; mathPowerNegative SELECT POWER(salary, -1) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; -// end::power roundInline1 SELECT ROUND(-345.123, -2) AS rounded; diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index d6b2e321d6c..135ae14b9c8 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.security.authc.ldap; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionFuture; 
import org.elasticsearch.action.DocWriteResponse; @@ -13,7 +14,6 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -110,8 +110,9 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase realmConfig = randomFrom(RealmConfig.values()); roleMappings = realmConfig.selectRoleMappings(ESTestCase::randomBoolean); useGlobalSSL = randomBoolean(); - ESLoggerFactory.getLogger("test").info("running test with realm configuration [{}], with direct group to role mapping [{}]. " + - "Settings [{}]", realmConfig, realmConfig.mapGroupsAsRoles, realmConfig.settings); + LogManager.getLogger(AbstractAdLdapRealmTestCase.class).info( + "running test with realm configuration [{}], with direct group to role mapping [{}]. 
Settings [{}]", + realmConfig, realmConfig.mapGroupsAsRoles, realmConfig.settings); } @AfterClass diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java index 3d1cdb202d1..2231d23296a 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.security.authc.ldap; -import org.elasticsearch.common.logging.ESLoggerFactory; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.common.settings.Settings; import org.junit.BeforeClass; @@ -31,9 +31,9 @@ public class MultipleAdRealmIT extends AbstractAdLdapRealmTestCase { .filter(config -> config.name().startsWith("AD")) .collect(Collectors.toList()); secondaryRealmConfig = randomFrom(configs); - ESLoggerFactory.getLogger("test") - .info("running test with secondary realm configuration [{}], with direct group to role mapping [{}]. Settings [{}]", - secondaryRealmConfig, secondaryRealmConfig.mapGroupsAsRoles, secondaryRealmConfig.settings); + LogManager.getLogger(MultipleAdRealmIT.class).info( + "running test with secondary realm configuration [{}], with direct group to role mapping [{}]. Settings [{}]", + secondaryRealmConfig, secondaryRealmConfig.mapGroupsAsRoles, secondaryRealmConfig.settings); // It's easier to test 2 realms when using file based role mapping, and for the purposes of // this test, there's no need to test native mappings. 
diff --git a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java index c77715431ec..28267614dd3 100644 --- a/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java +++ b/x-pack/qa/transport-client-tests/src/test/java/org/elasticsearch/xpack/ml/client/ESXPackSmokeClientTestCase.java @@ -6,11 +6,11 @@ package org.elasticsearch.xpack.ml.client; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; @@ -54,8 +54,7 @@ public abstract class ESXPackSmokeClientTestCase extends LuceneTestCase { */ public static final String TESTS_CLUSTER = "tests.cluster"; - protected static final Logger logger = ESLoggerFactory - .getLogger(ESXPackSmokeClientTestCase.class.getName()); + protected static final Logger logger = LogManager.getLogger(ESXPackSmokeClientTestCase.class); private static final AtomicInteger counter = new AtomicInteger(); private static Client client;