Merge remote-tracking branch 'origin/master' into index-lifecycle

This commit is contained in:
Lee Hinman 2018-09-27 11:00:37 -06:00
commit a26cc1a242
552 changed files with 11951 additions and 3942 deletions

View File

@ -19,13 +19,13 @@
package org.elasticsearch.client.benchmark.ops.bulk;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.client.benchmark.BenchmarkTask;
import org.elasticsearch.client.benchmark.metrics.Sample;
import org.elasticsearch.client.benchmark.metrics.SampleRecorder;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
@ -135,7 +135,7 @@ public class BulkBenchmarkTask implements BenchmarkTask {
private static final class BulkIndexer implements Runnable {
private static final Logger logger = ESLoggerFactory.getLogger(BulkIndexer.class.getName());
private static final Logger logger = LogManager.getLogger(BulkIndexer.class);
private final BlockingQueue<List<String>> bulkData;
private final int warmupIterations;

View File

@ -0,0 +1,30 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support.expressiondsl;
import org.elasticsearch.common.xcontent.ToXContentObject;
/**
* Implementations of this interface represent an expression used for user role mapping
* that can later be resolved to a boolean value.
*/
public interface RoleMapperExpression extends ToXContentObject {
}

View File

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support.expressiondsl.expressions;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import java.util.ArrayList;
import java.util.List;
/**
* An expression that evaluates to <code>true</code> if-and-only-if all its children
* evaluate to <code>true</code>.
* An <em>all</em> expression with no children is always <code>true</code>.
*/
public final class AllRoleMapperExpression extends CompositeRoleMapperExpression {
private AllRoleMapperExpression(String name, RoleMapperExpression[] elements) {
super(name, elements);
}
public static Builder builder() {
return new Builder();
}
public static final class Builder {
private List<RoleMapperExpression> elements = new ArrayList<>();
public Builder addExpression(final RoleMapperExpression expression) {
assert expression != null : "expression cannot be null";
elements.add(expression);
return this;
}
public AllRoleMapperExpression build() {
return new AllRoleMapperExpression(CompositeType.ALL.getName(), elements.toArray(new RoleMapperExpression[0]));
}
}
}

View File

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support.expressiondsl.expressions;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import java.util.ArrayList;
import java.util.List;
/**
* An expression that evaluates to <code>true</code> if at least one of its children
* evaluates to <code>true</code>.
* An <em>any</em> expression with no children is never <code>true</code>.
*/
public final class AnyRoleMapperExpression extends CompositeRoleMapperExpression {
private AnyRoleMapperExpression(String name, RoleMapperExpression[] elements) {
super(name, elements);
}
public static Builder builder() {
return new Builder();
}
public static final class Builder {
private List<RoleMapperExpression> elements = new ArrayList<>();
public Builder addExpression(final RoleMapperExpression expression) {
assert expression != null : "expression cannot be null";
elements.add(expression);
return this;
}
public AnyRoleMapperExpression build() {
return new AnyRoleMapperExpression(CompositeType.ANY.getName(), elements.toArray(new RoleMapperExpression[0]));
}
}
}

View File

@ -0,0 +1,100 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support.expressiondsl.expressions;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* An expression composed of other role mapper expressions, which can be combined with operators such as AND and OR.
* <p>
* Expression builder example:
* <pre>
* {@code
* final RoleMapperExpression allExpression = AllRoleMapperExpression.builder()
.addExpression(AnyRoleMapperExpression.builder()
.addExpression(FieldRoleMapperExpression.ofUsername("user1@example.org"))
.addExpression(FieldRoleMapperExpression.ofUsername("user2@example.org"))
.build())
.addExpression(FieldRoleMapperExpression.ofMetadata("metadata.location", "AMER"))
.addExpression(new ExceptRoleMapperExpression(FieldRoleMapperExpression.ofUsername("user3@example.org")))
.build();
* }
* </pre>
*/
public abstract class CompositeRoleMapperExpression implements RoleMapperExpression {
private final String name;
private final List<RoleMapperExpression> elements;
CompositeRoleMapperExpression(final String name, final RoleMapperExpression... elements) {
assert name != null : "field name cannot be null";
assert elements != null : "at least one field expression is required";
this.name = name;
this.elements = Collections.unmodifiableList(Arrays.asList(elements));
}
public String getName() {
return this.name;
}
public List<RoleMapperExpression> getElements() {
return elements;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final CompositeRoleMapperExpression that = (CompositeRoleMapperExpression) o;
if (Objects.equals(this.getName(), that.getName()) == false) {
return false;
}
return Objects.equals(this.getElements(), that.getElements());
}
@Override
public int hashCode() {
return Objects.hash(name, elements);
}
@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
builder.startArray(name);
for (RoleMapperExpression e : elements) {
e.toXContent(builder, params);
}
builder.endArray();
return builder.endObject();
}
}
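
For reference, a minimal serialization sketch (assuming the classes above; IOException handling elided): toXContent wraps the child expressions in an array keyed by the composite's name, matching the expected output in the tests further down.

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

// Build a one-child "all" expression and render it to JSON.
XContentBuilder builder = XContentFactory.jsonBuilder();
AllRoleMapperExpression.builder()
    .addExpression(FieldRoleMapperExpression.ofGroups("cn=admins,dc=example,dc=com"))
    .build()
    .toXContent(builder, ToXContent.EMPTY_PARAMS);
String json = Strings.toString(builder);
// -> {"all":[{"field":{"groups":["cn=admins,dc=example,dc=com"]}}]}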

View File

@ -0,0 +1,59 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support.expressiondsl.expressions;
import org.elasticsearch.common.ParseField;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public enum CompositeType {
ANY("any"), ALL("all"), EXCEPT("except");
private static Map<String, CompositeType> nameToType = Collections.unmodifiableMap(initialize());
private ParseField field;
CompositeType(String name) {
this.field = new ParseField(name);
}
public String getName() {
return field.getPreferredName();
}
public ParseField getParseField() {
return field;
}
public static CompositeType fromName(String name) {
return nameToType.get(name);
}
private static Map<String, CompositeType> initialize() {
Map<String, CompositeType> map = new HashMap<>();
for (CompositeType field : values()) {
map.put(field.getName(), field);
}
return map;
}
}
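
For illustration, a small lookup sketch (assuming the enum above): fromName is case-sensitive and returns null for unrecognised names rather than throwing.

CompositeType all = CompositeType.fromName("all");      // CompositeType.ALL
CompositeType unknown = CompositeType.fromName("none"); // null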

View File

@ -0,0 +1,47 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support.expressiondsl.expressions;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
/**
* A negating expression. That is, this expression evaluates to <code>true</code> if-and-only-if
* its delegate expression evaluates to <code>false</code>.
* Syntactically, <em>except</em> expressions are intended to be children of <em>all</em>
* expressions ({@link AllRoleMapperExpression}).
*/
public final class ExceptRoleMapperExpression extends CompositeRoleMapperExpression {
public ExceptRoleMapperExpression(final RoleMapperExpression expression) {
super(CompositeType.EXCEPT.getName(), expression);
}
@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
builder.field(CompositeType.EXCEPT.getName());
builder.value(getElements().get(0));
return builder.endObject();
}
}
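
For contrast with the array form used by the other composites, a serialization sketch (output shape matches the parser tests further down; IOException handling elided): except renders its single delegate as an object value.

XContentBuilder builder = XContentFactory.jsonBuilder();
new ExceptRoleMapperExpression(FieldRoleMapperExpression.ofGroups("disavowed"))
    .toXContent(builder, ToXContent.EMPTY_PARAMS);
// Strings.toString(builder) -> {"except":{"field":{"groups":["disavowed"]}}}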

View File

@ -0,0 +1,122 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support.expressiondsl.fields;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* An expression that evaluates to <code>true</code> if a field (map element) matches
* the provided values. A <em>field</em> expression may have more than one provided value, in which
* case the expression is true if <em>any</em> of the values are matched.
* <p>
* Expression builder example:
* <pre>
* {@code
* final RoleMapperExpression usernameExpression = FieldRoleMapperExpression.ofUsername("user1@example.org");
* }
* </pre>
*/
public class FieldRoleMapperExpression implements RoleMapperExpression {
private final String field;
private final List<Object> values;
public FieldRoleMapperExpression(final String field, final Object... values) {
if (field == null || field.isEmpty()) {
throw new IllegalArgumentException("null or empty field name (" + field + ")");
}
if (values == null || values.length == 0) {
throw new IllegalArgumentException("null or empty values (" + values + ")");
}
this.field = field;
this.values = Collections.unmodifiableList(Arrays.asList(values));
}
public String getField() {
return field;
}
public List<Object> getValues() {
return values;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
final FieldRoleMapperExpression that = (FieldRoleMapperExpression) o;
return Objects.equals(this.getField(), that.getField()) && Objects.equals(this.getValues(), that.getValues());
}
@Override
public int hashCode() {
int result = field.hashCode();
result = 31 * result + values.hashCode();
return result;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.startObject("field");
builder.startArray(this.field);
for (Object value : values) {
builder.value(value);
}
builder.endArray();
builder.endObject();
return builder.endObject();
}
public static FieldRoleMapperExpression ofUsername(Object... values) {
return ofKeyValues("username", values);
}
public static FieldRoleMapperExpression ofGroups(Object... values) {
return ofKeyValues("groups", values);
}
public static FieldRoleMapperExpression ofDN(Object... values) {
return ofKeyValues("dn", values);
}
public static FieldRoleMapperExpression ofMetadata(String key, Object... values) {
if (key.startsWith("metadata.") == false) {
throw new IllegalArgumentException("metadata key must have prefix 'metadata.'");
}
return ofKeyValues(key, values);
}
public static FieldRoleMapperExpression ofKeyValues(String key, Object... values) {
return new FieldRoleMapperExpression(key, values);
}
}
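
A short usage sketch of the factory methods above (values are illustrative): metadata keys must carry the "metadata." prefix, otherwise ofMetadata throws IllegalArgumentException.

FieldRoleMapperExpression dn = FieldRoleMapperExpression.ofDN("*,ou=admin,dc=example,dc=com");
FieldRoleMapperExpression groups = FieldRoleMapperExpression.ofGroups("admin", "operators");
FieldRoleMapperExpression meta = FieldRoleMapperExpression.ofMetadata("metadata.region", "AMER", "EMEA");
// FieldRoleMapperExpression.ofMetadata("region", "AMER") -> IllegalArgumentException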

View File

@ -0,0 +1,180 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support.expressiondsl.parser;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.expressions.AllRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.expressions.CompositeType;
import org.elasticsearch.client.security.support.expressiondsl.expressions.ExceptRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Parses the JSON (XContent) based boolean expression DSL into a tree of
* {@link RoleMapperExpression} objects.
* Note: as this is a client-side parser, it mostly validates the structure of the
* DSL being parsed; it does not enforce rules such as disallowing "except" within
* "except" or "any" expressions.
*/
public final class RoleMapperExpressionParser {
public static final ParseField FIELD = new ParseField("field");
/**
* @param name The name of the expression tree within its containing object.
* Used to provide descriptive error messages.
* @param parser A parser over the XContent (typically JSON) DSL
* representation of the expression
*/
public RoleMapperExpression parse(final String name, final XContentParser parser) throws IOException {
return parseRulesObject(name, parser);
}
private RoleMapperExpression parseRulesObject(final String objectName, final XContentParser parser)
throws IOException {
// find the start of the DSL object
final XContentParser.Token token;
if (parser.currentToken() == null) {
token = parser.nextToken();
} else {
token = parser.currentToken();
}
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("failed to parse rules expression. expected [{}] to be an object but found [{}] instead",
objectName, token);
}
final String fieldName = fieldName(objectName, parser);
final RoleMapperExpression expr = parseExpression(parser, fieldName, objectName);
if (parser.nextToken() != XContentParser.Token.END_OBJECT) {
throw new ElasticsearchParseException("failed to parse rules expression. object [{}] contains multiple fields", objectName);
}
return expr;
}
private RoleMapperExpression parseExpression(XContentParser parser, String field, String objectName)
throws IOException {
if (CompositeType.ANY.getParseField().match(field, parser.getDeprecationHandler())) {
final AnyRoleMapperExpression.Builder builder = AnyRoleMapperExpression.builder();
parseExpressionArray(CompositeType.ANY.getParseField(), parser).forEach(builder::addExpression);
return builder.build();
} else if (CompositeType.ALL.getParseField().match(field, parser.getDeprecationHandler())) {
final AllRoleMapperExpression.Builder builder = AllRoleMapperExpression.builder();
parseExpressionArray(CompositeType.ALL.getParseField(), parser).forEach(builder::addExpression);
return builder.build();
} else if (FIELD.match(field, parser.getDeprecationHandler())) {
return parseFieldExpression(parser);
} else if (CompositeType.EXCEPT.getParseField().match(field, parser.getDeprecationHandler())) {
return parseExceptExpression(parser);
} else {
throw new ElasticsearchParseException("failed to parse rules expression. field [{}] is not recognised in object [{}]", field,
objectName);
}
}
private RoleMapperExpression parseFieldExpression(XContentParser parser) throws IOException {
checkStartObject(parser);
final String fieldName = fieldName(FIELD.getPreferredName(), parser);
final List<Object> values;
if (parser.nextToken() == XContentParser.Token.START_ARRAY) {
values = parseArray(FIELD, parser, this::parseFieldValue);
} else {
values = Collections.singletonList(parseFieldValue(parser));
}
if (parser.nextToken() != XContentParser.Token.END_OBJECT) {
throw new ElasticsearchParseException("failed to parse rules expression. object [{}] contains multiple fields",
FIELD.getPreferredName());
}
return FieldRoleMapperExpression.ofKeyValues(fieldName, values.toArray());
}
private RoleMapperExpression parseExceptExpression(XContentParser parser) throws IOException {
checkStartObject(parser);
return new ExceptRoleMapperExpression(parseRulesObject(CompositeType.EXCEPT.getName(), parser));
}
private void checkStartObject(XContentParser parser) throws IOException {
final XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchParseException("failed to parse rules expression. expected an object but found [{}] instead", token);
}
}
private String fieldName(String objectName, XContentParser parser) throws IOException {
if (parser.nextToken() != XContentParser.Token.FIELD_NAME) {
throw new ElasticsearchParseException("failed to parse rules expression. object [{}] does not contain any fields", objectName);
}
String parsedFieldName = parser.currentName();
return parsedFieldName;
}
private List<RoleMapperExpression> parseExpressionArray(ParseField field, XContentParser parser)
throws IOException {
parser.nextToken(); // parseArray requires that the parser is positioned
// at the START_ARRAY token
return parseArray(field, parser, p -> parseRulesObject(field.getPreferredName(), p));
}
private <T> List<T> parseArray(ParseField field, XContentParser parser, CheckedFunction<XContentParser, T, IOException> elementParser)
throws IOException {
final XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.START_ARRAY) {
List<T> list = new ArrayList<>();
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
list.add(elementParser.apply(parser));
}
return list;
} else {
throw new ElasticsearchParseException("failed to parse rules expression. field [{}] requires an array", field);
}
}
private Object parseFieldValue(XContentParser parser) throws IOException {
switch (parser.currentToken()) {
case VALUE_STRING:
return parser.text();
case VALUE_BOOLEAN:
return parser.booleanValue();
case VALUE_NUMBER:
return parser.longValue();
case VALUE_NULL:
return null;
default:
throw new ElasticsearchParseException("failed to parse rules expression. expected a field value but found [{}] instead", parser
.currentToken());
}
}
}
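
A minimal parsing sketch (mirroring the test setup further down; assumes an empty NamedXContentRegistry and DeprecationHandler.THROW_UNSUPPORTED_OPERATION are acceptable for the input, with IOException propagated to the caller):

import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

String json = "{ \"field\": { \"username\": [\"*@example.com\"] } }";
try (XContentParser parser = XContentType.JSON.xContent().createParser(
        NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
    // Yields a FieldRoleMapperExpression for the "username" field.
    RoleMapperExpression expression = new RoleMapperExpressionParser().parse("rules", parser);
}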

View File

@ -147,7 +147,7 @@ public class MachineLearningGetResultsIT extends ESRestHighLevelClientTestCase {
@After
public void deleteJob() throws IOException {
new MlRestTestStateCleaner(logger, client()).clearMlMetadata();
new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata();
}
public void testGetCategories() throws IOException {

View File

@ -19,7 +19,6 @@
package org.elasticsearch.client;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.bulk.BulkRequest;
@ -93,7 +92,7 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
@After
public void cleanUp() throws IOException {
new MlRestTestStateCleaner(logger, client()).clearMlMetadata();
new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata();
}
public void testPutJob() throws Exception {
@ -426,7 +425,6 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
assertTrue(response.isAcknowledged());
}
@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33966")
public void testStartDatafeed() throws Exception {
String jobId = "test-start-datafeed";
String indexName = "start_data_1";
@ -437,18 +435,18 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
BulkRequest bulk = new BulkRequest();
bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
long now = System.currentTimeMillis();
long oneDayAgo = now - 86400000;
long now = (System.currentTimeMillis()/1000)*1000;
long thePast = now - 60000;
int i = 0;
long dayAgoCopy = oneDayAgo;
while(dayAgoCopy < now) {
long pastCopy = thePast;
while(pastCopy < now) {
IndexRequest doc = new IndexRequest();
doc.index(indexName);
doc.type("doc");
doc.id("id" + i);
doc.source("{\"total\":" +randomInt(1000) + ",\"timestamp\":"+ dayAgoCopy +"}", XContentType.JSON);
doc.source("{\"total\":" +randomInt(1000) + ",\"timestamp\":"+ pastCopy +"}", XContentType.JSON);
bulk.add(doc);
dayAgoCopy += 1000000;
pastCopy += 1000;
i++;
}
highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
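For reference, the arithmetic behind the changed constants (a sketch of the reasoning, not additional test code):
// now is rounded down to a whole second and thePast = now - 60_000 ms; pastCopy
// advances by 1_000 ms per document, so the loop indexes 60 documents at
// one-second intervals. A datafeed window of [thePast, thePast + 2_000) then
// covers exactly two timestamps (thePast and thePast + 1_000), matching the
// "Should only process two documents" comment below.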
@ -470,9 +468,9 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
StartDatafeedRequest startDatafeedRequest = new StartDatafeedRequest(datafeedId);
startDatafeedRequest.setStart(String.valueOf(oneDayAgo));
startDatafeedRequest.setStart(String.valueOf(thePast));
// Should only process two documents
startDatafeedRequest.setEnd(String.valueOf(oneDayAgo + 2000000));
startDatafeedRequest.setEnd(String.valueOf(thePast + 2000));
StartDatafeedResponse response = execute(startDatafeedRequest,
machineLearningClient::startDatafeed,
machineLearningClient::startDatafeedAsync);
@ -701,7 +699,8 @@ public class MachineLearningIT extends ESRestHighLevelClientTestCase {
.setDetectorDescription(randomAlphaOfLength(10))
.build();
AnalysisConfig.Builder configBuilder = new AnalysisConfig.Builder(Arrays.asList(detector));
configBuilder.setBucketSpan(new TimeValue(randomIntBetween(1, 10), TimeUnit.SECONDS));
// should not be random, see: https://github.com/elastic/ml-cpp/issues/208
configBuilder.setBucketSpan(new TimeValue(5, TimeUnit.SECONDS));
builder.setAnalysisConfig(configBuilder);
DataDescription.Builder dataDescription = new DataDescription.Builder();

View File

@ -1,109 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.test.rest.ESRestTestCase;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
* This is temporarily duplicated from the server side.
* @TODO Replace with an implementation using the HLRC once
* the APIs for managing datafeeds are implemented.
*/
public class MlRestTestStateCleaner {
private final Logger logger;
private final RestClient adminClient;
public MlRestTestStateCleaner(Logger logger, RestClient adminClient) {
this.logger = logger;
this.adminClient = adminClient;
}
public void clearMlMetadata() throws IOException {
deleteAllDatafeeds();
deleteAllJobs();
// indices will be deleted by the ESRestTestCase class
}
@SuppressWarnings("unchecked")
private void deleteAllDatafeeds() throws IOException {
final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds");
datafeedsRequest.addParameter("filter_path", "datafeeds");
final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest);
final List<Map<String, Object>> datafeeds =
(List<Map<String, Object>>) XContentMapValues.extractValue("datafeeds", ESRestTestCase.entityAsMap(datafeedsResponse));
if (datafeeds == null) {
return;
}
try {
adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop"));
} catch (Exception e1) {
logger.warn("failed to stop all datafeeds. Forcing stop", e1);
try {
adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true"));
} catch (Exception e2) {
logger.warn("Force-closing all data feeds failed", e2);
}
throw new RuntimeException(
"Had to resort to force-stopping datafeeds, something went wrong?", e1);
}
for (Map<String, Object> datafeed : datafeeds) {
String datafeedId = (String) datafeed.get("datafeed_id");
adminClient.performRequest(new Request("DELETE", "/_xpack/ml/datafeeds/" + datafeedId));
}
}
private void deleteAllJobs() throws IOException {
final Request jobsRequest = new Request("GET", "/_xpack/ml/anomaly_detectors");
jobsRequest.addParameter("filter_path", "jobs");
final Response response = adminClient.performRequest(jobsRequest);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> jobConfigs =
(List<Map<String, Object>>) XContentMapValues.extractValue("jobs", ESRestTestCase.entityAsMap(response));
if (jobConfigs == null) {
return;
}
try {
adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close"));
} catch (Exception e1) {
logger.warn("failed to close all jobs. Forcing closed", e1);
try {
adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close?force=true"));
} catch (Exception e2) {
logger.warn("Force-closing all jobs failed", e2);
}
throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?",
e1);
}
for (Map<String, Object> jobConfig : jobConfigs) {
String jobId = (String) jobConfig.get("job_id");
adminClient.performRequest(new Request("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId));
}
}
}

View File

@ -0,0 +1,102 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.job.config.Job;
import java.io.IOException;
/**
* Cleans up any ML resources created during tests
*/
public class MlTestStateCleaner {
private final Logger logger;
private final MachineLearningClient mlClient;
public MlTestStateCleaner(Logger logger, MachineLearningClient mlClient) {
this.logger = logger;
this.mlClient = mlClient;
}
public void clearMlMetadata() throws IOException {
deleteAllDatafeeds();
deleteAllJobs();
}
private void deleteAllDatafeeds() throws IOException {
stopAllDatafeeds();
GetDatafeedResponse getDatafeedResponse = mlClient.getDatafeed(GetDatafeedRequest.getAllDatafeedsRequest(), RequestOptions.DEFAULT);
for (DatafeedConfig datafeed : getDatafeedResponse.datafeeds()) {
mlClient.deleteDatafeed(new DeleteDatafeedRequest(datafeed.getId()), RequestOptions.DEFAULT);
}
}
private void stopAllDatafeeds() {
StopDatafeedRequest stopAllDatafeedsRequest = StopDatafeedRequest.stopAllDatafeedsRequest();
try {
mlClient.stopDatafeed(stopAllDatafeedsRequest, RequestOptions.DEFAULT);
} catch (Exception e1) {
logger.warn("failed to stop all datafeeds. Forcing stop", e1);
try {
stopAllDatafeedsRequest.setForce(true);
mlClient.stopDatafeed(stopAllDatafeedsRequest, RequestOptions.DEFAULT);
} catch (Exception e2) {
logger.warn("Force-closing all data feeds failed", e2);
}
throw new RuntimeException("Had to resort to force-stopping datafeeds, something went wrong?", e1);
}
}
private void deleteAllJobs() throws IOException {
closeAllJobs();
GetJobResponse getJobResponse = mlClient.getJob(GetJobRequest.getAllJobsRequest(), RequestOptions.DEFAULT);
for (Job job : getJobResponse.jobs()) {
mlClient.deleteJob(new DeleteJobRequest(job.getId()), RequestOptions.DEFAULT);
}
}
private void closeAllJobs() {
CloseJobRequest closeAllJobsRequest = CloseJobRequest.closeAllJobsRequest();
try {
mlClient.closeJob(closeAllJobsRequest, RequestOptions.DEFAULT);
} catch (Exception e1) {
logger.warn("failed to close all jobs. Forcing closed", e1);
closeAllJobsRequest.setForce(true);
try {
mlClient.closeJob(closeAllJobsRequest, RequestOptions.DEFAULT);
} catch (Exception e2) {
logger.warn("Force-closing all jobs failed", e2);
}
throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?", e1);
}
}
}

View File

@ -114,6 +114,7 @@ import static org.hamcrest.Matchers.not;
*/
public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
@SuppressWarnings("unused")
public void testIndex() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -278,6 +279,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testUpdate() throws Exception {
RestHighLevelClient client = highLevelClient();
{
@ -546,6 +548,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testDelete() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -665,6 +668,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testBulk() throws Exception {
RestHighLevelClient client = highLevelClient();
{
@ -767,6 +771,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testReindex() throws Exception {
RestHighLevelClient client = highLevelClient();
{
@ -905,6 +910,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testReindexRethrottle() throws Exception {
RestHighLevelClient client = highLevelClient();
TaskId taskId = new TaskId("oTUltX4IQMOUUVeiohTt8A:124");
@ -947,6 +953,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
@SuppressWarnings("unused")
public void testUpdateByQuery() throws Exception {
RestHighLevelClient client = highLevelClient();
{
@ -1066,6 +1073,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testDeleteByQuery() throws Exception {
RestHighLevelClient client = highLevelClient();
{
@ -1173,6 +1181,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testGet() throws Exception {
RestHighLevelClient client = highLevelClient();
{
@ -1487,6 +1496,7 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testMultiGet() throws Exception {
RestHighLevelClient client = highLevelClient();

View File

@ -192,6 +192,7 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
@SuppressWarnings("unused")
public void testClusterGetSettings() throws IOException {
RestHighLevelClient client = highLevelClient();
@ -257,6 +258,7 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
@SuppressWarnings("unused")
public void testClusterHealth() throws IOException {
RestHighLevelClient client = highLevelClient();
client.indices().create(new CreateIndexRequest("index"), RequestOptions.DEFAULT);

View File

@ -706,6 +706,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
@SuppressWarnings("unused")
public void testGetFieldMapping() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
@ -891,6 +892,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
@SuppressWarnings("unused")
public void testRefreshIndex() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -959,6 +961,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
@SuppressWarnings("unused")
public void testFlushIndex() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -1035,6 +1038,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
@SuppressWarnings("unused")
public void testSyncedFlushIndex() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -1308,6 +1312,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
@SuppressWarnings("unused")
public void testForceMergeIndex() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -1381,6 +1386,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
@SuppressWarnings("unused")
public void testClearCache() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -1527,6 +1533,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
@SuppressWarnings("unused")
public void testExistsAlias() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -1590,6 +1597,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
@SuppressWarnings("unused")
public void testUpdateAliases() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -1915,6 +1923,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
@SuppressWarnings("unused")
public void testGetAlias() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -1985,6 +1994,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
@SuppressWarnings("unused")
public void testIndexPutSettings() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -2315,6 +2325,7 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
@SuppressWarnings("unused")
public void testValidateQuery() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();

View File

@ -143,6 +143,7 @@ public class IngestClientDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testGetPipeline() throws IOException {
RestHighLevelClient client = highLevelClient();

View File

@ -30,7 +30,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.MachineLearningGetResultsIT;
import org.elasticsearch.client.MachineLearningIT;
import org.elasticsearch.client.MlRestTestStateCleaner;
import org.elasticsearch.client.MlTestStateCleaner;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.ml.CloseJobRequest;
@ -126,7 +126,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
@After
public void cleanUp() throws IOException {
new MlRestTestStateCleaner(logger, client()).clearMlMetadata();
new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata();
}
public void testCreateJob() throws Exception {

View File

@ -413,6 +413,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testSearchRequestHighlighting() throws IOException {
RestHighLevelClient client = highLevelClient();
{
@ -831,6 +832,8 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
@SuppressWarnings("unused")
public void testMultiSearchTemplateWithInlineScript() throws Exception {
indexSearchTestData();
RestHighLevelClient client = highLevelClient();

View File

@ -577,6 +577,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
@SuppressWarnings("unused")
public void testSnapshotGetSnapshots() throws IOException {
RestHighLevelClient client = highLevelClient();

View File

@ -66,6 +66,7 @@ import static org.hamcrest.Matchers.equalTo;
*/
public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase {
@SuppressWarnings("unused")
public void testGetStoredScript() throws Exception {
RestHighLevelClient client = highLevelClient();
@ -128,6 +129,7 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase
}
@SuppressWarnings("unused")
public void testDeleteStoredScript() throws Exception {
RestHighLevelClient client = highLevelClient();

View File

@ -66,6 +66,7 @@ import static org.hamcrest.Matchers.notNullValue;
*/
public class TasksClientDocumentationIT extends ESRestHighLevelClientTestCase {
@SuppressWarnings("unused")
public void testListTasks() throws IOException {
RestHighLevelClient client = highLevelClient();
{
@ -149,6 +150,7 @@ public class TasksClientDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
@SuppressWarnings("unused")
public void testCancelTasks() throws IOException {
RestHighLevelClient client = highLevelClient();
{

View File

@ -0,0 +1,97 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support.expressiondsl;
import org.elasticsearch.client.security.support.expressiondsl.expressions.AllRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.expressions.AnyRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.expressions.ExceptRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Date;
import static org.hamcrest.Matchers.equalTo;
public class RoleMapperExpressionDslTests extends ESTestCase {
public void testRoleMapperExpressionToXContentType() throws IOException {
final RoleMapperExpression allExpression = AllRoleMapperExpression.builder()
.addExpression(AnyRoleMapperExpression.builder()
.addExpression(FieldRoleMapperExpression.ofDN("*,ou=admin,dc=example,dc=com"))
.addExpression(FieldRoleMapperExpression.ofUsername("es-admin", "es-system"))
.build())
.addExpression(FieldRoleMapperExpression.ofGroups("cn=people,dc=example,dc=com"))
.addExpression(new ExceptRoleMapperExpression(FieldRoleMapperExpression.ofMetadata("metadata.terminated_date", new Date(
1537145401027L))))
.build();
final XContentBuilder builder = XContentFactory.jsonBuilder();
allExpression.toXContent(builder, ToXContent.EMPTY_PARAMS);
final String output = Strings.toString(builder);
final String expected =
"{"+
"\"all\":["+
"{"+
"\"any\":["+
"{"+
"\"field\":{"+
"\"dn\":[\"*,ou=admin,dc=example,dc=com\"]"+
"}"+
"},"+
"{"+
"\"field\":{"+
"\"username\":["+
"\"es-admin\","+
"\"es-system\""+
"]"+
"}"+
"}"+
"]"+
"},"+
"{"+
"\"field\":{"+
"\"groups\":[\"cn=people,dc=example,dc=com\"]"+
"}"+
"},"+
"{"+
"\"except\":{"+
"\"field\":{"+
"\"metadata.terminated_date\":[\"2018-09-17T00:50:01.027Z\"]"+
"}"+
"}"+
"}"+
"]"+
"}";
assertThat(expected, equalTo(output));
}
public void testFieldRoleMapperExpressionThrowsExceptionForMissingMetadataPrefix() {
final IllegalArgumentException ile = expectThrows(IllegalArgumentException.class, () -> FieldRoleMapperExpression.ofMetadata(
"terminated_date", new Date(1537145401027L)));
assertThat(ile.getMessage(), equalTo("metadata key must have prefix 'metadata.'"));
}
}

View File

@ -0,0 +1,129 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.security.support.expressiondsl.parser;
import org.elasticsearch.client.security.support.expressiondsl.RoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.expressions.CompositeRoleMapperExpression;
import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.iterableWithSize;
public class RoleMapperExpressionParserTests extends ESTestCase {
public void testParseSimpleFieldExpression() throws Exception {
String json = "{ \"field\": { \"username\" : [\"*@shield.gov\"] } }";
FieldRoleMapperExpression field = checkExpressionType(parse(json), FieldRoleMapperExpression.class);
assertThat(field.getField(), equalTo("username"));
assertThat(field.getValues(), iterableWithSize(1));
assertThat(field.getValues().get(0), equalTo("*@shield.gov"));
assertThat(toJson(field), equalTo(json.replaceAll("\\s", "")));
}
public void testParseComplexExpression() throws Exception {
String json = "{ \"any\": [" +
" { \"field\": { \"username\" : \"*@shield.gov\" } }, " +
" { \"all\": [" +
" { \"field\": { \"username\" : \"/.*\\\\@avengers\\\\.(net|org)/\" } }, " +
" { \"field\": { \"groups\" : [ \"admin\", \"operators\" ] } }, " +
" { \"except\":" +
" { \"field\": { \"groups\" : \"disavowed\" } }" +
" }" +
" ] }" +
"] }";
final RoleMapperExpression expr = parse(json);
assertThat(expr, instanceOf(CompositeRoleMapperExpression.class));
CompositeRoleMapperExpression any = (CompositeRoleMapperExpression) expr;
assertThat(any.getElements(), iterableWithSize(2));
final FieldRoleMapperExpression fieldShield = checkExpressionType(any.getElements().get(0),
FieldRoleMapperExpression.class);
assertThat(fieldShield.getField(), equalTo("username"));
assertThat(fieldShield.getValues(), iterableWithSize(1));
assertThat(fieldShield.getValues().get(0), equalTo("*@shield.gov"));
final CompositeRoleMapperExpression all = checkExpressionType(any.getElements().get(1),
CompositeRoleMapperExpression.class);
assertThat(all.getElements(), iterableWithSize(3));
final FieldRoleMapperExpression fieldAvengers = checkExpressionType(all.getElements().get(0),
FieldRoleMapperExpression.class);
assertThat(fieldAvengers.getField(), equalTo("username"));
assertThat(fieldAvengers.getValues(), iterableWithSize(1));
assertThat(fieldAvengers.getValues().get(0), equalTo("/.*\\@avengers\\.(net|org)/"));
final FieldRoleMapperExpression fieldGroupsAdmin = checkExpressionType(all.getElements().get(1),
FieldRoleMapperExpression.class);
assertThat(fieldGroupsAdmin.getField(), equalTo("groups"));
assertThat(fieldGroupsAdmin.getValues(), iterableWithSize(2));
assertThat(fieldGroupsAdmin.getValues().get(0), equalTo("admin"));
assertThat(fieldGroupsAdmin.getValues().get(1), equalTo("operators"));
final CompositeRoleMapperExpression except = checkExpressionType(all.getElements().get(2),
CompositeRoleMapperExpression.class);
final FieldRoleMapperExpression fieldDisavowed = checkExpressionType(except.getElements().get(0),
FieldRoleMapperExpression.class);
assertThat(fieldDisavowed.getField(), equalTo("groups"));
assertThat(fieldDisavowed.getValues(), iterableWithSize(1));
assertThat(fieldDisavowed.getValues().get(0), equalTo("disavowed"));
}
private String toJson(final RoleMapperExpression expr) throws IOException {
final XContentBuilder builder = XContentFactory.jsonBuilder();
expr.toXContent(builder, ToXContent.EMPTY_PARAMS);
final String output = Strings.toString(builder);
return output;
}
private <T> T checkExpressionType(RoleMapperExpression expr, Class<T> type) {
assertThat(expr, instanceOf(type));
return type.cast(expr);
}
private RoleMapperExpression parse(String json) throws IOException {
return new RoleMapperExpressionParser().parse("rules", XContentType.JSON.xContent().createParser(new NamedXContentRegistry(
Collections.emptyList()), new DeprecationHandler() {
@Override
public void usedDeprecatedName(String usedName, String modernName) {
}
@Override
public void usedDeprecatedField(String usedName, String replacedWith) {
}
}, json));
}
}

View File

@ -20,7 +20,6 @@
package org.elasticsearch.test.rest;
import org.apache.http.util.EntityUtils;
import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.Response;
@ -31,57 +30,53 @@ import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
/**
* Tests that a request waiting for refresh ({@code refresh=wait_for}) completes if the index is closed.
*/
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33533")
public class WaitForRefreshAndCloseIT extends ESRestTestCase {
@Before
public void setupIndex() throws IOException {
try {
client().performRequest(new Request("DELETE", indexName()));
} catch (ResponseException e) {
// If we get an error, it should be because the index doesn't exist
assertEquals(404, e.getResponse().getStatusLine().getStatusCode());
}
Request request = new Request("PUT", indexName());
Request request = new Request("PUT", "/test");
request.setJsonEntity("{\"settings\":{\"refresh_interval\":-1}}");
client().performRequest(request);
}
@After
public void cleanupIndex() throws IOException {
client().performRequest(new Request("DELETE", indexName()));
}
private String indexName() {
return getTestName().toLowerCase(Locale.ROOT);
client().performRequest(new Request("DELETE", "/test"));
}
private String docPath() {
return indexName() + "/test/1";
return "test/_doc/1";
}
public void testIndexAndThenClose() throws Exception {
closeWhileListenerEngaged(start("PUT", "", "{\"test\":\"test\"}"));
Request request = new Request("PUT", docPath());
request.setJsonEntity("{\"test\":\"test\"}");
closeWhileListenerEngaged(start(request));
}
public void testUpdateAndThenClose() throws Exception {
Request request = new Request("PUT", docPath());
request.setJsonEntity("{\"test\":\"test\"}");
client().performRequest(request);
closeWhileListenerEngaged(start("POST", "/_update", "{\"doc\":{\"name\":\"test\"}}"));
Request createDoc = new Request("PUT", docPath());
createDoc.setJsonEntity("{\"test\":\"test\"}");
client().performRequest(createDoc);
Request updateDoc = new Request("POST", docPath() + "/_update");
updateDoc.setJsonEntity("{\"doc\":{\"name\":\"test\"}}");
closeWhileListenerEngaged(start(updateDoc));
}
public void testDeleteAndThenClose() throws Exception {
Request request = new Request("PUT", docPath());
request.setJsonEntity("{\"test\":\"test\"}");
client().performRequest(request);
closeWhileListenerEngaged(start("DELETE", "", null));
closeWhileListenerEngaged(start(new Request("DELETE", docPath())));
}
private void closeWhileListenerEngaged(ActionFuture<String> future) throws Exception {
@ -89,40 +84,52 @@ public class WaitForRefreshAndCloseIT extends ESRestTestCase {
assertBusy(() -> {
Map<String, Object> stats;
try {
stats = entityAsMap(client().performRequest(new Request("GET", indexName() + "/_stats/refresh")));
stats = entityAsMap(client().performRequest(new Request("GET", "/test/_stats/refresh")));
} catch (IOException e) {
throw new RuntimeException(e);
}
@SuppressWarnings("unchecked")
Map<String, Object> indices = (Map<String, Object>) stats.get("indices");
@SuppressWarnings("unchecked")
Map<String, Object> theIndex = (Map<String, Object>) indices.get(indexName());
@SuppressWarnings("unchecked")
Map<String, Object> total = (Map<String, Object>) theIndex.get("total");
@SuppressWarnings("unchecked")
Map<String, Object> refresh = (Map<String, Object>) total.get("refresh");
int listeners = (int) refresh.get("listeners");
Map<?, ?> indices = (Map<?, ?>) stats.get("indices");
Map<?, ?> theIndex = (Map<?, ?>) indices.get("test");
Map<?, ?> total = (Map<?, ?>) theIndex.get("total");
Map<?, ?> refresh = (Map<?, ?>) total.get("refresh");
int listeners = (Integer) refresh.get("listeners");
assertEquals(1, listeners);
});
// Close the index. That should flush the listener.
client().performRequest(new Request("POST", indexName() + "/_close"));
client().performRequest(new Request("POST", "/test/_close"));
// The request shouldn't fail. It certainly shouldn't hang.
future.get();
/*
* The request may fail, but we really, really, really want to make
* sure that it doesn't time out.
*/
try {
future.get(1, TimeUnit.MINUTES);
} catch (ExecutionException ee) {
/*
* If it *does* fail it should fail with a FORBIDDEN error because
* it attempts to take an action on a closed index. Again, it'd be
* nice if all requests waiting for refresh came back even though
* the index is closed and most do, but sometimes they bump into
* the index being closed. At least they don't hang forever. That'd
* be a nightmare.
*/
assertThat(ee.getCause(), instanceOf(ResponseException.class));
ResponseException re = (ResponseException) ee.getCause();
assertEquals(403, re.getResponse().getStatusLine().getStatusCode());
assertThat(EntityUtils.toString(re.getResponse().getEntity()), containsString("FORBIDDEN/4/index closed"));
}
}
private ActionFuture<String> start(String method, String path, String body) {
private ActionFuture<String> start(Request request) {
PlainActionFuture<String> future = new PlainActionFuture<>();
Request request = new Request(method, docPath() + path);
request.addParameter("refresh", "wait_for");
request.addParameter("error_trace", "");
request.setJsonEntity(body);
client().performRequestAsync(request, new ResponseListener() {
@Override
public void onSuccess(Response response) {
try {
future.onResponse(EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8));
future.onResponse(EntityUtils.toString(response.getEntity()));
} catch (IOException e) {
future.onFailure(e);
}


@ -100,6 +100,7 @@ buildRestTests.docs = fileTree(projectDir) {
exclude 'reference/rollup/apis/delete-job.asciidoc'
exclude 'reference/rollup/apis/get-job.asciidoc'
exclude 'reference/rollup/apis/rollup-caps.asciidoc'
exclude 'reference/graph/explore.asciidoc'
}
listSnippets.docs = buildRestTests.docs


@ -1,6 +1,8 @@
[[java-query-dsl-type-query]]
==== Type Query
deprecated[7.0.0, Types are being removed, prefer filtering on a field instead. For more information, please see {ref}/removal-of-types.html[Removal of mapping types].]
See {ref}/query-dsl-type-query.html[Type Query]
["source","java",subs="attributes,callouts,macros"]


@ -59,7 +59,7 @@ PUT my_index
"analysis": {
"analyzer": {
"my_custom_analyzer": {
"type": "custom",
"type": "custom", <1>
"tokenizer": "standard",
"char_filter": [
"html_strip"
@ -82,6 +82,11 @@ POST my_index/_analyze
--------------------------------
// CONSOLE
<1> Setting `type` to `custom` tells Elasticsearch that we are defining a custom analyzer.
Compare this to how <<configuring-analyzers,built-in analyzers can be configured>>:
`type` will be set to the name of the built-in analyzer, like
<<analysis-standard-analyzer,`standard`>> or <<analysis-simple-analyzer,`simple`>>.
/////////////////////
[source,js]


@ -1,5 +1,5 @@
[[api-conventions]]
= API Conventions
= API conventions
[partintro]
--


@ -108,9 +108,11 @@ The order of precedence for cluster settings is:
2. persistent cluster settings
3. settings in the `elasticsearch.yml` configuration file.
It's best to use the `elasticsearch.yml` file only
for local configurations, and set all cluster-wide settings with the
`settings` API.
It's best to set all cluster-wide settings with the `settings` API and use the
`elasticsearch.yml` file only for local configurations. This way you can be sure that
the setting is the same on all nodes. If, on the other hand, you define different
settings on different nodes by accident using the configuration file, it is very
difficult to notice these discrepancies.
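For example, a cluster-wide setting can be applied with the settings API like
this (the recovery setting and value here are only an illustration):

[source,js]
--------------------------------------------------
PUT /_cluster/settings
{
  "persistent": {
    "indices.recovery.max_bytes_per_sec": "50mb"
  }
}
--------------------------------------------------
// NOTCONSOLE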
You can find the list of settings that you can dynamically update in <<modules,Modules>>.


@ -1,5 +1,5 @@
[[getting-started]]
= Getting Started
= Getting started
[partintro]
--
@ -158,19 +158,6 @@ And now we are ready to start our node and single cluster:
./elasticsearch
--------------------------------------------------
[float]
=== Installation with Homebrew
On macOS, Elasticsearch can also be installed via https://brew.sh[Homebrew]:
["source","sh"]
--------------------------------------------------
brew install elasticsearch
--------------------------------------------------
If installation succeeds, Homebrew will finish by saying that you can start Elasticsearch by entering
`elasticsearch`. Do that now. The expected response is described below, under <<successfully-running-node>>.
[float]
=== Installation example with MSI Windows Installer


@ -1,4 +1,5 @@
[role="xpack"]
[testenv="platinum"]
[[graph-explore-api]]
== Explore API


@ -1,6 +1,6 @@
[[index-modules]]
= Index Modules
= Index modules
[partintro]
--


@ -29,8 +29,6 @@ include::setup/bootstrap-checks-xes.asciidoc[]
:edit_url:
include::upgrade.asciidoc[]
include::migration/index.asciidoc[]
include::api-conventions.asciidoc[]
include::docs.asciidoc[]
@ -78,6 +76,8 @@ include::glossary.asciidoc[]
include::release-notes/highlights.asciidoc[]
include::migration/index.asciidoc[]
include::release-notes.asciidoc[]
include::redirects.asciidoc[]


@ -1,5 +1,5 @@
[[ingest]]
= Ingest Node
= Ingest node
[partintro]
--


@ -31,10 +31,6 @@ Many APIs which support date values also support <<date-math,date math>>
expressions, such as `now-1m/d` -- the current time, minus one month, rounded
down to the nearest day.
TIP: The `format` setting can be updated on existing fields using the
<<indices-put-mapping,PUT mapping API>>.
[[custom-date-formats]]
==== Custom date formats


@ -1,30 +1,14 @@
[[breaking-changes-7.0]]
== Breaking changes in 7.0
++++
<titleabbrev>7.0</titleabbrev>
++++
This section discusses the changes that you need to be aware of when migrating
your application to Elasticsearch 7.0.
See also <<release-highlights>> and <<es-release-notes>>.
[float]
=== Indices created before 7.0
Elasticsearch 7.0 can read indices created in version 6.0 or above. An
Elasticsearch 7.0 node will not start in the presence of indices created in a
version of Elasticsearch before 6.0.
[IMPORTANT]
.Reindex indices from Elasticsearch 5.x or before
=========================================
Indices created in Elasticsearch 5.x or before will need to be reindexed with
Elasticsearch 6.x in order to be readable by Elasticsearch 7.x.
=========================================
[float]
=== Also see:
* <<breaking_70_aggregations_changes>>
* <<breaking_70_cluster_changes>>
* <<breaking_70_indices_changes>>
@ -41,6 +25,22 @@ Elasticsearch 6.x in order to be readable by Elasticsearch 7.x.
* <<breaking_70_restclient_changes>>
* <<breaking_70_low_level_restclient_changes>>
[float]
=== Indices created before 7.0
Elasticsearch 7.0 can read indices created in version 6.0 or above. An
Elasticsearch 7.0 node will not start in the presence of indices created in a
version of Elasticsearch before 6.0.
[IMPORTANT]
.Reindex indices from Elasticsearch 5.x or before
=========================================
Indices created in Elasticsearch 5.x or before will need to be reindexed with
Elasticsearch 6.x in order to be readable by Elasticsearch 7.x.
=========================================
include::migrate_7_0/aggregations.asciidoc[]
include::migrate_7_0/analysis.asciidoc[]
include::migrate_7_0/cluster.asciidoc[]


@ -1,21 +1,26 @@
[float]
[[breaking_70_aggregations_changes]]
=== Aggregations changes
[float]
==== Deprecated `global_ordinals_hash` and `global_ordinals_low_cardinality` execution hints for terms aggregations have been removed
These `execution_hint` values have been removed and should be replaced by `global_ordinals`.
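For example, a terms aggregation that previously used one of the removed hints
can simply specify `global_ordinals` instead (the index and field names here
are hypothetical):

[source,js]
--------------------------------------------------
GET /my_index/_search
{
  "size": 0,
  "aggs": {
    "genres": {
      "terms": {
        "field": "genre",
        "execution_hint": "global_ordinals"
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE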
[float]
==== The `search.max_buckets` cluster setting
The dynamic cluster setting named `search.max_buckets` now defaults
to 10,000 (instead of unlimited in the previous version).
Requests that try to return more than the limit will fail with an exception.
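If a higher limit is genuinely needed, the dynamic setting can be raised, for
example:

[source,js]
--------------------------------------------------
PUT /_cluster/settings
{
  "transient": {
    "search.max_buckets": 20000
  }
}
--------------------------------------------------
// NOTCONSOLE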
[float]
==== `missing` option of the `composite` aggregation has been removed
The `missing` option of the `composite` aggregation, deprecated in 6.x,
has been removed. `missing_bucket` should be used instead.
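For example, a composite aggregation that previously relied on `missing` would
now be written with `missing_bucket` (index and field names are hypothetical):

[source,js]
--------------------------------------------------
GET /my_index/_search
{
  "size": 0,
  "aggs": {
    "my_buckets": {
      "composite": {
        "sources": [
          { "product": { "terms": { "field": "product", "missing_bucket": true } } }
        ]
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE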
[float]
==== Replaced `params._agg` with `state` context variable in scripted metric aggregations
The object used to share aggregation state between the scripts in a Scripted Metric


@ -1,12 +1,15 @@
[float]
[[breaking_70_analysis_changes]]
=== Analysis changes
[float]
==== Limiting the number of tokens produced by _analyze
To safeguard against out of memory errors, the number of tokens that can be produced
using the `_analyze` endpoint has been limited to 10000. This default limit can be changed
for a particular index with the index setting `index.analyze.max_token_count`.
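For example, the limit could be raised at index creation time (the index name
and value are hypothetical):

[source,js]
--------------------------------------------------
PUT /my_index
{
  "settings": {
    "index.analyze.max_token_count": 20000
  }
}
--------------------------------------------------
// NOTCONSOLE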
[float]
==== Limiting the length of an analyzed text during highlighting
Highlighting a text that was indexed without offsets or term vectors,
@ -16,6 +19,7 @@ To protect against this, the maximum number of characters that will be analyzed
limited to 1000000. This default limit can be changed
for a particular index with the index setting `index.highlight.max_analyzed_offset`.
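As with the token count limit above, this is a per-index setting, for example
(hypothetical index and value):

[source,js]
--------------------------------------------------
PUT /my_index
{
  "settings": {
    "index.highlight.max_analyzed_offset": 2000000
  }
}
--------------------------------------------------
// NOTCONSOLE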
[float]
==== `delimited_payload_filter` renaming
The `delimited_payload_filter` was deprecated and renamed to `delimited_payload` in 6.2.
@ -23,6 +27,7 @@ Using it in indices created before 7.0 will issue deprecation warnings. Using th
name in new indices created in 7.0 will throw an error. Use the new name `delimited_payload`
instead.
[float]
==== `standard` filter has been removed
The `standard` token filter has been removed because it doesn't change anything in the stream.


@ -1,6 +1,8 @@
[float]
[[breaking_70_api_changes]]
=== API changes
[float]
==== Camel case and underscore parameters deprecated in 6.x have been removed
A number of duplicate parameters deprecated in 6.x have been removed from
Bulk request, Multi Get request, Term Vectors request, and More Like This Query
@ -22,6 +24,7 @@ The following parameters starting with underscore have been removed:
Instead of these removed parameters, use their non camel case equivalents without
starting underscore, e.g. use `version_type` instead of `_version_type` or `versionType`.
[float]
==== Thread pool info
In previous versions of Elasticsearch, the thread pool info returned in the
@ -48,10 +51,12 @@ aligns the output of the API with the configuration values for thread
pools. Note that `core` and `max` will be populated for scaling thread pools,
and `size` will be populated for fixed thread pools.
[float]
==== The parameter `fields` deprecated in 6.x has been removed from Bulk request and Update request
The Update API returns `400 - Bad request` if the request contains
unknown parameters (instead of ignoring them as in previous versions).
[float]
[[remove-suggest-metric]]
==== Remove support for `suggest` metric/index metric in indices stats and nodes stats APIs
@ -66,6 +71,7 @@ In the past, `fields` could be provided either as a parameter, or as part of the
body. Specifying `fields` in the request body as opposed to a parameter was deprecated
in 6.4.0, and is now unsupported in 7.0.0.
[float]
==== `copy_settings` is deprecated on shrink and split APIs
Versions of Elasticsearch prior to 6.4.0 did not copy index settings on shrink
@ -76,10 +82,12 @@ will be for such settings to be copied on such operations. To enable users in
the only behavior in 8.0.0, this parameter is deprecated in 7.0.0 for removal in
8.0.0.
[float]
==== The deprecated stored script contexts have now been removed
When putting stored scripts, support for storing them with the deprecated `template` context or without a context is
now removed. Scripts must be stored using the `script` context as mentioned in the documentation.
[float]
==== Get Aliases API limitations when {security} is enabled have been removed
The behavior and response codes of the get aliases API no longer vary
@ -88,6 +96,7 @@ depending on whether {security} is enabled. Previously a
current user was not authorized for any alias. An empty response with
status 200 - OK is now returned instead at all times.
[float]
==== Put User API response no longer has `user` object
The Put User API response was changed in 6.5.0 to add the `created` field


@ -1,16 +1,20 @@
[float]
[[breaking_70_cluster_changes]]
=== Cluster changes
[float]
==== `:` is no longer allowed in cluster name
Due to cross-cluster search using `:` to separate a cluster and index name,
cluster names may no longer contain `:`.
[float]
==== New default for `wait_for_active_shards` parameter of the open index command
The default value for the `wait_for_active_shards` parameter of the open index API
is changed from 0 to 1, which means that the command will now by default wait for all
primary shards of the opened index to be allocated.
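The previous behaviour can still be requested per call by passing the parameter
explicitly (the index name is hypothetical):

[source,js]
--------------------------------------------------
POST /my_index/_open?wait_for_active_shards=0
--------------------------------------------------
// NOTCONSOLE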
[float]
==== Shard preferences `_primary`, `_primary_first`, `_replica`, and `_replica_first` are removed
These shard preferences are removed in favour of the `_prefer_nodes` and `_only_nodes` preferences.


@ -1,17 +1,20 @@
[float]
[[breaking_70_indices_changes]]
=== Indices changes
[float]
==== `:` is no longer allowed in index name
Due to cross-cluster search using `:` to separate a cluster and index name,
index names may no longer contain `:`.
[float]
==== `index.unassigned.node_left.delayed_timeout` may no longer be negative
Negative values were interpreted as zero in earlier versions but are no
longer accepted.
[float]
==== `_flush` and `_force_merge` will no longer refresh
In previous versions issuing a `_flush` or `_force_merge` (with `flush=true`)
@ -20,7 +23,7 @@ visible to searches and non-realtime GET operations. From now on these operation
don't have this side-effect anymore. To make documents visible an explicit `_refresh`
call is needed unless the index is refreshed by the internal scheduler.
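For example, to make documents flushed on a hypothetical index visible to
searches, an explicit refresh is now required:

[source,js]
--------------------------------------------------
POST /my_index/_flush
POST /my_index/_refresh
--------------------------------------------------
// NOTCONSOLE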
[float]
==== Limit to the difference between max_size and min_size in NGramTokenFilter and NGramTokenizer
To safeguard against creating too many index terms, the difference between `max_ngram` and
@ -29,7 +32,7 @@ limit can be changed with the index setting `index.max_ngram_diff`. Note that if
exceeded an error is thrown only for new indices. For existing pre-7.0 indices, a deprecation
warning is logged.
[float]
==== Limit to the difference between max_size and min_size in ShingleTokenFilter
To safeguard against creating too many tokens, the difference between `max_shingle_size` and
@ -38,6 +41,7 @@ limit can be changed with the index setting `index.max_shingle_diff`. Note that
exceeded an error is thrown only for new indices. For existing pre-7.0 indices, a deprecation
warning is logged.
[float]
==== Document distribution changes
Indices created with version `7.0.0` onwards will have an automatic `index.number_of_routing_shards`
@ -46,6 +50,7 @@ shards the index has. In order to maintain the exact same distribution as a pre
`index.number_of_routing_shards` must be set to the `index.number_of_shards` at index creation time.
Note: if the number of routing shards equals the number of shards `_split` operations are not supported.
[float]
==== Skipped background refresh on search idle shards
Shards belonging to an index that does not have an explicit
@ -56,6 +61,7 @@ that access a search idle shard will be "parked" until the next refresh
happens. Indexing requests with `wait_for_refresh` will also trigger
a background refresh.
[float]
==== Remove deprecated url parameters for Clear Indices Cache API
The following previously deprecated url parameter have been removed:
@ -65,12 +71,14 @@ The following previously deprecated url parameter have been removed:
* `request_cache` - use `request` instead
* `field_data` - use `fielddata` instead
[float]
==== `network.breaker.inflight_requests.overhead` increased to 2
Previously the in flight requests circuit breaker considered only the raw byte representation.
By bumping the value of `network.breaker.inflight_requests.overhead` from 1 to 2, this circuit
breaker now also considers the memory overhead of representing the request as a structured object.
[float]
==== Parent circuit breaker changes
The parent circuit breaker defines a new setting `indices.breaker.total.use_real_memory` which is
@ -79,6 +87,7 @@ heap memory instead of only considering the reserved memory by child circuit bre
setting is `true`, the default parent breaker limit also changes from 70% to 95% of the JVM heap size.
The previous behavior can be restored by setting `indices.breaker.total.use_real_memory` to `false`.
[float]
==== `fix` value for `index.shard.check_on_startup` is removed
Deprecated option value `fix` for setting `index.shard.check_on_startup` is not supported.


@ -1,23 +1,28 @@
[float]
[[breaking_70_java_changes]]
=== Java API changes
[float]
==== `isShardsAcked` deprecated in `6.2` has been removed
`isShardsAcked` has been replaced by `isShardsAcknowledged` in
`CreateIndexResponse`, `RolloverResponse` and
`CreateIndexClusterStateUpdateResponse`.
[float]
==== `prepareExecute` removed from the client api
The `prepareExecute` method which created a request builder has been
removed from the client api. Instead, construct a builder for the
appropriate request directly.
[float]
==== Some Aggregation classes have moved packages
* All classes present in `org.elasticsearch.search.aggregations.metrics.*` packages
were moved to a single `org.elasticsearch.search.aggregations.metrics` package.
[float]
==== `Retry.withBackoff` methods with `Settings` removed
The variants of `Retry.withBackoff` that included `Settings` have been removed


@ -1,6 +1,8 @@
[float]
[[breaking_70_low_level_restclient_changes]]
=== Low-level REST client changes
[float]
==== Deprecated flavors of performRequest have been removed
We deprecated the flavors of `performRequest` and `performRequestAsync` that
@ -8,6 +10,7 @@ do not take `Request` objects in 6.4.0 in favor of the flavors that take
`Request` objects because those methods can be extended without breaking
backwards compatibility.
[float]
==== Removed setHosts
We deprecated `setHosts` in 6.4.0 in favor of `setNodes` because it supports


@ -1,36 +1,44 @@
[float]
[[breaking_70_mappings_changes]]
=== Mapping changes
[float]
==== The `_all` meta field is removed
The `_all` field, deprecated in 6.x, has now been removed.
[float]
==== The `_uid` meta field is removed
This field used to index a composite key formed of the `_type` and the `_id`.
Now that indices cannot have multiple types, this has been removed in favour
of `_id`.
[float]
==== The `_default_` mapping is no longer allowed
The `_default_` mapping has been deprecated in 6.0 and is now no longer allowed
in 7.0. Trying to configure a `_default_` mapping on 7.x indices will result in
an error.
[float]
==== `index_options` for numeric fields has been removed
The `index_options` parameter for numeric fields was deprecated in 6.x and has now been removed.
[float]
==== Limiting the number of `nested` json objects
To safeguard against out of memory errors, the number of nested json objects within a single
document across all fields has been limited to 10000. This default limit can be changed with
the index setting `index.mapping.nested_objects.limit`.
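For example, the limit could be changed at index creation time (hypothetical
index name and value):

[source,js]
--------------------------------------------------
PUT /my_index
{
  "settings": {
    "index.mapping.nested_objects.limit": 20000
  }
}
--------------------------------------------------
// NOTCONSOLE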
[float]
==== The `update_all_types` option has been removed
This option is useless now that all indices have at most one type.
[float]
==== The `classic` similarity has been removed
The `classic` similarity relied on coordination factors for scoring to be good
@ -39,6 +47,7 @@ Lucene, which means that the `classic` similarity now produces scores of lower
quality. It is advised to switch to `BM25` instead, which is widely accepted
as a better alternative.
[float]
==== Similarities fail when unsupported options are provided
An error will now be thrown when unknown configuration options are provided


@ -1,6 +1,8 @@
[float]
[[breaking_70_packaging_changes]]
=== Packaging changes
[float]
[[systemd-service-file-config]]
==== systemd service file is no longer configuration
@ -9,6 +11,7 @@ was previously marked as a configuration file in rpm and deb packages.
Overrides to the systemd elasticsearch service should be made
in `/etc/systemd/system/elasticsearch.service.d/override.conf`.
[float]
==== tar package no longer includes windows specific files
The tar package previously included files in the `bin` directory meant only


@ -1,6 +1,8 @@
[float]
[[breaking_70_plugins_changes]]
=== Plugins changes
[float]
==== Azure Repository plugin
* The legacy azure settings which started with the `cloud.azure.storage.` prefix have been removed.
@ -12,6 +14,7 @@ You must set it per azure client instead. Like `azure.client.default.timeout: 10
See {plugins}/repository-azure-repository-settings.html#repository-azure-repository-settings[Azure Repository settings].
[float]
==== Google Cloud Storage Repository plugin
* The repository settings `application_name`, `connect_timeout` and `read_timeout` have been removed and
@ -19,11 +22,13 @@ must now be specified in the client settings instead.
See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Storage Client Settings].
[float]
==== Analysis Plugin changes
* The misspelled helper method `requriesAnalysisSettings(AnalyzerProvider<T> provider)` has been
renamed to `requiresAnalysisSettings`
[float]
==== File-based discovery plugin
* This plugin has been removed since its functionality is now part of


@ -1,6 +1,8 @@
[float]
[[breaking_70_restclient_changes]]
=== High-level REST client changes
[float]
==== API methods accepting `Header` argument have been removed
All API methods accepting headers as a `Header` varargs argument, deprecated
@ -12,6 +14,7 @@ In case you are specifying headers
e.g. `client.index(indexRequest, new Header("name", "value"))` becomes
`client.index(indexRequest, RequestOptions.DEFAULT.toBuilder().addHeader("name", "value").build());`
[float]
==== Cluster Health API default to `cluster` level
The Cluster Health API used to default to `shards` level to ease migration


@ -1,6 +1,8 @@
[float]
[[breaking_70_scripting_changes]]
=== Scripting changes
[float]
==== getDate() and getDates() removed
Fields of type `long` and `date` had `getDate()` and `getDates()` methods
@ -12,6 +14,7 @@ now been removed. Instead, use `.value` on `date` fields, or explicitly
parse `long` fields into a date object using
`Instant.ofEpochMilli(doc["myfield"].value)`.
[float]
==== Script errors will return as `400` error codes
Malformed scripts, either in search templates, ingest pipelines or search


@ -1,6 +1,8 @@
[float]
[[breaking_70_search_changes]]
=== Search and Query DSL changes
[float]
==== Changes to queries
* The default value for `transpositions` parameter of `fuzzy` query
has been changed to `true`.
@ -18,6 +20,7 @@
* Attempts to generate multi-term phrase queries against non-text fields
with a custom analyzer will now throw an exception
[float]
==== Adaptive replica selection enabled by default
Adaptive replica selection has been enabled by default. If you wish to return to
@ -35,6 +38,7 @@ PUT /_cluster/settings
--------------------------------------------------
// CONSOLE
[float]
==== Search API returns `400` for invalid requests
The Search API returns `400 - Bad request` while it would previously return
@ -48,12 +52,14 @@ The Search API returns `400 - Bad request` while it would previously return
* number of filters in the adjacency matrix aggregation is too large
* script compilation errors
[float]
==== Scroll queries cannot use the `request_cache` anymore
Setting `request_cache:true` on a query that creates a scroll (`scroll=1m`)
has been deprecated in 6 and will now return a `400 - Bad request`.
Scroll queries are not meant to be cached.
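For example, a request along these lines (hypothetical index) is now rejected
with `400 - Bad request`:

[source,js]
--------------------------------------------------
GET /my_index/_search?scroll=1m&request_cache=true
{
  "query": { "match_all": {} }
}
--------------------------------------------------
// NOTCONSOLE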
[float]
==== Scroll queries cannot use `rescore` anymore
Including a rescore clause on a query that creates a scroll (`scroll=1m`) has
been deprecated in 6.5 and will now return a `400 - Bad request`. Allowing
@ -61,6 +67,7 @@ rescore on scroll queries would break the scroll sort. In the 6.x line, the
rescore clause was silently ignored (for scroll queries), and it was allowed in
the 5.x line.
[float]
==== Term Suggesters supported distance algorithms
The following string distance algorithms were given additional names in 6.2 and
@ -70,7 +77,7 @@ removed.
* `levenstein` - replaced by `levenshtein`
* `jarowinkler` - replaced by `jaro_winkler`
[float]
==== Limiting the number of terms that can be used in a Terms Query request
Executing a Terms Query with a lot of terms may degrade the cluster performance,
@ -79,7 +86,7 @@ To safeguard against this, the maximum number of terms that can be used in a
Terms Query request has been limited to 65536. This default maximum can be changed
for a particular index with the index setting `index.max_terms_count`.
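`index.max_terms_count` is a dynamic index setting, so the default can be
changed on an existing index, for example (hypothetical index and value):

[source,js]
--------------------------------------------------
PUT /my_index/_settings
{
  "index.max_terms_count": 100000
}
--------------------------------------------------
// NOTCONSOLE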
[float]
==== Limiting the length of regex that can be used in a Regexp Query request
Executing a Regexp Query with a long regex string may degrade search performance.
@ -87,11 +94,13 @@ To safeguard against this, the maximum length of regex that can be used in a
Regexp Query request has been limited to 1000. This default maximum can be changed
for a particular index with the index setting `index.max_regex_length`.
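This limit can likewise be adjusted per index, for example (hypothetical index
and value):

[source,js]
--------------------------------------------------
PUT /my_index/_settings
{
  "index.max_regex_length": 2000
}
--------------------------------------------------
// NOTCONSOLE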
[float]
==== Invalid `_search` request body
Search requests with extra content after the main object will no longer be accepted
by the `_search` endpoint. A parsing exception will be thrown instead.
[float]
==== Context Completion Suggester
The ability to query and index context enabled suggestions without context,
@ -102,12 +111,14 @@ considerably.
For geo context the value of the `path` parameter is now validated against the mapping,
and the context is only accepted if `path` points to a field with `geo_point` type.
[float]
==== Semantics changed for `max_concurrent_shard_requests`
`max_concurrent_shard_requests` used to limit the total number of concurrent shard
requests a single high level search request can execute. In 7.0 this changed to be the
max number of concurrent shard requests per node. The default is now `5`.
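The limit can be adjusted per request when needed, for example (hypothetical
index):

[source,js]
--------------------------------------------------
GET /my_index/_search?max_concurrent_shard_requests=3
{
  "query": { "match_all": {} }
}
--------------------------------------------------
// NOTCONSOLE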
[float]
==== `max_score` set to `null` when scores are not tracked
`max_score` used to be set to `0` whenever scores are not tracked. `null` is now used


@ -1,18 +1,21 @@
[float]
[[breaking_70_settings_changes]]
=== Settings changes
[float]
==== The default for `node.name` is now the hostname
`node.name` now defaults to the hostname at the time when Elasticsearch
is started. Previously the default node name was the first eight characters
of the node id. It can still be configured explicitly in `elasticsearch.yml`.
[float]
==== Percolator
* The deprecated `index.percolator.map_unmapped_fields_as_string` setting has been removed in favour of
the `index.percolator.map_unmapped_fields_as_text` setting.
[float]
==== Index thread pool
* Internally, single-document index/delete/update requests are executed as bulk
@ -21,6 +24,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`.
longer needed and has been removed. As such, the settings
`thread_pool.index.size` and `thread_pool.index.queue_size` have been removed.
[float]
[[write-thread-pool-fallback]]
==== Write thread pool fallback
@ -32,6 +36,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`.
available to keep the display output in APIs as `bulk` instead of `write`.
These fallback settings and this system property have been removed.
[float]
[[remove-http-enabled]]
==== Http enabled setting removed
@ -39,6 +44,7 @@ of the node id. It can still be configured explicitly in `elasticsearch.yml`.
use of the transport client. This setting has been removed, as the transport client
will be removed in the future, thus requiring HTTP to always be enabled.
[float]
[[remove-http-pipelining-setting]]
==== Http pipelining setting removed
@ -47,6 +53,7 @@ This setting has been removed, as disabling http pipelining support on the serve
provided little value. The setting `http.pipelining.max_events` can still be used to
limit the number of pipelined requests in-flight.
[float]
==== Cross-cluster search settings renamed
The cross-cluster search remote cluster connection infrastructure is also used


@ -1,3 +1,4 @@
[float]
[[breaking_70_snapshotstats_changes]]
=== Snapshot stats changes
@ -7,6 +8,7 @@ Snapshot stats details are provided in a new structured way:
* `incremental` section for those files that actually needed to be copied over as part of the incremental snapshotting.
* In case of a snapshot that's still in progress, there's also a `processed` section for files that are in the process of being copied.
[float]
==== Deprecated `number_of_files`, `processed_files`, `total_size_in_bytes` and `processed_size_in_bytes` snapshot stats properties have been removed
* Properties `number_of_files` and `total_size_in_bytes` are removed and should be replaced by values of nested object `total`.


@ -159,7 +159,7 @@ format corresponds to the primary timestamp, but you do not want to specify the
full `grok_pattern`.
If this parameter is not specified, the structure finder chooses the best format from
the formats it knows, which are:
the formats it knows, which are these Joda formats and their Java time equivalents:
* `dd/MMM/YYYY:HH:mm:ss Z`
* `EEE MMM dd HH:mm zzz YYYY`
@ -178,7 +178,7 @@ the formats it knows, which are:
* `MMM dd HH:mm:ss`
* `MMM dd HH:mm:ss,SSS`
* `MMM dd YYYY HH:mm:ss`
* `MMM dd, YYYY K:mm:ss a`
* `MMM dd, YYYY h:mm:ss a`
* `TAI64N`
* `UNIX`
* `UNIX_MS`
@ -487,3 +487,791 @@ If the request does not encounter errors, you receive the following result:
may provide clues that the data needs to be cleaned or transformed prior
to use by other {ml} functionality.
The next example shows how it's possible to find the structure of some New York
City yellow cab trip data. The first `curl` command downloads the data, the
first 20000 lines of which are then piped into the `find_file_structure`
endpoint. The `lines_to_sample` query parameter of the endpoint is set to 20000
to match what is specified in the `head` command.
[source,js]
----
curl -s "s3.amazonaws.com/nyc-tlc/trip+data/yellow_tripdata_2018-06.csv" | head -20000 | curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty&lines_to_sample=20000" -T -
----
// NOTCONSOLE
// Not converting to console because this shows how curl can be used
--
NOTE: The `Content-Type: application/json` header must be set even though in
this case the data is not JSON. (Alternatively the `Content-Type` can be set
to any other type supported by Elasticsearch, but it must be set.)
--
If the request does not encounter errors, you receive the following result:
[source,js]
----
{
"num_lines_analyzed" : 20000,
"num_messages_analyzed" : 19998, <1>
"sample_start" : "VendorID,tpep_pickup_datetime,tpep_dropoff_datetime,passenger_count,trip_distance,RatecodeID,store_and_fwd_flag,PULocationID,DOLocationID,payment_type,fare_amount,extra,mta_tax,tip_amount,tolls_amount,improvement_surcharge,total_amount\n\n1,2018-06-01 00:15:40,2018-06-01 00:16:46,1,.00,1,N,145,145,2,3,0.5,0.5,0,0,0.3,4.3\n",
"charset" : "UTF-8",
"has_byte_order_marker" : false,
"format" : "delimited", <2>
"multiline_start_pattern" : "^.*?,\"?\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}",
"exclude_lines_pattern" : "^\"?VendorID\"?,\"?tpep_pickup_datetime\"?,\"?tpep_dropoff_datetime\"?,\"?passenger_count\"?,\"?trip_distance\"?,\"?RatecodeID\"?,\"?store_and_fwd_flag\"?,\"?PULocationID\"?,\"?DOLocationID\"?,\"?payment_type\"?,\"?fare_amount\"?,\"?extra\"?,\"?mta_tax\"?,\"?tip_amount\"?,\"?tolls_amount\"?,\"?improvement_surcharge\"?,\"?total_amount\"?",
"column_names" : [ <3>
"VendorID",
"tpep_pickup_datetime",
"tpep_dropoff_datetime",
"passenger_count",
"trip_distance",
"RatecodeID",
"store_and_fwd_flag",
"PULocationID",
"DOLocationID",
"payment_type",
"fare_amount",
"extra",
"mta_tax",
"tip_amount",
"tolls_amount",
"improvement_surcharge",
"total_amount"
],
"has_header_row" : true, <4>
"delimiter" : ",", <5>
"quote" : "\"", <6>
"timestamp_field" : "tpep_pickup_datetime", <7>
"joda_timestamp_formats" : [ <8>
"YYYY-MM-dd HH:mm:ss"
],
"java_timestamp_formats" : [ <9>
"yyyy-MM-dd HH:mm:ss"
],
"need_client_timezone" : true, <10>
"mappings" : {
"@timestamp" : {
"type" : "date"
},
"DOLocationID" : {
"type" : "long"
},
"PULocationID" : {
"type" : "long"
},
"RatecodeID" : {
"type" : "long"
},
"VendorID" : {
"type" : "long"
},
"extra" : {
"type" : "double"
},
"fare_amount" : {
"type" : "double"
},
"improvement_surcharge" : {
"type" : "double"
},
"mta_tax" : {
"type" : "double"
},
"passenger_count" : {
"type" : "long"
},
"payment_type" : {
"type" : "long"
},
"store_and_fwd_flag" : {
"type" : "keyword"
},
"tip_amount" : {
"type" : "double"
},
"tolls_amount" : {
"type" : "double"
},
"total_amount" : {
"type" : "double"
},
"tpep_dropoff_datetime" : {
"type" : "date",
"format" : "YYYY-MM-dd HH:mm:ss"
},
"tpep_pickup_datetime" : {
"type" : "date",
"format" : "YYYY-MM-dd HH:mm:ss"
},
"trip_distance" : {
"type" : "double"
}
},
"field_stats" : {
"DOLocationID" : {
"count" : 19998,
"cardinality" : 240,
"min_value" : 1,
"max_value" : 265,
"mean_value" : 150.26532653265312,
"median_value" : 148,
"top_hits" : [
{
"value" : 79,
"count" : 760
},
{
"value" : 48,
"count" : 683
},
{
"value" : 68,
"count" : 529
},
{
"value" : 170,
"count" : 506
},
{
"value" : 107,
"count" : 468
},
{
"value" : 249,
"count" : 457
},
{
"value" : 230,
"count" : 441
},
{
"value" : 186,
"count" : 432
},
{
"value" : 141,
"count" : 409
},
{
"value" : 263,
"count" : 386
}
]
},
"PULocationID" : {
"count" : 19998,
"cardinality" : 154,
"min_value" : 1,
"max_value" : 265,
"mean_value" : 153.4042404240424,
"median_value" : 148,
"top_hits" : [
{
"value" : 79,
"count" : 1067
},
{
"value" : 230,
"count" : 949
},
{
"value" : 148,
"count" : 940
},
{
"value" : 132,
"count" : 897
},
{
"value" : 48,
"count" : 853
},
{
"value" : 161,
"count" : 820
},
{
"value" : 234,
"count" : 750
},
{
"value" : 249,
"count" : 722
},
{
"value" : 164,
"count" : 663
},
{
"value" : 114,
"count" : 646
}
]
},
"RatecodeID" : {
"count" : 19998,
"cardinality" : 5,
"min_value" : 1,
"max_value" : 5,
"mean_value" : 1.0656565656565653,
"median_value" : 1,
"top_hits" : [
{
"value" : 1,
"count" : 19311
},
{
"value" : 2,
"count" : 468
},
{
"value" : 5,
"count" : 195
},
{
"value" : 4,
"count" : 17
},
{
"value" : 3,
"count" : 7
}
]
},
"VendorID" : {
"count" : 19998,
"cardinality" : 2,
"min_value" : 1,
"max_value" : 2,
"mean_value" : 1.59005900590059,
"median_value" : 2,
"top_hits" : [
{
"value" : 2,
"count" : 11800
},
{
"value" : 1,
"count" : 8198
}
]
},
"extra" : {
"count" : 19998,
"cardinality" : 3,
"min_value" : -0.5,
"max_value" : 0.5,
"mean_value" : 0.4815981598159816,
"median_value" : 0.5,
"top_hits" : [
{
"value" : 0.5,
"count" : 19281
},
{
"value" : 0,
"count" : 698
},
{
"value" : -0.5,
"count" : 19
}
]
},
"fare_amount" : {
"count" : 19998,
"cardinality" : 208,
"min_value" : -100,
"max_value" : 300,
"mean_value" : 13.937719771977209,
"median_value" : 9.5,
"top_hits" : [
{
"value" : 6,
"count" : 1004
},
{
"value" : 6.5,
"count" : 935
},
{
"value" : 5.5,
"count" : 909
},
{
"value" : 7,
"count" : 903
},
{
"value" : 5,
"count" : 889
},
{
"value" : 7.5,
"count" : 854
},
{
"value" : 4.5,
"count" : 802
},
{
"value" : 8.5,
"count" : 790
},
{
"value" : 8,
"count" : 789
},
{
"value" : 9,
"count" : 711
}
]
},
"improvement_surcharge" : {
"count" : 19998,
"cardinality" : 3,
"min_value" : -0.3,
"max_value" : 0.3,
"mean_value" : 0.29915991599159913,
"median_value" : 0.3,
"top_hits" : [
{
"value" : 0.3,
"count" : 19964
},
{
"value" : -0.3,
"count" : 22
},
{
"value" : 0,
"count" : 12
}
]
},
"mta_tax" : {
"count" : 19998,
"cardinality" : 3,
"min_value" : -0.5,
"max_value" : 0.5,
"mean_value" : 0.4962246224622462,
"median_value" : 0.5,
"top_hits" : [
{
"value" : 0.5,
"count" : 19868
},
{
"value" : 0,
"count" : 109
},
{
"value" : -0.5,
"count" : 21
}
]
},
"passenger_count" : {
"count" : 19998,
"cardinality" : 7,
"min_value" : 0,
"max_value" : 6,
"mean_value" : 1.6201620162016201,
"median_value" : 1,
"top_hits" : [
{
"value" : 1,
"count" : 14219
},
{
"value" : 2,
"count" : 2886
},
{
"value" : 5,
"count" : 1047
},
{
"value" : 3,
"count" : 804
},
{
"value" : 6,
"count" : 523
},
{
"value" : 4,
"count" : 406
},
{
"value" : 0,
"count" : 113
}
]
},
"payment_type" : {
"count" : 19998,
"cardinality" : 4,
"min_value" : 1,
"max_value" : 4,
"mean_value" : 1.315631563156316,
"median_value" : 1,
"top_hits" : [
{
"value" : 1,
"count" : 13936
},
{
"value" : 2,
"count" : 5857
},
{
"value" : 3,
"count" : 160
},
{
"value" : 4,
"count" : 45
}
]
},
"store_and_fwd_flag" : {
"count" : 19998,
"cardinality" : 2,
"top_hits" : [
{
"value" : "N",
"count" : 19910
},
{
"value" : "Y",
"count" : 88
}
]
},
"tip_amount" : {
"count" : 19998,
"cardinality" : 717,
"min_value" : 0,
"max_value" : 128,
"mean_value" : 2.010959095909593,
"median_value" : 1.45,
"top_hits" : [
{
"value" : 0,
"count" : 6917
},
{
"value" : 1,
"count" : 1178
},
{
"value" : 2,
"count" : 624
},
{
"value" : 3,
"count" : 248
},
{
"value" : 1.56,
"count" : 206
},
{
"value" : 1.46,
"count" : 205
},
{
"value" : 1.76,
"count" : 196
},
{
"value" : 1.45,
"count" : 195
},
{
"value" : 1.36,
"count" : 191
},
{
"value" : 1.5,
"count" : 187
}
]
},
"tolls_amount" : {
"count" : 19998,
"cardinality" : 26,
"min_value" : 0,
"max_value" : 35,
"mean_value" : 0.2729697969796978,
"median_value" : 0,
"top_hits" : [
{
"value" : 0,
"count" : 19107
},
{
"value" : 5.76,
"count" : 791
},
{
"value" : 10.5,
"count" : 36
},
{
"value" : 2.64,
"count" : 21
},
{
"value" : 11.52,
"count" : 8
},
{
"value" : 5.54,
"count" : 4
},
{
"value" : 8.5,
"count" : 4
},
{
"value" : 17.28,
"count" : 4
},
{
"value" : 2,
"count" : 2
},
{
"value" : 2.16,
"count" : 2
}
]
},
"total_amount" : {
"count" : 19998,
"cardinality" : 1267,
"min_value" : -100.3,
"max_value" : 389.12,
"mean_value" : 17.499898989898995,
"median_value" : 12.35,
"top_hits" : [
{
"value" : 7.3,
"count" : 478
},
{
"value" : 8.3,
"count" : 443
},
{
"value" : 8.8,
"count" : 420
},
{
"value" : 6.8,
"count" : 406
},
{
"value" : 7.8,
"count" : 405
},
{
"value" : 6.3,
"count" : 371
},
{
"value" : 9.8,
"count" : 368
},
{
"value" : 5.8,
"count" : 362
},
{
"value" : 9.3,
"count" : 332
},
{
"value" : 10.3,
"count" : 332
}
]
},
"tpep_dropoff_datetime" : {
"count" : 19998,
"cardinality" : 9066,
"top_hits" : [
{
"value" : "2018-06-01 01:12:12",
"count" : 10
},
{
"value" : "2018-06-01 00:32:15",
"count" : 9
},
{
"value" : "2018-06-01 00:44:27",
"count" : 9
},
{
"value" : "2018-06-01 00:46:42",
"count" : 9
},
{
"value" : "2018-06-01 01:03:22",
"count" : 9
},
{
"value" : "2018-06-01 01:05:13",
"count" : 9
},
{
"value" : "2018-06-01 00:11:20",
"count" : 8
},
{
"value" : "2018-06-01 00:16:03",
"count" : 8
},
{
"value" : "2018-06-01 00:19:47",
"count" : 8
},
{
"value" : "2018-06-01 00:25:17",
"count" : 8
}
]
},
"tpep_pickup_datetime" : {
"count" : 19998,
"cardinality" : 8760,
"top_hits" : [
{
"value" : "2018-06-01 00:01:23",
"count" : 12
},
{
"value" : "2018-06-01 00:04:31",
"count" : 10
},
{
"value" : "2018-06-01 00:05:38",
"count" : 10
},
{
"value" : "2018-06-01 00:09:50",
"count" : 10
},
{
"value" : "2018-06-01 00:12:01",
"count" : 10
},
{
"value" : "2018-06-01 00:14:17",
"count" : 10
},
{
"value" : "2018-06-01 00:00:34",
"count" : 9
},
{
"value" : "2018-06-01 00:00:40",
"count" : 9
},
{
"value" : "2018-06-01 00:02:53",
"count" : 9
},
{
"value" : "2018-06-01 00:05:40",
"count" : 9
}
]
},
"trip_distance" : {
"count" : 19998,
"cardinality" : 1687,
"min_value" : 0,
"max_value" : 64.63,
"mean_value" : 3.6521062106210715,
"median_value" : 2.16,
"top_hits" : [
{
"value" : 0.9,
"count" : 335
},
{
"value" : 0.8,
"count" : 320
},
{
"value" : 1.1,
"count" : 316
},
{
"value" : 0.7,
"count" : 304
},
{
"value" : 1.2,
"count" : 303
},
{
"value" : 1,
"count" : 296
},
{
"value" : 1.3,
"count" : 280
},
{
"value" : 1.5,
"count" : 268
},
{
"value" : 1.6,
"count" : 268
},
{
"value" : 0.6,
"count" : 256
}
]
}
}
}
----
// NOTCONSOLE
<1> `num_messages_analyzed` is 2 lower than `num_lines_analyzed` because only
data records count as messages. The first line contains the column names
and in this sample the second line is blank.
<2> Unlike the first example, in this case the `format` has been identified as
`delimited`.
<3> Because the `format` is `delimited`, the `column_names` field in the output
lists the column names in the order they appear in the sample.
<4> `has_header_row` indicates that for this sample the column names were in
the first row of the sample. (If they hadn't been then it would have been
a good idea to specify them in the `column_names` query parameter.)
<5> The `delimiter` for this sample is a comma, as it's a CSV file.
<6> The `quote` character is the default double quote. (The structure finder
does not attempt to deduce any other quote character, so if you have a
delimited file that's quoted with some other character you must specify it
using the `quote` query parameter.)
<7> The `timestamp_field` has been chosen to be `tpep_pickup_datetime`.
`tpep_dropoff_datetime` would work just as well, but `tpep_pickup_datetime`
was chosen because it comes first in the column order. If you prefer
`tpep_dropoff_datetime` then force it to be chosen using the
`timestamp_field` query parameter.
<8> `joda_timestamp_formats` are used to tell Logstash and ingest pipelines how
to parse timestamps.
<9> `java_timestamp_formats` are the Java time formats recognized in the time
fields. In the future, ingest pipelines will switch to using these formats.
<10> The timestamp format in this sample doesn't specify a timezone, so to
accurately convert timestamps to UTC for storage in Elasticsearch it's
necessary to supply the timezone they relate to. `need_client_timezone`
will be `false` for timestamp formats that include the timezone.


@ -1,6 +1,8 @@
[[query-dsl-type-query]]
=== Type Query
deprecated[7.0.0, Types are being removed, prefer filtering on a field instead. For more information, please see <<removal-of-types>>.]
Filters documents matching the provided document / mapping type.
[source,js]


@ -1,5 +1,5 @@
[[es-release-notes]]
= Release Notes
= Release notes
[partintro]
--


@ -1,5 +1,5 @@
[[release-notes-7.0.0-alpha1]]
== 7.0.0-alpha1 Release Notes
== 7.0.0-alpha1 release notes
The changes listed below have been released for the first time in Elasticsearch 7.0.0-alpha1.


@ -1,7 +1,7 @@
[[release-highlights]]
= {es} Release Highlights
= {es} Release highlights
++++
<titleabbrev>Release Highlights</titleabbrev>
<titleabbrev>Release highlights</titleabbrev>
++++
[partintro]


@ -19,7 +19,7 @@ directly to configure and access {xpack} features.
include::info.asciidoc[]
include::{xes-repo-dir}/rest-api/graph/explore.asciidoc[]
include::{es-repo-dir}/graph/explore.asciidoc[]
include::{es-repo-dir}/licensing/index.asciidoc[]
include::{es-repo-dir}/migration/migration.asciidoc[]
include::{es-repo-dir}/ml/apis/ml-api.asciidoc[]


@ -2,7 +2,7 @@
[testenv="basic"]
[appendix]
[[sql-syntax-reserved]]
= Reserved Keywords
= Reserved keywords
The following table lists the reserved keywords that need to be quoted, with an example for each to make the quoting more obvious.


@ -8,68 +8,276 @@ to be numeric.
==== Generic
* `ABS`
[[sql-functions-math-abs]]
===== `ABS`
https://en.wikipedia.org/wiki/Absolute_value[Absolute value], returns \[same type as input]
["source","sql",subs="attributes,callouts,macros"]
.Synopsis:
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/math.sql-spec[abs]
ABS(numeric_exp<1>)
--------------------------------------------------
* `CBRT`
*Input*:
https://en.wikipedia.org/wiki/Cube_root[Cube root], returns `double`
<1> numeric expression
// TODO make the example in the tests presentable
*Output*: numeric
* `CEIL`
.Description:
https://en.wikipedia.org/wiki/Floor_and_ceiling_functions[Ceiling], returns `double`
Returns the https://en.wikipedia.org/wiki/Absolute_value[absolute value] of `numeric_exp`. The return type is the same as the input type.
* `CEILING`
Same as `CEIL`
// TODO make the example in the tests presentable
* `E`
https://en.wikipedia.org/wiki/E_%28mathematical_constant%29[Euler's number], returns `2.7182818284590452354`
* https://en.wikipedia.org/wiki/Exponential_function[e^x^] (`EXP`)
["source","sql",subs="attributes,callouts,macros"]
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/math.sql-spec[exp]
include-tagged::{sql-specs}/docs.csv-spec[abs]
--------------------------------------------------
* https://docs.oracle.com/javase/8/docs/api/java/lang/Math.html#expm1-double-[e^x^ - 1] (`EXPM1`)
[[sql-functions-math-cbrt]]
===== `CBRT`
["source","sql",subs="attributes,callouts,macros"]
.Synopsis:
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/math.sql-spec[expm1]
CBRT(numeric_exp<1>)
--------------------------------------------------
* https://en.wikipedia.org/wiki/Floor_and_ceiling_functions[Floor] (`FLOOR`)
*Input*:
// TODO make the example in the tests presentable
<1> numeric expression
* https://en.wikipedia.org/wiki/Natural_logarithm[Natural logarithm] (`LOG`)
*Output*: double numeric value
["source","sql",subs="attributes,callouts,macros"]
.Description:
Returns the https://en.wikipedia.org/wiki/Cube_root[cube root] of `numeric_exp`.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/math.sql-spec[log]
include-tagged::{sql-specs}/docs.csv-spec[mathInlineCbrtWithNegativeValue]
--------------------------------------------------
* https://en.wikipedia.org/wiki/Logarithm[Logarithm] base 10 (`LOG10`)
[[sql-functions-math-ceil]]
===== `CEIL/CEILING`
["source","sql",subs="attributes,callouts,macros"]
.Synopsis:
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/math.sql-spec[log10]
CEIL(numeric_exp<1>)
CEILING(numeric_exp<2>)
--------------------------------------------------
* `ROUND`
*Input*:
<1> numeric expression
<2> numeric expression
*Output*: integer or long numeric value
.Description:
Returns the smallest integer greater than or equal to `numeric_exp`.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineCeiling]
--------------------------------------------------
[[sql-functions-math-e]]
===== `E`
.Synopsis:
[source, sql]
--------------------------------------------------
E()
--------------------------------------------------
*Input*: _none_
*Output*: `2.718281828459045`
.Description:
Returns https://en.wikipedia.org/wiki/E_%28mathematical_constant%29[Euler's number].
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathEulersNumber]
--------------------------------------------------
[[sql-functions-math-exp]]
===== `EXP`
.Synopsis:
[source, sql]
--------------------------------------------------
EXP(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> float numeric expression
*Output*: double numeric value
.Description:
Returns https://en.wikipedia.org/wiki/Exponential_function[Euler's number raised to the power] of `numeric_exp` (e^numeric_exp^).
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathExpInline]
--------------------------------------------------
[[sql-functions-math-expm1]]
===== `EXPM1`
.Synopsis:
[source, sql]
--------------------------------------------------
EXPM1(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> float numeric expression
*Output*: double numeric value
.Description:
Returns https://docs.oracle.com/javase/8/docs/api/java/lang/Math.html#expm1-double-[Euler's number raised to the power] of `numeric_exp`, minus 1 (e^numeric_exp^ - 1).
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathExpm1Inline]
--------------------------------------------------
[[sql-functions-math-floor]]
===== `FLOOR`
.Synopsis:
[source, sql]
--------------------------------------------------
FLOOR(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: integer or long numeric value
.Description:
Returns the largest integer less than or equal to `numeric_exp`.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineFloor]
--------------------------------------------------
[[sql-functions-math-log]]
===== `LOG`
.Synopsis:
[source, sql]
--------------------------------------------------
LOG(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Natural_logarithm[natural logarithm] of `numeric_exp`.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineLog]
--------------------------------------------------
[[sql-functions-math-log10]]
===== `LOG10`
.Synopsis:
[source, sql]
--------------------------------------------------
LOG10(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Common_logarithm[base 10 logarithm] of `numeric_exp`.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineLog10]
--------------------------------------------------
[[sql-functions-math-pi]]
===== `PI`
.Synopsis:
[source, sql]
--------------------------------------------------
PI()
--------------------------------------------------
*Input*: _none_
*Output*: `3.141592653589793`
.Description:
Returns the mathematical constant https://en.wikipedia.org/wiki/Pi[π].
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathPINumber]
--------------------------------------------------
[[sql-functions-math-power]]
===== `POWER`
.Synopsis:
[source, sql]
--------------------------------------------------
POWER(numeric_exp<1>, integer_exp<2>)
--------------------------------------------------
*Input*:
<1> numeric expression
<2> integer expression
*Output*: double numeric value
.Description:
Returns the value of `numeric_exp` to the power of `integer_exp`.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlinePowerPositive]
--------------------------------------------------
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlinePowerNegative]
--------------------------------------------------
[[sql-functions-math-round]]
===== `ROUND`
.Synopsis:
[source, sql]
@ -78,8 +286,8 @@ ROUND(numeric_exp<1>[, integer_exp<2>])
----
*Input*:
<1> numeric expression
<2> integer expression; optional
<1> numeric expression
<2> integer expression; optional
*Output*: numeric
@ -89,24 +297,67 @@ Returns `numeric_exp` rounded to `integer_exp` places right of the decimal point
the function behaves as if `integer_exp` were 0. The returned numeric data type is the same as the data type
of `numeric_exp`.
["source","sql",subs="attributes,callouts,macros"]
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathRoundWithPositiveParameter]
--------------------------------------------------
["source","sql",subs="attributes,callouts,macros"]
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathRoundWithNegativeParameter]
--------------------------------------------------
* https://en.wikipedia.org/wiki/Square_root[Square root] (`SQRT`)
[[sql-functions-math-sign]]
===== `SIGN`
["source","sql",subs="attributes,callouts,macros"]
.Synopsis:
[source, sql]
--------------------------------------------------
include-tagged::{sql-specs}/math.sql-spec[sqrt]
SIGN(numeric_exp<1>)
--------------------------------------------------
* `TRUNCATE`
*Input*:
<1> numeric expression
*Output*: [-1, 0, 1]
.Description:
Returns an indicator of the sign of `numeric_exp`. If `numeric_exp` is less than zero, -1 is returned. If `numeric_exp` equals zero, 0 is returned. If `numeric_exp` is greater than zero, 1 is returned.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineSign]
--------------------------------------------------
[[sql-functions-math-sqrt]]
===== `SQRT`
.Synopsis:
[source, sql]
--------------------------------------------------
SQRT(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Square_root[square root] of `numeric_exp`.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineSqrt]
--------------------------------------------------
[[sql-functions-math-truncate]]
===== `TRUNCATE`
.Synopsis:
[source, sql]
@ -115,8 +366,8 @@ TRUNCATE(numeric_exp<1>[, integer_exp<2>])
----
*Input*:
<1> numeric expression
<2> integer expression; optional
<1> numeric expression
<2> integer expression; optional
*Output*: numeric
@ -126,86 +377,305 @@ Returns `numeric_exp` truncated to `integer_exp` places right of the decimal poi
the function behaves as if `integer_exp` were 0. The returned numeric data type is the same as the data type
of `numeric_exp`.
["source","sql",subs="attributes,callouts,macros"]
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathTruncateWithPositiveParameter]
--------------------------------------------------
["source","sql",subs="attributes,callouts,macros"]
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathTruncateWithNegativeParameter]
--------------------------------------------------
==== Trigonometric
[[sql-functions-math-acos]]
===== `ACOS`
.Synopsis:
[source, sql]
--------------------------------------------------
ACOS(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arccosine] of `numeric_exp` as an angle, expressed in radians.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineAcos]
--------------------------------------------------
[[sql-functions-math-asin]]
===== `ASIN`
.Synopsis:
[source, sql]
--------------------------------------------------
ASIN(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arcsine] of `numeric_exp` as an angle, expressed in radians.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineAsin]
--------------------------------------------------
[[sql-functions-math-atan]]
===== `ATAN`
.Synopsis:
[source, sql]
--------------------------------------------------
ATAN(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arctangent] of `numeric_exp` as an angle, expressed in radians.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineAtan]
--------------------------------------------------
[[sql-functions-math-atan2]]
===== `ATAN2`
.Synopsis:
[source, sql]
--------------------------------------------------
ATAN2(ordinate<1>, abscissa<2>)
--------------------------------------------------
*Input*:
<1> numeric expression
<2> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Atan2[arctangent of the `ordinate` and `abscissa` coordinates] specified as an angle, expressed in radians.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineAtan2]
--------------------------------------------------
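For intuition, a hypothetical example (again not one of the tracked specs): the point (1, 1) sits at 45° from the positive x-axis, i.e. π/4 radians:
[source, sql]
--------------------------------------------------
SELECT ATAN2(1, 1) AS angle; -- 0.7853981633974483 (π/4)
--------------------------------------------------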
[[sql-functions-math-cos]]
===== `COS`
.Synopsis:
[source, sql]
--------------------------------------------------
COS(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#cosine[cosine] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineCosine]
--------------------------------------------------
[[sql-functions-math-cosh]]
===== `COSH`
.Synopsis:
[source, sql]
--------------------------------------------------
COSH(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Hyperbolic_function[hyperbolic cosine] of `numeric_exp`.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineCosh]
--------------------------------------------------
[[sql-functions-math-cot]]
===== `COT`
.Synopsis:
[source, sql]
--------------------------------------------------
COT(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#Cosecant,_secant,_and_cotangent[cotangent] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineCotangent]
--------------------------------------------------
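Since the cotangent is the reciprocal of the tangent, a hypothetical sanity check (not from the specs) is that `COT(x)` equals `1 / TAN(x)`:
[source, sql]
--------------------------------------------------
SELECT COT(1) AS cot, 1.0 / TAN(1) AS inv_tan; -- both are about 0.6420926159343306
--------------------------------------------------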
[[sql-functions-math-degrees]]
===== `DEGREES`
.Synopsis:
[source, sql]
--------------------------------------------------
DEGREES(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Convert from https://en.wikipedia.org/wiki/Radian[radians]
to https://en.wikipedia.org/wiki/Degree_(angle)[degrees].
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineDegrees]
--------------------------------------------------
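As a hypothetical sanity check (not from the specs), and assuming the standard `PI()` math function, π radians converts to 180 degrees:
[source, sql]
--------------------------------------------------
SELECT DEGREES(PI()) AS deg; -- 180.0
--------------------------------------------------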
[[sql-functions-math-radians]]
===== `RADIANS`
.Synopsis:
[source, sql]
--------------------------------------------------
RADIANS(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Convert from https://en.wikipedia.org/wiki/Degree_(angle)[degrees]
to https://en.wikipedia.org/wiki/Radian[radians].
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineRadians]
--------------------------------------------------
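And the inverse direction, again as an illustrative query rather than a tracked spec:
[source, sql]
--------------------------------------------------
SELECT RADIANS(180) AS rad; -- 3.141592653589793 (π)
--------------------------------------------------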
[[sql-functions-math-sin]]
===== `SIN`
.Synopsis:
[source, sql]
--------------------------------------------------
SIN(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#sine[sine] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineSine]
--------------------------------------------------
[[sql-functions-math-sinh]]
===== `SINH`
.Synopsis:
[source, sql]
--------------------------------------------------
SINH(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Hyperbolic_function[hyperbolic sine] of `numeric_exp`.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineSinh]
--------------------------------------------------
[[sql-functions-math-tan]]
===== `TAN`
.Synopsis:
[source, sql]
--------------------------------------------------
TAN(numeric_exp<1>)
--------------------------------------------------
*Input*:
<1> numeric expression
*Output*: double numeric value
.Description:
Returns the https://en.wikipedia.org/wiki/Trigonometric_functions#tangent[tangent] of `numeric_exp`, where `numeric_exp` is an angle expressed in radians.
["source","sql",subs="attributes,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[mathInlineTanget]
--------------------------------------------------
View File
@ -8,233 +8,456 @@ Functions for performing string manipulation.
[[sql-functions-string-ascii]]
==== `ASCII`
.Synopsis:
[source, sql]
--------------------------------------------------
ASCII(string_exp<1>)
--------------------------------------------------
*Input*:
<1> string expression
*Output*: integer
.Description:
Returns the ASCII code value of the leftmost character of `string_exp` as an integer.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringAscii]
--------------------------------------------------
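A hypothetical one-liner (not from the specs) showing that only the leftmost character is considered:
[source, sql]
--------------------------------------------------
SELECT ASCII('Elastic') AS code; -- 69, the code of 'E'
--------------------------------------------------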
[[sql-functions-string-bit-length]]
==== `BIT_LENGTH`
.Synopsis:
[source, sql]
--------------------------------------------------
BIT_LENGTH(string_exp<1>)
--------------------------------------------------
*Input*:
<1> string expression
*Output*: integer
.Description:
Returns the length in bits of the `string_exp` input expression.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringBitLength]
--------------------------------------------------
[[sql-functions-string-char]]
==== `CHAR`
.Synopsis:
[source, sql]
--------------------------------------------------
CHAR(code<1>)
--------------------------------------------------
*Input*:
<1> integer expression
*Output*: string
.Description:
Returns the character that has the ASCII code value specified by the numeric input. The value should be between 0 and 255; otherwise, the return value is data source-dependent.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringChar]
--------------------------------------------------
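Illustratively (not a tracked spec), `CHAR` is the inverse of `ASCII` for codes in range:
[source, sql]
--------------------------------------------------
SELECT CHAR(69) AS ch; -- 'E'
--------------------------------------------------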
[[sql-functions-string-char-length]]
==== `CHAR_LENGTH`
.Synopsis:
[source, sql]
--------------------------------------------------
CHAR_LENGTH(string_exp<1>)
--------------------------------------------------
*Input*:
<1> string expression
*Output*: integer
.Description:
Returns the length in characters of the input, if the string expression is of a character data type; otherwise, returns the length in bytes of the string expression (the smallest integer not less than the number of bits divided by 8).
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringCharLength]
--------------------------------------------------
[[sql-functions-string-concat]]
==== `CONCAT`
.Synopsis:
[source, sql]
--------------------------------------------------
CONCAT(string_exp1<1>,string_exp2<2>)
--------------------------------------------------
*Input*:
<1> string expression
<2> string expression
*Output*: string
.Description:
Returns a character string that is the result of concatenating `string_exp1` to `string_exp2`. If one of the strings is `NULL`, the other string is returned.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringConcat]
--------------------------------------------------
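A hypothetical example (not from the specs) of both the normal case and the `NULL` handling:
[source, sql]
--------------------------------------------------
SELECT CONCAT('Elastic', 'search') AS whole,    -- 'Elasticsearch'
       CONCAT(NULL, 'search')      AS with_null; -- 'search'
--------------------------------------------------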
[[sql-functions-string-insert]]
==== `INSERT`
.Synopsis:
[source, sql]
--------------------------------------------------
INSERT(source<1>, start<2>, length<3>, replacement<4>)
--------------------------------------------------
*Input*:
<1> string expression
<2> integer expression
<3> integer expression
<4> string expression
*Output*: string
.Description:
Returns a string where `length` characters have been deleted from `source`, beginning at `start`, and where `replacement` has been inserted into `source`, beginning at `start`.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringInsert]
--------------------------------------------------
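To trace the semantics, a made-up example (not from the specs): positions are 1-based, so `start` = 3 and `length` = 2 delete 'as' before the replacement is inserted:
[source, sql]
--------------------------------------------------
SELECT INSERT('Elastic', 3, 2, 'XX') AS res; -- 'ElXXtic'
--------------------------------------------------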
[[sql-functions-string-lcase]]
==== `LCASE`
.Synopsis:
[source, sql]
--------------------------------------------------
LCASE(string_exp<1>)
--------------------------------------------------
*Input*:
<1> string expression
*Output*: string
.Description:
Returns a string equal to that in `string_exp`, with all uppercase characters converted to lowercase.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringLCase]
--------------------------------------------------
[[sql-functions-string-left]]
==== `LEFT`
.Synopsis:
[source, sql]
--------------------------------------------------
LEFT(string_exp<1>, count<2>)
--------------------------------------------------
*Input*:
<1> string expression
<2> integer expression
*Output*: string
.Description:
Returns the leftmost `count` characters of `string_exp`.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringLeft]
--------------------------------------------------
[[sql-functions-string-length]]
==== `LENGTH`
.Synopsis:
[source, sql]
--------------------------------------------------
LENGTH(string_exp<1>)
--------------------------------------------------
*Input*:
<1> string expression
*Output*: integer
.Description:
Returns the number of characters in `string_exp`, excluding trailing blanks.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringLength]
--------------------------------------------------
[[sql-functions-string-locate]]
==== `LOCATE`
.Synopsis:
[source, sql]
--------------------------------------------------
LOCATE(pattern<1>, source<2>[, start]<3>)
--------------------------------------------------
*Input*:
<1> string expression
<2> string expression
<3> integer expression; optional
*Output*: integer
.Description:
Returns the starting position of the first occurrence of `pattern` within `source`. The search for the first occurrence of `pattern` begins with the first character position in `source` unless the optional argument, `start`, is specified. If `start` is specified, the search begins with the character position indicated by the value of `start`. The first character position in `source` is indicated by the value 1. If `pattern` is not found within `source`, the value 0 is returned.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringLocateWoStart]
--------------------------------------------------
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringLocateWithStart]
--------------------------------------------------
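A hypothetical illustration (not from the specs) of how `start` moves the search window:
[source, sql]
--------------------------------------------------
SELECT LOCATE('a', 'banana')    AS first_hit,  -- 2
       LOCATE('a', 'banana', 3) AS from_third; -- 4
--------------------------------------------------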
[[sql-functions-string-ltrim]]
==== `LTRIM`
.Synopsis:
[source, sql]
--------------------------------------------------
LTRIM(string_exp<1>)
--------------------------------------------------
*Input*:
<1> string expression
*Output*: string
.Description:
Returns the characters of `string_exp`, with leading blanks removed.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringLTrim]
--------------------------------------------------
[[sql-functions-string-position]]
==== `POSITION`
.Synopsis:
[source, sql]
--------------------------------------------------
POSITION(string_exp1<1>, string_exp2<2>)
--------------------------------------------------
*Input*:
<1> string expression
<2> string expression
*Output*: integer
.Description:
Returns the position of `string_exp1` in `string_exp2`. The result is an exact numeric.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringPosition]
--------------------------------------------------
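As a quick made-up example (not from the specs):
[source, sql]
--------------------------------------------------
SELECT POSITION('an', 'banana') AS pos; -- 2
--------------------------------------------------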
[[sql-functions-string-repeat]]
==== `REPEAT`
.Synopsis:
[source, sql]
--------------------------------------------------
REPEAT(string_exp<1>, count<2>)
--------------------------------------------------
*Input*:
<1> string expression
<2> integer expression
*Output*: string
.Description:
Returns a character string composed of `string_exp` repeated `count` times.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringRepeat]
--------------------------------------------------
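Illustratively (not a tracked spec):
[source, sql]
--------------------------------------------------
SELECT REPEAT('la', 3) AS res; -- 'lalala'
--------------------------------------------------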
[[sql-functions-string-replace]]
==== `REPLACE`
.Synopsis:
[source, sql]
--------------------------------------------------
REPLACE(source<1>, pattern<2>, replacement<3>)
--------------------------------------------------
*Input*:
<1> string expression
<2> string expression
<3> string expression
*Output*: string
.Description:
Searches `source` for occurrences of `pattern` and replaces them with `replacement`.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringReplace]
--------------------------------------------------
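Note that every occurrence is replaced, as this hypothetical example (not from the specs) shows:
[source, sql]
--------------------------------------------------
SELECT REPLACE('banana', 'an', 'or') AS res; -- 'borora'
--------------------------------------------------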
[[sql-functions-string-right]]
==== `RIGHT`
.Synopsis:
[source, sql]
--------------------------------------------------
RIGHT(string_exp<1>, count<2>)
--------------------------------------------------
*Input*:
<1> string expression
<2> integer expression
*Output*: string
.Description:
Returns the rightmost `count` characters of `string_exp`.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringRight]
--------------------------------------------------
[[sql-functions-string-rtrim]]
==== `RTRIM`
.Synopsis:
[source, sql]
--------------------------------------------------
RTRIM(string_exp<1>)
--------------------------------------------------
*Input*:
<1> string expression
*Output*: string
.Description:
Returns the characters of `string_exp` with trailing blanks removed.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringRTrim]
--------------------------------------------------
[[sql-functions-string-space]]
==== `SPACE`
.Synopsis:
[source, sql]
--------------------------------------------------
SPACE(count<1>)
--------------------------------------------------
*Input*:
<1> integer expression
*Output*: string
.Description:
Returns a character string consisting of `count` spaces.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringSpace]
--------------------------------------------------
[[sql-functions-string-substring]]
==== `SUBSTRING`
.Synopsis:
[source, sql]
--------------------------------------------------
SUBSTRING(source<1>, start<2>, length<3>)
--------------------------------------------------
*Input*:
<1> string expression
<2> integer expression
<3> integer expression
*Output*: string
.Description:
Returns a character string that is derived from `source`, beginning at the character position specified by `start` for `length` characters.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringSubString]
--------------------------------------------------
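A made-up example (not from the specs); as elsewhere, `start` is 1-based:
[source, sql]
--------------------------------------------------
SELECT SUBSTRING('Elasticsearch', 8, 6) AS res; -- 'search'
--------------------------------------------------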
[[sql-functions-string-ucase]]
==== `UCASE`
.Synopsis:
[source, sql]
--------------------------------------------------
UCASE(string_exp<1>)
--------------------------------------------------
*Input*:
<1> string expression
*Output*: string
.Description:
Returns a string equal to that of the input, with all lowercase characters converted to uppercase.
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/docs.csv-spec[stringUCase]
--------------------------------------------------
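And a final hypothetical illustration (not from the specs):
[source, sql]
--------------------------------------------------
SELECT UCASE('Elastic') AS res; -- 'ELASTIC'
--------------------------------------------------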
View File
@ -1,7 +1,7 @@
[role="xpack"]
[testenv="basic"]
[[xpack-sql]]
= SQL access
:sql-tests: {xes-repo-dir}/../../qa/sql
:sql-specs: {sql-tests}/src/main/resources
View File
@ -35,12 +35,12 @@ import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
public final class Grok {
@ -184,6 +184,7 @@ public final class Grok {
String namedPatternRef = groupMatch(NAME_GROUP, region, grokPattern);
String subName = groupMatch(SUBNAME_GROUP, region, grokPattern);
// TODO(tal): Support definitions
@SuppressWarnings("unused")
String definition = groupMatch(DEFINITION_GROUP, region, grokPattern);
String patternName = groupMatch(PATTERN_GROUP, region, grokPattern);
View File
@ -262,8 +262,6 @@ public class GrokTests extends ESTestCase {
}
public void testBooleanCaptures() {
String pattern = "%{WORD:name}=%{WORD:status:boolean}";
Grok g = new Grok(basePatterns, pattern);
View File
@ -21,7 +21,6 @@ package org.elasticsearch.nio;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
import java.io.IOException;
import java.nio.ByteBuffer;
@ -61,8 +60,6 @@ public class FlushOperationTests extends ESTestCase {
ByteBuffer[] buffers = {ByteBuffer.allocate(10), ByteBuffer.allocate(15), ByteBuffer.allocate(3)};
FlushOperation writeOp = new FlushOperation(buffers, listener);
writeOp.incrementIndex(5);
assertFalse(writeOp.isFullyFlushed());
ByteBuffer[] byteBuffers = writeOp.getBuffersToWrite();
View File
@ -25,6 +25,8 @@ import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.file.Path;
import java.time.ZonedDateTime;
import java.util.Arrays;
@ -103,7 +105,8 @@ public final class XContentBuilder implements Closeable, Flushable {
writers.put(ZonedDateTime.class, (b, v) -> b.value(v.toString()));
writers.put(Calendar.class, XContentBuilder::timeValue);
writers.put(GregorianCalendar.class, XContentBuilder::timeValue);
writers.put(BigInteger.class, (b, v) -> b.value((BigInteger) v));
writers.put(BigDecimal.class, (b, v) -> b.value((BigDecimal) v));
Map<Class<?>, HumanReadableTransformer> humanReadableTransformer = new HashMap<>();
Map<Class<?>, Function<Object, Object>> dateTransformers = new HashMap<>();
@ -546,6 +549,81 @@ public final class XContentBuilder implements Closeable, Flushable {
return this;
}
////////////////////////////////////////////////////////////////////////////
// BigInteger
//////////////////////////////////
public XContentBuilder field(String name, BigInteger value) throws IOException {
if (value == null) {
return nullField(name);
}
ensureNameNotNull(name);
generator.writeNumberField(name, value);
return this;
}
public XContentBuilder array(String name, BigInteger[] values) throws IOException {
return field(name).values(values);
}
private XContentBuilder values(BigInteger[] values) throws IOException {
if (values == null) {
return nullValue();
}
startArray();
for (BigInteger b : values) {
value(b);
}
endArray();
return this;
}
public XContentBuilder value(BigInteger value) throws IOException {
if (value == null) {
return nullValue();
}
generator.writeNumber(value);
return this;
}
////////////////////////////////////////////////////////////////////////////
// BigDecimal
//////////////////////////////////
public XContentBuilder field(String name, BigDecimal value) throws IOException {
if (value == null) {
return nullField(name);
}
ensureNameNotNull(name);
generator.writeNumberField(name, value);
return this;
}
public XContentBuilder array(String name, BigDecimal[] values) throws IOException {
return field(name).values(values);
}
private XContentBuilder values(BigDecimal[] values) throws IOException {
if (values == null) {
return nullValue();
}
startArray();
for (BigDecimal b : values) {
value(b);
}
endArray();
return this;
}
public XContentBuilder value(BigDecimal value) throws IOException {
if (value == null) {
return nullValue();
}
generator.writeNumber(value);
return this;
}
////////////////////////////////////////////////////////////////////////////
// String
//////////////////////////////////
View File
@ -23,6 +23,8 @@ import java.io.Closeable;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.math.BigInteger;
public interface XContentGenerator extends Closeable, Flushable {
@ -70,6 +72,14 @@ public interface XContentGenerator extends Closeable, Flushable {
void writeNumber(short value) throws IOException;
void writeNumber(BigInteger value) throws IOException;
void writeNumberField(String name, BigInteger value) throws IOException;
void writeNumber(BigDecimal value) throws IOException;
void writeNumberField(String name, BigDecimal value) throws IOException;
void writeStringField(String name, String value) throws IOException;
void writeString(String value) throws IOException;
View File
@ -42,6 +42,8 @@ import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Objects;
import java.util.Set;
@ -226,6 +228,19 @@ public class JsonXContentGenerator implements XContentGenerator {
generator.writeNumberField(name, value);
}
@Override
public void writeNumberField(String name, BigInteger value) throws IOException {
// as jackson's JsonGenerator doesn't have this method for BigInteger
// we have to implement it ourselves
generator.writeFieldName(name);
generator.writeNumber(value);
}
@Override
public void writeNumberField(String name, BigDecimal value) throws IOException {
generator.writeNumberField(name, value);
}
@Override
public void writeNumber(int value) throws IOException {
generator.writeNumber(value);
@ -246,6 +261,16 @@ public class JsonXContentGenerator implements XContentGenerator {
generator.writeNumber(value);
}
@Override
public void writeNumber(BigInteger value) throws IOException {
generator.writeNumber(value);
}
@Override
public void writeNumber(BigDecimal value) throws IOException {
generator.writeNumber(value);
}
@Override
public void writeStringField(String name, String value) throws IOException {
generator.writeStringField(name, value);
View File
@ -222,6 +222,7 @@ public class ObjectParserTests extends ESTestCase {
public void testFailOnValueType() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"numeric_value\" : false}");
class TestStruct {
@SuppressWarnings("unused")
public String test;
}
ObjectParser<TestStruct, Void> objectParser = new ObjectParser<>("foo");
View File
@ -54,6 +54,7 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory {
private final CharArraySet keepWords;
private static final String KEEP_WORDS_KEY = "keep_words";
private static final String KEEP_WORDS_PATH_KEY = KEEP_WORDS_KEY + "_path";
@SuppressWarnings("unused")
private static final String KEEP_WORDS_CASE_KEY = KEEP_WORDS_KEY + "_case"; // for javadoc
// unsupported ancient option
View File
@ -53,7 +53,6 @@ import org.apache.lucene.analysis.pt.PortugueseStemFilter;
import org.apache.lucene.analysis.ru.RussianLightStemFilter;
import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.apache.lucene.analysis.sv.SwedishLightStemFilter;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@ -94,8 +93,6 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {
@Override
public TokenStream create(TokenStream tokenStream) {
if ("arabic".equalsIgnoreCase(language)) {
return new ArabicStemFilter(tokenStream);
} else if ("armenian".equalsIgnoreCase(language)) {
View File
@ -31,6 +31,7 @@ import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.grok.Grok;
import org.elasticsearch.grok.ThreadWatchdog;
import org.elasticsearch.ingest.DropProcessor;
import org.elasticsearch.ingest.PipelineProcessor;
import org.elasticsearch.ingest.Processor;
import org.elasticsearch.plugins.ActionPlugin;
View File
@ -0,0 +1,59 @@
---
teardown:
- do:
ingest.delete_pipeline:
id: "my_pipeline"
ignore: 404
---
"Test Drop Processor":
- do:
ingest.put_pipeline:
id: "my_pipeline"
body: >
{
"description" : "pipeline with drop",
"processors" : [
{
"drop" : {
"if": "ctx.foo == 'bar'"
}
}
]
}
- match: { acknowledged: true }
- do:
index:
index: test
type: test
id: 1
pipeline: "my_pipeline"
body: {
foo: "bar"
}
- do:
index:
index: test
type: test
id: 2
pipeline: "my_pipeline"
body: {
foo: "blub"
}
- do:
catch: missing
get:
index: test
type: test
id: 1
- match: { found: false }
- do:
get:
index: test
type: test
id: 2
- match: { _source.foo: "blub" }
View File
@ -28,11 +28,14 @@ import org.elasticsearch.painless.spi.Whitelist;
import org.objectweb.asm.util.Printer;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.CodeSource;
import java.security.SecureClassLoader;
import java.security.cert.Certificate;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
@ -89,16 +92,11 @@ final class Compiler {
*/
@Override
public Class<?> findClass(String name) throws ClassNotFoundException {
Class<?> found = additionalClasses.get(name);
if (found != null) {
return found;
}
found = painlessLookup.canonicalTypeNameToType(name.replace('$', '.'));
return found != null ? found : super.findClass(name);
}
@ -155,21 +153,16 @@ final class Compiler {
*/
private final Class<?> scriptClass;
/**
* The whitelist the script will use.
*/
private final PainlessLookup painlessLookup;
/**
* Classes that do not exist in the lookup, but are needed by the script factories.
*/
private final Map<String, Class<?>> additionalClasses;
/**
* Standard constructor.
* @param scriptClass The class/interface the script will implement.
@ -179,9 +172,36 @@ final class Compiler {
*/
Compiler(Class<?> scriptClass, Class<?> factoryClass, Class<?> statefulFactoryClass, PainlessLookup painlessLookup) {
this.scriptClass = scriptClass;
this.painlessLookup = painlessLookup;
Map<String, Class<?>> additionalClasses = new HashMap<>();
additionalClasses.put(scriptClass.getName(), scriptClass);
addFactoryMethod(additionalClasses, factoryClass, "newInstance");
addFactoryMethod(additionalClasses, statefulFactoryClass, "newFactory");
addFactoryMethod(additionalClasses, statefulFactoryClass, "newInstance");
this.additionalClasses = Collections.unmodifiableMap(additionalClasses);
}
private static void addFactoryMethod(Map<String, Class<?>> additionalClasses, Class<?> factoryClass, String methodName) {
if (factoryClass == null) {
return;
}
Method factoryMethod = null;
for (Method method : factoryClass.getMethods()) {
if (methodName.equals(method.getName())) {
factoryMethod = method;
break;
}
}
if (factoryMethod == null) {
return;
}
additionalClasses.put(factoryClass.getName(), factoryClass);
for (int i = 0; i < factoryMethod.getParameterTypes().length; ++i) {
Class<?> parameterClazz = factoryMethod.getParameterTypes()[i];
additionalClasses.put(parameterClazz.getName(), parameterClazz);
}
}
/**
View File
@ -295,7 +295,6 @@ public final class Locals {
public final Class<?> clazz;
public final boolean readonly;
private final int slot;
public Variable(Location location, String name, Class<?> clazz, int slot, boolean readonly) {
this.location = location;
View File
@ -19,10 +19,15 @@
package org.elasticsearch.painless.lookup;
import java.util.Objects;
public class PainlessCast {
/** Create a standard cast with no boxing/unboxing. */
public static PainlessCast originalTypetoTargetType(Class<?> originalType, Class<?> targetType, boolean explicitCast) {
Objects.requireNonNull(originalType);
Objects.requireNonNull(targetType);
return new PainlessCast(originalType, targetType, explicitCast, null, null, null, null);
}
@ -30,6 +35,10 @@ public class PainlessCast {
public static PainlessCast unboxOriginalType(
Class<?> originalType, Class<?> targetType, boolean explicitCast, Class<?> unboxOriginalType) {
Objects.requireNonNull(originalType);
Objects.requireNonNull(targetType);
Objects.requireNonNull(unboxOriginalType);
return new PainlessCast(originalType, targetType, explicitCast, unboxOriginalType, null, null, null);
}
@ -37,6 +46,10 @@ public class PainlessCast {
public static PainlessCast unboxTargetType(
Class<?> originalType, Class<?> targetType, boolean explicitCast, Class<?> unboxTargetType) {
Objects.requireNonNull(originalType);
Objects.requireNonNull(targetType);
Objects.requireNonNull(unboxTargetType);
return new PainlessCast(originalType, targetType, explicitCast, null, unboxTargetType, null, null);
}
@ -44,6 +57,10 @@ public class PainlessCast {
public static PainlessCast boxOriginalType(
Class<?> originalType, Class<?> targetType, boolean explicitCast, Class<?> boxOriginalType) {
Objects.requireNonNull(originalType);
Objects.requireNonNull(targetType);
Objects.requireNonNull(boxOriginalType);
return new PainlessCast(originalType, targetType, explicitCast, null, null, boxOriginalType, null);
}
@ -51,6 +68,10 @@ public class PainlessCast {
public static PainlessCast boxTargetType(
Class<?> originalType, Class<?> targetType, boolean explicitCast, Class<?> boxTargetType) {
Objects.requireNonNull(originalType);
Objects.requireNonNull(targetType);
Objects.requireNonNull(boxTargetType);
return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType);
}
@ -73,4 +94,30 @@ public class PainlessCast {
this.boxOriginalType = boxOriginalType;
this.boxTargetType = boxTargetType;
}
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
PainlessCast that = (PainlessCast)object;
return explicitCast == that.explicitCast &&
Objects.equals(originalType, that.originalType) &&
Objects.equals(targetType, that.targetType) &&
Objects.equals(unboxOriginalType, that.unboxOriginalType) &&
Objects.equals(unboxTargetType, that.unboxTargetType) &&
Objects.equals(boxOriginalType, that.boxOriginalType) &&
Objects.equals(boxTargetType, that.boxTargetType);
}
@Override
public int hashCode() {
return Objects.hash(originalType, targetType, explicitCast, unboxOriginalType, unboxTargetType, boxOriginalType, boxTargetType);
}
}
View File
@ -22,6 +22,7 @@ package org.elasticsearch.painless.lookup;
import java.lang.invoke.MethodHandle;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
public final class PainlessClass {
@ -57,4 +58,29 @@ public final class PainlessClass {
this.functionalInterfaceMethod = functionalInterfaceMethod;
}
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
PainlessClass that = (PainlessClass)object;
return Objects.equals(constructors, that.constructors) &&
Objects.equals(staticMethods, that.staticMethods) &&
Objects.equals(methods, that.methods) &&
Objects.equals(staticFields, that.staticFields) &&
Objects.equals(fields, that.fields) &&
Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod);
}
@Override
public int hashCode() {
return Objects.hash(constructors, staticMethods, methods, staticFields, fields, functionalInterfaceMethod);
}
}
View File
@ -22,6 +22,7 @@ package org.elasticsearch.painless.lookup;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Objects;
public class PainlessClassBinding {
@ -38,4 +39,28 @@ public class PainlessClassBinding {
this.returnType = returnType;
this.typeParameters = typeParameters;
}
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
PainlessClassBinding that = (PainlessClassBinding)object;
return Objects.equals(javaConstructor, that.javaConstructor) &&
Objects.equals(javaMethod, that.javaMethod) &&
Objects.equals(returnType, that.returnType) &&
Objects.equals(typeParameters, that.typeParameters);
}
@Override
public int hashCode() {
return Objects.hash(javaConstructor, javaMethod, returnType, typeParameters);
}
}
View File
@ -22,6 +22,7 @@ package org.elasticsearch.painless.lookup;
import java.lang.invoke.MethodHandle;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
final class PainlessClassBuilder {
@ -57,4 +58,29 @@ final class PainlessClassBuilder {
return new PainlessClass(constructors, staticMethods, methods, staticFields, fields,
getterMethodHandles, setterMethodHandles, functionalInterfaceMethod);
}
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
PainlessClassBuilder that = (PainlessClassBuilder)object;
return Objects.equals(constructors, that.constructors) &&
Objects.equals(staticMethods, that.staticMethods) &&
Objects.equals(methods, that.methods) &&
Objects.equals(staticFields, that.staticFields) &&
Objects.equals(fields, that.fields) &&
Objects.equals(functionalInterfaceMethod, that.functionalInterfaceMethod);
}
@Override
public int hashCode() {
return Objects.hash(constructors, staticMethods, methods, staticFields, fields, functionalInterfaceMethod);
}
}
View File
@ -23,6 +23,7 @@ import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodType;
import java.lang.reflect.Constructor;
import java.util.List;
import java.util.Objects;
public class PainlessConstructor {
@ -37,4 +38,26 @@ public class PainlessConstructor {
this.methodHandle = methodHandle;
this.methodType = methodType;
}
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
PainlessConstructor that = (PainlessConstructor)object;
return Objects.equals(javaConstructor, that.javaConstructor) &&
Objects.equals(typeParameters, that.typeParameters) &&
Objects.equals(methodType, that.methodType);
}
@Override
public int hashCode() {
return Objects.hash(javaConstructor, typeParameters, methodType);
}
}
View File
@ -21,6 +21,7 @@ package org.elasticsearch.painless.lookup;
import java.lang.invoke.MethodHandle;
import java.lang.reflect.Field;
import java.util.Objects;
public final class PainlessField {
@ -37,4 +38,25 @@ public final class PainlessField {
this.getterMethodHandle = getterMethodHandle;
this.setterMethodHandle = setterMethodHandle;
}
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
PainlessField that = (PainlessField)object;
return Objects.equals(javaField, that.javaField) &&
Objects.equals(typeParameter, that.typeParameter);
}
@Override
public int hashCode() {
return Objects.hash(javaField, typeParameter);
}
}
View File
@ -20,8 +20,8 @@
package org.elasticsearch.painless.lookup;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistClass;
import org.elasticsearch.painless.spi.WhitelistClassBinding;
import org.elasticsearch.painless.spi.WhitelistConstructor;
import org.elasticsearch.painless.spi.WhitelistField;
import org.elasticsearch.painless.spi.WhitelistMethod;
@ -34,7 +34,6 @@ import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -51,155 +50,10 @@ import static org.elasticsearch.painless.lookup.PainlessLookupUtility.typesToCan
public final class PainlessLookupBuilder {
private static final Map<PainlessConstructor , PainlessConstructor> painlessConstructorCache = new HashMap<>();
private static final Map<PainlessMethod , PainlessMethod> painlessMethodCache = new HashMap<>();
private static final Map<PainlessField , PainlessField> painlessFieldCache = new HashMap<>();
private static final Map<PainlessClassBinding, PainlessClassBinding> painlessClassBindingCache = new HashMap<>();
private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$");
private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$");
@ -335,8 +189,7 @@ public final class PainlessLookupBuilder {
throw new IllegalArgumentException("invalid class name [" + canonicalClassName + "]");
}
Class<?> existingClass = canonicalClassNamesToClasses.get(canonicalClassName);
if (existingClass != null && existingClass != clazz) {
throw new IllegalArgumentException("class [" + canonicalClassName + "] " +
@ -360,22 +213,22 @@ public final class PainlessLookupBuilder {
throw new IllegalArgumentException("must use no_import parameter on class [" + canonicalClassName + "] with no package");
}
} else {
Class<?> importedClass = canonicalClassNamesToClasses.get(importedCanonicalClassName);
if (importedClass == null) {
if (importClassName) {
if (existingPainlessClassBuilder != null) {
throw new IllegalArgumentException(
"inconsistent no_import parameter found for class [" + canonicalClassName + "]");
}
canonicalClassNamesToClasses.put(importedCanonicalClassName, clazz);
}
} else if (importedClass != clazz) {
throw new IllegalArgumentException("imported class [" + importedCanonicalClassName + "] cannot represent multiple " +
"classes [" + canonicalClassName + "] and [" + typeToCanonicalTypeName(importedClass) + "]");
} else if (importClassName == false) {
throw new IllegalArgumentException("inconsistent no_import parameter found for class [" + canonicalClassName + "]");
}
}
}
}
@ -440,36 +293,32 @@ public final class PainlessLookupBuilder {
try {
javaConstructor = targetClass.getConstructor(javaTypeParameters.toArray(new Class<?>[typeParametersSize]));
} catch (NoSuchMethodException nsme) {
throw new IllegalArgumentException("constructor reflection object " +
"[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme);
throw new IllegalArgumentException("reflection object not found for constructor " +
"[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", nsme);
}
MethodHandle methodHandle;
try {
methodHandle = MethodHandles.publicLookup().in(targetClass).unreflectConstructor(javaConstructor);
} catch (IllegalAccessException iae) {
throw new IllegalArgumentException("method handle not found for constructor " +
"[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", iae);
}
MethodType methodType = methodHandle.type();
String painlessConstructorKey = buildPainlessConstructorKey(typeParametersSize);
PainlessConstructor existingPainlessConstructor = painlessClassBuilder.constructors.get(painlessConstructorKey);
PainlessConstructor newPainlessConstructor = new PainlessConstructor(javaConstructor, typeParameters, methodHandle, methodType);
if (existingPainlessConstructor == null) {
newPainlessConstructor = painlessConstructorCache.computeIfAbsent(newPainlessConstructor, key -> key);
painlessClassBuilder.constructors.put(painlessConstructorKey, newPainlessConstructor);
} else if (newPainlessConstructor.equals(existingPainlessConstructor) == false){
throw new IllegalArgumentException("cannot add constructors with the same arity but are not equivalent for constructors " +
"[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(typeParameters) + "] and " +
"[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(painlessConstructor.typeParameters) + "] " +
"with the same arity and different type parameters");
"[[" + targetCanonicalClassName + "], " + typesToCanonicalTypeNames(existingPainlessConstructor.typeParameters) + "]");
}
}
@ -578,8 +427,8 @@ public final class PainlessLookupBuilder {
try {
javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class<?>[typeParametersSize]));
} catch (NoSuchMethodException nsme) {
throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " +
"[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme);
throw new IllegalArgumentException("reflection object not found for method [[" + targetCanonicalClassName + "], " +
"[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]", nsme);
}
} else {
try {
@ -591,9 +440,9 @@ public final class PainlessLookupBuilder {
"[" + typeToCanonicalTypeName(augmentedClass) + "] must be static");
}
} catch (NoSuchMethodException nsme) {
throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " +
"[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " +
"with augmented target class [" + typeToCanonicalTypeName(augmentedClass) + "]", nsme);
throw new IllegalArgumentException("reflection object not found for method " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] " +
"with augmented class [" + typeToCanonicalTypeName(augmentedClass) + "]", nsme);
}
}
@ -604,79 +453,54 @@ public final class PainlessLookupBuilder {
typesToCanonicalTypeNames(typeParameters) + "]");
}
MethodHandle methodHandle;
if (augmentedClass == null) {
try {
methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod);
} catch (IllegalAccessException iae) {
throw new IllegalArgumentException("method handle not found for method " +
"[[" + targetClass.getCanonicalName() + "], [" + methodName + "], " +
typesToCanonicalTypeNames(typeParameters) + "]", iae);
}
} else {
try {
methodHandle = MethodHandles.publicLookup().in(augmentedClass).unreflect(javaMethod);
} catch (IllegalAccessException iae) {
throw new IllegalArgumentException("method handle not found for method " +
"[[" + targetClass.getCanonicalName() + "], [" + methodName + "], " +
typesToCanonicalTypeNames(typeParameters) + "]" +
"with augmented class [" + typeToCanonicalTypeName(augmentedClass) + "]", iae);
}
}
MethodType methodType = methodHandle.type();
boolean isStatic = augmentedClass == null && Modifier.isStatic(javaMethod.getModifiers());
String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize);
PainlessMethod existingPainlessMethod = isStatic ?
painlessClassBuilder.staticMethods.get(painlessMethodKey) :
painlessClassBuilder.methods.get(painlessMethodKey);
PainlessMethod newPainlessMethod =
new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType);
if (existingPainlessMethod == null) {
newPainlessMethod = painlessMethodCache.computeIfAbsent(newPainlessMethod, key -> key);
if (isStatic) {
painlessClassBuilder.staticMethods.put(painlessMethodKey, newPainlessMethod);
} else {
painlessClassBuilder.methods.put(painlessMethodKey, newPainlessMethod);
}
} else if (newPainlessMethod.equals(existingPainlessMethod) == false) {
throw new IllegalArgumentException("cannot add methods with the same name and arity but are not equivalent for methods " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " +
"[" + typeToCanonicalTypeName(returnType) + "], " +
typesToCanonicalTypeNames(typeParameters) + "] and " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " +
"[" + typeToCanonicalTypeName(existingPainlessMethod.returnType) + "], " +
typesToCanonicalTypeNames(existingPainlessMethod.typeParameters) + "]");
}
}
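The registration path above now interns the freshly built PainlessMethod itself via painlessMethodCache.computeIfAbsent(newPainlessMethod, key -> key), rather than looking up a dedicated PainlessMethodCacheKey. A minimal sketch of this interning idiom, assuming a plain HashMap-backed cache (InterningCache is an illustrative name, not a class in this commit):

    import java.util.HashMap;
    import java.util.Map;

    final class InterningCache<T> {
        private final Map<T, T> cache = new HashMap<>();

        // The first caller to present a value-equal instance wins; later
        // duplicates collapse to that canonical instance via equals/hashCode.
        T intern(T candidate) {
            return cache.computeIfAbsent(candidate, key -> key);
        }
    }

This only deduplicates correctly because PainlessMethod gains value-based equals and hashCode later in this diff.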
public void addPainlessField(String targetCanonicalClassName, String fieldName, String canonicalTypeNameParameter) {
@ -687,7 +511,8 @@ public final class PainlessLookupBuilder {
Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);
if (targetClass == null) {
throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found");
throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for field " +
"[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + canonicalTypeNameParameter + "]]");
}
Class<?> typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter);
@ -721,7 +546,8 @@ public final class PainlessLookupBuilder {
PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass);
if (painlessClassBuilder == null) {
throw new IllegalArgumentException("class [" + targetCanonicalClassName + "] not found");
throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for field " +
"[[" + targetCanonicalClassName + "], [" + fieldName + "], [" + typeToCanonicalTypeName(typeParameter) + "]]");
}
if (isValidType(typeParameter) == false) {
@ -735,7 +561,7 @@ public final class PainlessLookupBuilder {
javaField = targetClass.getField(fieldName);
} catch (NoSuchFieldException nsme) {
throw new IllegalArgumentException(
"field reflection object [[" + targetCanonicalClassName + "], [" + fieldName + "] not found", nsme);
"reflection object not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]", nsme);
}
if (javaField.getType() != typeToJavaType(typeParameter)) {
@ -760,20 +586,18 @@ public final class PainlessLookupBuilder {
throw new IllegalArgumentException("static field [[" + targetCanonicalClassName + "], [" + fieldName + "]] must be final");
}
PainlessField painlessField = painlessClassBuilder.staticFields.get(painlessFieldKey);
PainlessField existingPainlessField = painlessClassBuilder.staticFields.get(painlessFieldKey);
PainlessField newPainlessField = new PainlessField(javaField, typeParameter, methodHandleGetter, null);
if (painlessField == null) {
painlessField = painlessFieldCache.computeIfAbsent(
new PainlessFieldCacheKey(targetClass, fieldName, typeParameter),
key -> new PainlessField(javaField, typeParameter, methodHandleGetter, null));
painlessClassBuilder.staticFields.put(painlessFieldKey, painlessField);
} else if (painlessField.typeParameter != typeParameter) {
throw new IllegalArgumentException("cannot have static fields " +
if (existingPainlessField == null) {
newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key);
painlessClassBuilder.staticFields.put(painlessFieldKey, newPainlessField);
} else if (newPainlessField.equals(existingPainlessField) == false) {
throw new IllegalArgumentException("cannot add fields with the same name but are not equivalent for fields " +
"[[" + targetCanonicalClassName + "], [" + fieldName + "], [" +
typeToCanonicalTypeName(typeParameter) + "] and " +
"[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " +
typeToCanonicalTypeName(painlessField.typeParameter) + "] " +
"[[" + targetCanonicalClassName + "], [" + existingPainlessField.javaField.getName() + "], " +
typeToCanonicalTypeName(existingPainlessField.typeParameter) + "] " +
"with the same name and different type parameters");
}
} else {
@ -786,35 +610,41 @@ public final class PainlessLookupBuilder {
"setter method handle not found for field [[" + targetCanonicalClassName + "], [" + fieldName + "]]");
}
PainlessField painlessField = painlessClassBuilder.fields.get(painlessFieldKey);
PainlessField existingPainlessField = painlessClassBuilder.fields.get(painlessFieldKey);
PainlessField newPainlessField = new PainlessField(javaField, typeParameter, methodHandleGetter, methodHandleSetter);
if (painlessField == null) {
painlessField = painlessFieldCache.computeIfAbsent(
new PainlessFieldCacheKey(targetClass, painlessFieldKey, typeParameter),
key -> new PainlessField(javaField, typeParameter, methodHandleGetter, methodHandleSetter));
painlessClassBuilder.fields.put(fieldName, painlessField);
} else if (painlessField.typeParameter != typeParameter) {
throw new IllegalArgumentException("cannot have fields " +
if (existingPainlessField == null) {
newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key);
painlessClassBuilder.fields.put(painlessFieldKey, newPainlessField);
} else if (newPainlessField.equals(existingPainlessField) == false) {
throw new IllegalArgumentException("cannot add fields with the same name but are not equivalent for fields " +
"[[" + targetCanonicalClassName + "], [" + fieldName + "], [" +
typeToCanonicalTypeName(typeParameter) + "] and " +
"[[" + targetCanonicalClassName + "], [" + painlessField.javaField.getName() + "], " +
typeToCanonicalTypeName(painlessField.typeParameter) + "] " +
"[[" + targetCanonicalClassName + "], [" + existingPainlessField.javaField.getName() + "], " +
typeToCanonicalTypeName(existingPainlessField.typeParameter) + "] " +
"with the same name and different type parameters");
}
}
}
public void addImportedPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName,
public void addImportedPainlessMethod(ClassLoader classLoader, String targetJavaClassName,
String methodName, String returnCanonicalTypeName, List<String> canonicalTypeNameParameters) {
Objects.requireNonNull(classLoader);
Objects.requireNonNull(targetCanonicalClassName);
Objects.requireNonNull(targetJavaClassName);
Objects.requireNonNull(methodName);
Objects.requireNonNull(returnCanonicalTypeName);
Objects.requireNonNull(canonicalTypeNameParameters);
Class<?> targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName);
Class<?> targetClass;
try {
targetClass = Class.forName(targetJavaClassName, true, classLoader);
} catch (ClassNotFoundException cnfe) {
throw new IllegalArgumentException("class [" + targetJavaClassName + "] not found", cnfe);
}
String targetCanonicalClassName = typeToCanonicalTypeName(targetClass);
if (targetClass == null) {
throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for imported method " +
@ -913,35 +743,33 @@ public final class PainlessLookupBuilder {
throw new IllegalArgumentException("imported method and class binding cannot have the same name [" + methodName + "]");
}
PainlessMethod importedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey);
MethodHandle methodHandle;
if (importedPainlessMethod == null) {
MethodHandle methodHandle;
try {
methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod);
} catch (IllegalAccessException iae) {
throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " +
"[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae);
}
try {
methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod);
} catch (IllegalAccessException iae) {
throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " +
"[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae);
}
MethodType methodType = methodHandle.type();
MethodType methodType = methodHandle.type();
PainlessMethod existingImportedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey);
PainlessMethod newImportedPainlessMethod =
new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType);
importedPainlessMethod = painlessMethodCache.computeIfAbsent(
new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters),
key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType));
painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, importedPainlessMethod);
} else if (importedPainlessMethod.returnType == returnType &&
importedPainlessMethod.typeParameters.equals(typeParameters) == false) {
throw new IllegalArgumentException("cannot have imported methods " +
if (existingImportedPainlessMethod == null) {
newImportedPainlessMethod = painlessMethodCache.computeIfAbsent(newImportedPainlessMethod, key -> key);
painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, newImportedPainlessMethod);
} else if (newImportedPainlessMethod.equals(existingImportedPainlessMethod) == false) {
throw new IllegalArgumentException("cannot add imported methods with the same name and arity " +
"but are not equivalent for methods " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " +
"[" + typeToCanonicalTypeName(returnType) + "], " +
typesToCanonicalTypeNames(typeParameters) + "] and " +
"[[" + targetCanonicalClassName + "], [" + methodName + "], " +
"[" + typeToCanonicalTypeName(importedPainlessMethod.returnType) + "], " +
typesToCanonicalTypeNames(importedPainlessMethod.typeParameters) + "] " +
"with the same arity and different return type or type parameters");
"[" + typeToCanonicalTypeName(existingImportedPainlessMethod.returnType) + "], " +
typesToCanonicalTypeNames(existingImportedPainlessMethod.typeParameters) + "]");
}
}
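addImportedPainlessMethod now resolves its target class by Java name through the supplied ClassLoader instead of consulting the canonical-name map. A self-contained sketch of that resolution step; the resolveTarget helper and the java.lang.Math example are illustrative, not part of this commit:

    static Class<?> resolveTarget(ClassLoader classLoader, String targetJavaClassName) {
        try {
            // true => initialize the class, matching the Class.forName call above
            return Class.forName(targetJavaClassName, true, classLoader);
        } catch (ClassNotFoundException cnfe) {
            throw new IllegalArgumentException("class [" + targetJavaClassName + "] not found", cnfe);
        }
    }

    // e.g. resolveTarget(ClassLoader.getSystemClassLoader(), "java.lang.Math")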
@ -987,7 +815,6 @@ public final class PainlessLookupBuilder {
}
public void addPainlessClassBinding(Class<?> targetClass, String methodName, Class<?> returnType, List<Class<?>> typeParameters) {
Objects.requireNonNull(targetClass);
Objects.requireNonNull(methodName);
Objects.requireNonNull(returnType);
@ -1100,31 +927,24 @@ public final class PainlessLookupBuilder {
throw new IllegalArgumentException("class binding and imported method cannot have the same name [" + methodName + "]");
}
PainlessClassBinding painlessClassBinding = painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey);
PainlessClassBinding existingPainlessClassBinding = painlessMethodKeysToPainlessClassBindings.get(painlessMethodKey);
PainlessClassBinding newPainlessClassBinding =
new PainlessClassBinding(javaConstructor, javaMethod, returnType, typeParameters);
if (painlessClassBinding == null) {
Constructor<?> finalJavaConstructor = javaConstructor;
Method finalJavaMethod = javaMethod;
painlessClassBinding = painlessClassBindingCache.computeIfAbsent(
new PainlessClassBindingCacheKey(targetClass, methodName, returnType, typeParameters),
key -> new PainlessClassBinding(finalJavaConstructor, finalJavaMethod, returnType, typeParameters));
painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey, painlessClassBinding);
} else if (painlessClassBinding.javaConstructor.equals(javaConstructor) == false ||
painlessClassBinding.javaMethod.equals(javaMethod) == false ||
painlessClassBinding.returnType != returnType ||
painlessClassBinding.typeParameters.equals(typeParameters) == false) {
throw new IllegalArgumentException("cannot have class bindings " +
if (existingPainlessClassBinding == null) {
newPainlessClassBinding = painlessClassBindingCache.computeIfAbsent(newPainlessClassBinding, key -> key);
painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey, newPainlessClassBinding);
} else if (newPainlessClassBinding.equals(existingPainlessClassBinding) == false) {
throw new IllegalArgumentException("cannot add class bindings with the same name and arity " +
"but are not equivalent for methods " +
"[[" + targetCanonicalClassName + "], " +
"[" + methodName + "], " +
"[" + typeToCanonicalTypeName(returnType) + "], " +
typesToCanonicalTypeNames(typeParameters) + "] and " +
"[[" + targetCanonicalClassName + "], " +
"[" + methodName + "], " +
"[" + typeToCanonicalTypeName(painlessClassBinding.returnType) + "], " +
typesToCanonicalTypeNames(painlessClassBinding.typeParameters) + "] and " +
"with the same name and arity but different constructors or methods");
"[" + typeToCanonicalTypeName(existingPainlessClassBinding.returnType) + "], " +
typesToCanonicalTypeNames(existingPainlessClassBinding.typeParameters) + "]");
}
}

View File

@ -24,6 +24,7 @@ import java.lang.invoke.MethodType;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
public class PainlessMethod {
@ -44,4 +45,28 @@ public class PainlessMethod {
this.methodHandle = methodHandle;
this.methodType = methodType;
}
@Override
public boolean equals(Object object) {
if (this == object) {
return true;
}
if (object == null || getClass() != object.getClass()) {
return false;
}
PainlessMethod that = (PainlessMethod)object;
return Objects.equals(javaMethod, that.javaMethod) &&
Objects.equals(targetClass, that.targetClass) &&
Objects.equals(returnType, that.returnType) &&
Objects.equals(typeParameters, that.typeParameters) &&
Objects.equals(methodType, that.methodType);
}
@Override
public int hashCode() {
return Objects.hash(javaMethod, targetClass, returnType, typeParameters, methodType);
}
}
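The equals and hashCode above compare methodType but deliberately omit methodHandle, since java.lang.invoke.MethodHandle does not override Object.equals and two handles unreflected from the same Method would otherwise never match. A runnable sketch of that distinction (HandleEqualityDemo is an illustrative name):

    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;
    import java.lang.reflect.Method;

    public class HandleEqualityDemo {
        public static void main(String[] args) throws Exception {
            Method javaMethod = Integer.class.getMethod("toString");
            MethodHandle h1 = MethodHandles.publicLookup().unreflect(javaMethod);
            MethodHandle h2 = MethodHandles.publicLookup().unreflect(javaMethod);
            // Distinct handle instances compare by identity, typically printing false ...
            System.out.println(h1.equals(h2));
            // ... while MethodType is a value-comparable description: prints true
            System.out.println(h1.type().equals(h2.type()));
        }
    }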

View File

@ -91,7 +91,7 @@ public class ChildrenIT extends ParentChildTestCase {
String category = categories[j] = uniqueCategories[catIndex++ % uniqueCategories.length];
Control control = categoryToControl.get(category);
if (control == null) {
categoryToControl.put(category, control = new Control(category));
categoryToControl.put(category, control = new Control());
}
control.articleIds.add(id);
}
@ -457,14 +457,8 @@ public class ChildrenIT extends ParentChildTestCase {
}
private static final class Control {
final String category;
final Set<String> articleIds = new HashSet<>();
final Set<String> commentIds = new HashSet<>();
final Map<String, Set<String>> commenterToCommentId = new HashMap<>();
private Control(String category) {
this.category = category;
}
}
}

View File

@ -737,9 +737,6 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
private final Script script;
private final Map<String, Object> params;
private UpdateScript executable;
private Map<String, Object> context;
public ScriptApplier(WorkerBulkByScrollTaskState taskWorker,
ScriptService scriptService,
Script script,
@ -756,16 +753,8 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
if (script == null) {
return request;
}
if (executable == null) {
UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT);
executable = factory.newInstance(params);
}
if (context == null) {
context = new HashMap<>();
} else {
context.clear();
}
Map<String, Object> context = new HashMap<>();
context.put(IndexFieldMapper.NAME, doc.getIndex());
context.put(TypeFieldMapper.NAME, doc.getType());
context.put(IdFieldMapper.NAME, doc.getId());
@ -778,7 +767,9 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
OpType oldOpType = OpType.INDEX;
context.put("op", oldOpType.toString());
executable.execute(context);
UpdateScript.Factory factory = scriptService.compile(script, UpdateScript.CONTEXT);
UpdateScript updateScript = factory.newInstance(params, context);
updateScript.execute();
String newOp = (String) context.remove("op");
if (newOp == null) {

View File

@ -45,6 +45,7 @@ import java.util.Collections;
public class ReindexDocumentationIT extends ESIntegTestCase {
@SuppressWarnings("unused")
public void reindex() {
Client client = client();
// tag::reindex1
@ -55,6 +56,7 @@ public class ReindexDocumentationIT extends ESIntegTestCase {
// end::reindex1
}
@SuppressWarnings("unused")
public void updateByQuery() {
Client client = client();
{
@ -165,6 +167,7 @@ public class ReindexDocumentationIT extends ESIntegTestCase {
}
}
@SuppressWarnings("unused")
public void deleteByQuery() {
Client client = client();
// tag::delete-by-query-sync

View File

@ -56,13 +56,12 @@ public abstract class AbstractAsyncBulkByScrollActionScriptTestCase<
protected <T extends ActionRequest> T applyScript(Consumer<Map<String, Object>> scriptBody) {
IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar"));
ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0);
UpdateScript updateScript = new UpdateScript(Collections.emptyMap()) {
UpdateScript.Factory factory = (params, ctx) -> new UpdateScript(Collections.emptyMap(), ctx) {
@Override
public void execute(Map<String, Object> ctx) {
public void execute() {
scriptBody.accept(ctx);
}
};
UpdateScript.Factory factory = params -> updateScript;
};
ExecutableScript simpleExecutableScript = new SimpleExecutableScript(scriptBody);
when(scriptService.compile(any(), eq(ExecutableScript.CONTEXT))).thenReturn(params -> simpleExecutableScript);
when(scriptService.compile(any(), eq(UpdateScript.CONTEXT))).thenReturn(factory);

View File

@ -34,13 +34,13 @@ compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-tr
dependencies {
// network stack
compile "io.netty:netty-buffer:4.1.28.Final"
compile "io.netty:netty-codec:4.1.28.Final"
compile "io.netty:netty-codec-http:4.1.28.Final"
compile "io.netty:netty-common:4.1.28.Final"
compile "io.netty:netty-handler:4.1.28.Final"
compile "io.netty:netty-resolver:4.1.28.Final"
compile "io.netty:netty-transport:4.1.28.Final"
compile "io.netty:netty-buffer:4.1.29.Final"
compile "io.netty:netty-codec:4.1.29.Final"
compile "io.netty:netty-codec-http:4.1.29.Final"
compile "io.netty:netty-common:4.1.29.Final"
compile "io.netty:netty-handler:4.1.29.Final"
compile "io.netty:netty-resolver:4.1.29.Final"
compile "io.netty:netty-transport:4.1.29.Final"
}
dependencyLicenses {

View File

@ -1 +0,0 @@
d6c2d13492778009d33f60e05ed90bcb535d1fd1

View File

@ -0,0 +1 @@
c3809f72e4b535b343b7dfa3c0c8210dad2fa5ea

View File

@ -1 +0,0 @@
a38361d893900947524f8a9da980555950e73d6a

View File

@ -0,0 +1 @@
1651bc2e279216773c234cafe402d68d2a5adc90

View File

@ -1 +0,0 @@
897100c1022c780b0a436b9349e507e8fa9800dc

View File

@ -0,0 +1 @@
454688b88cea27a4d407202d1fc79a6522345b5e

View File

@ -1 +0,0 @@
df69ce8bb9b544a71e7bbee290253cf7c93e6bad

View File

@ -0,0 +1 @@
a5d6a735ed07d8f197daa48db7f097cfc971ee5e

View File

@ -1 +0,0 @@
a035784682da0126bc25f10713dac732b5082a6d

View File

@ -0,0 +1 @@
1acf1d94799296a2517533ec75ce7e155e9c4ea7

View File

@ -1 +0,0 @@
f33557dcb31fa20da075ac05e4808115e32ef9b7

View File

@ -0,0 +1 @@
bbec1dc913732e4773893c14d795b15d6c1e878e

Some files were not shown because too many files have changed in this diff.