From b15f27f6a6bed3c349169a7c928e6c5460548bdb Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 17 Dec 2018 13:36:48 +0200 Subject: [PATCH 01/26] SQL: Scripting support for casting functions CAST and CONVERT (#36640) --- .../sql/qa/src/main/resources/agg.csv-spec | 66 +++++++++++++++++-- .../sql/expression/function/scalar/Cast.java | 16 +++++ .../whitelist/InternalSqlScriptUtils.java | 8 +++ .../xpack/sql/plugin/sql_whitelist.txt | 5 ++ .../xpack/sql/planner/QueryFolderTests.java | 8 +-- 5 files changed, 93 insertions(+), 10 deletions(-) diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec index 5d1e59ef7a2..d4837bfdafc 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.csv-spec @@ -130,12 +130,66 @@ SELECT MAX(languages) max, MIN(languages) min, SUM(languages) sum, AVG(languages FROM test_emp GROUP BY languages ORDER BY languages ASC LIMIT 5; max:bt | min:bt | sum:bt | avg:d | percent:d | percent_rank:d| kurtosis:d | skewness:d ----------------+---------------+---------------+---------------+---------------+---------------+---------------+--------------- -null |null |null |null |null |null |null |null -1 |1 |15 |1 |1.0 |100.0 |NaN |NaN -2 |2 |38 |2 |2.0 |100.0 |NaN |NaN -3 |3 |51 |3 |3.0 |100.0 |NaN |NaN -4 |4 |72 |4 |4.0 |0.0 |NaN |NaN +---------------+---------------+---------------+--------------+---------------+---------------+---------------+--------------- +null |null |null |null |null |null |null |null +1 |1 |15 |1 |1.0 |100.0 |NaN |NaN +2 |2 |38 |2 |2.0 |100.0 |NaN |NaN +3 |3 |51 |3 |3.0 |100.0 |NaN |NaN +4 |4 |72 |4 |4.0 |0.0 |NaN |NaN +; + +aggByComplexCastedValue +SELECT CONVERT(CONCAT(LTRIM(CONVERT("emp_no", SQL_VARCHAR)), LTRIM(CONVERT("languages", SQL_VARCHAR))), SQL_BIGINT) AS "TEMP" +FROM "test_emp" GROUP BY "TEMP" ORDER BY "TEMP" LIMIT 20; + + TEMP:l +--------------- +10020 +10021 +10022 +10023 +10024 +10025 +10026 +10027 +10028 +10029 +100012 +100025 +100034 +100045 +100051 +100063 +100074 +100082 +100091 +100104 +; + +aggAndOrderByCastedValue +SELECT CHAR_LENGTH(SPACE(CAST(languages AS SMALLINT))), COUNT(*) FROM test_emp GROUP BY 1 ORDER BY 1 DESC; + +CHAR_LENGTH(SPACE(CAST(languages AS SMALLINT))):i| COUNT(1):l +-------------------------------------------------+--------------- +5 |21 +4 |18 +3 |17 +2 |19 +1 |15 +null |10 +; + +aggAndOrderByCastedFunctionValue +SELECT ROUND(SQRT(CAST(EXP(languages) AS SMALLINT)), 2), COUNT(*) FROM test_emp GROUP BY 1 ORDER BY 1 DESC; + +ROUND(SQRT(CAST(EXP(languages) AS SMALLINT)),2):d| COUNT(1):l +-------------------------------------------------+--------------- +12.17 |21 +7.42 |18 +4.47 |17 +2.65 |19 +1.73 |15 +null |10 ; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java index 5c874cc7667..d4265d123e8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Cast.java @@ -7,14 +7,18 @@ package org.elasticsearch.xpack.sql.expression.function.scalar; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.tree.Location; 
import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.type.DataTypes; +import java.util.Locale; import java.util.Objects; +import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; + public class Cast extends UnaryScalarFunction { private final DataType dataType; @@ -74,6 +78,18 @@ public class Cast extends UnaryScalarFunction { return new CastProcessor(DataTypeConversion.conversionFor(from(), to())); } + @Override + public ScriptTemplate asScript() { + ScriptTemplate fieldAsScript = asScript(field()); + return new ScriptTemplate( + formatTemplate(String.format(Locale.ROOT, "{sql}.cast(%s,{})", fieldAsScript.template())), + paramsBuilder() + .script(fieldAsScript.params()) + .variable(dataType.name()) + .build(), + dataType()); + } + @Override public int hashCode() { return Objects.hash(super.hashCode(), dataType); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index b107598710c..cdc773a91af 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.Bina import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InProcessor; import org.elasticsearch.xpack.sql.expression.predicate.regex.RegexProcessor.RegexOperation; import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.util.DateUtils; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -458,4 +459,11 @@ public final class InternalSqlScriptUtils { public static String ucase(String s) { return (String) StringOperation.UCASE.apply(s); } + + // + // Casting + // + public static Object cast(Object value, String typeName) { + return DataTypeConversion.convert(value, DataType.fromTypeName(typeName)); + } } diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index b5b19004eee..4e9fc1475e3 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -130,4 +130,9 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalS String space(Number) String substring(String, Number, Number) String ucase(String) + +# +# Casting +# + def cast(Object, String) } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java index 6a6a1e2dd8e..617a4634826 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java @@ -240,10 +240,10 @@ public class QueryFolderTests extends ESTestCase { 
        assertEquals(EsQueryExec.class, p.getClass());
         EsQueryExec ee = (EsQueryExec) p;
         assertThat(ee.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""),
-                endsWith("{\"script\":{" +
-                    "\"source\":\"InternalSqlScriptUtils.docValue(doc,params.v0)\",\"lang\":\"painless\"," +
-                    "\"params\":{\"v0\":\"keyword\"}},\"missing_bucket\":true," +
-                    "\"value_type\":\"ip\",\"order\":\"asc\"}}}]}}}"));
+                endsWith("{\"script\":{\"source\":\"InternalSqlScriptUtils.cast(" +
+                    "InternalSqlScriptUtils.docValue(doc,params.v0),params.v1)\"," +
+                    "\"lang\":\"painless\",\"params\":{\"v0\":\"keyword\",\"v1\":\"IP\"}}," +
+                    "\"missing_bucket\":true,\"value_type\":\"ip\",\"order\":\"asc\"}}}]}}}"));
         assertEquals(2, ee.output().size());
         assertThat(ee.output().get(0).toString(), startsWith("COUNT(1){a->"));
         assertThat(ee.output().get(1).toString(), startsWith("a{s->"));

From 3f128a89bcac93c17b6b8edc59892d3f9404cddd Mon Sep 17 00:00:00 2001
From: Costin Leau
Date: Mon, 17 Dec 2018 13:40:20 +0200
Subject: [PATCH 02/26] SQL: Disable integration test due to TZ issues

Due to the difference in TZ between H2 and SQL, disable the minute test
for NOW().

Fix #36695
---
 x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec
index 16fe5511e4d..0f8a16b9e7b 100644
--- a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec
+++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec
@@ -125,5 +125,5 @@ SELECT MONTH(CURRENT_TIMESTAMP()) AS result;
 currentTimestampHour-Ignore
 SELECT HOUR(CURRENT_TIMESTAMP()) AS result;
 
-currentTimestampMinute
+currentTimestampMinute-Ignore
 SELECT MINUTE(CURRENT_TIMESTAMP()) AS result;

From b376edf26982f7b7756102664086448cba93e176 Mon Sep 17 00:00:00 2001
From: Costin Leau
Date: Mon, 17 Dec 2018 13:55:44 +0200
Subject: [PATCH 03/26] SQL: Move internals from TimeZone to ZoneId (#36651)

As the internals have moved to java.time, the usage of TimeZone itself
should be minimized, as it creates issues when being converted to ZoneId.
Protocol-wise the two are mostly identical, so consumers should not see
any difference.
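As a minimal illustrative sketch (assuming only the standard java.time
bridge methods; the TimeZoneRoundTrip class name is invented for the
example), the lossless round-trip the protocol relies on looks like this:

    import java.time.ZoneId;
    import java.util.TimeZone;

    public class TimeZoneRoundTrip {
        public static void main(String[] args) {
            // Every TimeZone maps onto a ZoneId; normalized() folds
            // fixed-offset regions such as "UTC" into an offset ("Z").
            ZoneId zoneId = TimeZone.getTimeZone("UTC").toZoneId().normalized();
            // ...and every ZoneId maps back onto a TimeZone, so clients
            // that keep sending time-zone IDs remain unaffected.
            TimeZone roundTripped = TimeZone.getTimeZone(zoneId);
            System.out.println(zoneId + " / " + roundTripped.getID()); // Z / UTC
        }
    }

This mirrors the JdbcConfiguration change below, which parses the
time_zone property into a ZoneId internally and converts back to a
TimeZone only at the public API boundary.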
Note that, terminology-wise, inside the docs, the public API and the
protocol, timeZone will continue to be used as it's more widely
understood, as opposed to zoneId, which is an implementation detail
specific to the JVM.

Fix #36535
---
 .../xpack/sql/jdbc/JdbcConfiguration.java | 12 +++---
 .../sql/action/AbstractSqlQueryRequest.java | 28 ++++++-------
 .../xpack/sql/action/SqlQueryRequest.java | 8 ++--
 .../sql/action/SqlQueryRequestBuilder.java | 12 +++---
 .../xpack/sql/action/SqlTranslateRequest.java | 8 ++--
 .../action/SqlTranslateRequestBuilder.java | 10 ++---
 .../sql/action/SqlQueryRequestTests.java | 10 ++---
 .../sql/action/SqlRequestParsersTests.java | 4 +-
 .../sql/action/SqlTranslateRequestTests.java | 6 +--
 .../xpack/sql/client/HttpClient.java | 4 +-
 .../xpack/sql/proto/Protocol.java | 4 +-
 .../xpack/sql/proto/SqlQueryRequest.java | 39 +++++++++++--------
 .../xpack/sql/execution/search/Querier.java | 2 +-
 .../extractor/CompositeKeyExtractor.java | 26 ++++++-------
 .../expression/function/FunctionRegistry.java | 10 ++---
 .../function/grouping/Histogram.java | 20 +++++-----
 .../scalar/datetime/BaseDateTimeFunction.java | 23 +++++------
 .../datetime/BaseDateTimeProcessor.java | 16 +++-----
 .../scalar/datetime/DateTimeFunction.java | 9 ++---
 .../datetime/DateTimeHistogramFunction.java | 6 +--
 .../scalar/datetime/DateTimeProcessor.java | 10 ++---
 .../function/scalar/datetime/DayName.java | 10 ++---
 .../function/scalar/datetime/DayOfMonth.java | 10 ++---
 .../function/scalar/datetime/DayOfWeek.java | 10 ++---
 .../function/scalar/datetime/DayOfYear.java | 10 ++---
 .../function/scalar/datetime/HourOfDay.java | 10 ++---
 .../scalar/datetime/IsoDayOfWeek.java | 10 ++---
 .../scalar/datetime/IsoWeekOfYear.java | 10 ++---
 .../function/scalar/datetime/MinuteOfDay.java | 10 ++---
 .../scalar/datetime/MinuteOfHour.java | 10 ++---
 .../function/scalar/datetime/MonthName.java | 10 ++---
 .../function/scalar/datetime/MonthOfYear.java | 10 ++---
 .../datetime/NamedDateTimeFunction.java | 10 ++---
 .../datetime/NamedDateTimeProcessor.java | 9 ++---
 .../datetime/NonIsoDateTimeFunction.java | 10 ++---
 .../datetime/NonIsoDateTimeProcessor.java | 8 ++--
 .../function/scalar/datetime/Quarter.java | 14 +++----
 .../scalar/datetime/QuarterProcessor.java | 9 ++---
 .../scalar/datetime/SecondOfMinute.java | 10 ++---
 .../function/scalar/datetime/WeekOfYear.java | 10 ++---
 .../function/scalar/datetime/Year.java | 10 ++---
 .../xpack/sql/planner/QueryFolder.java | 18 ++++-----
 .../xpack/sql/planner/QueryTranslator.java | 6 +--
 .../plugin/TransportSqlClearCursorAction.java | 2 +-
 .../sql/plugin/TransportSqlQueryAction.java | 2 +-
 .../plugin/TransportSqlTranslateAction.java | 2 +-
 .../querydsl/agg/GroupByDateHistogram.java | 26 ++++++-------
 .../sql/querydsl/container/GroupByRef.java | 12 +++---
 .../xpack/sql/session/Configuration.java | 16 ++++----
 .../xpack/sql/util/DateUtils.java | 6 +--
 .../elasticsearch/xpack/sql/TestUtils.java | 2 +-
 .../analyzer/VerifierErrorMessagesTests.java | 4 +-
 .../extractor/CompositeKeyExtractorTests.java | 18 ++++-----
 .../function/FunctionRegistryTests.java | 14 +++----
 .../scalar/DatabaseFunctionTests.java | 5 +--
 .../function/scalar/UserFunctionTests.java | 5 +--
 .../datetime/DateTimeProcessorTests.java | 3 +-
 .../scalar/datetime/DateTimeTestUtils.java | 2 +-
 .../scalar/datetime/DayOfYearTests.java | 16 ++++----
 .../datetime/NamedDateTimeProcessorTests.java | 10 ++---
 .../NonIsoDateTimeProcessorTests.java | 8 ++--
 .../datetime/QuarterProcessorTests.java | 9 ++---
.../arithmetic/BinaryArithmeticTests.java | 12 +++--- .../xpack/sql/optimizer/OptimizerTests.java | 7 ++-- .../sql/type/DataTypeConversionTests.java | 2 +- 65 files changed, 329 insertions(+), 345 deletions(-) diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java index 472504dd5ad..43d296058f4 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/JdbcConfiguration.java @@ -11,6 +11,7 @@ import org.elasticsearch.xpack.sql.client.Version; import java.net.URI; import java.sql.DriverPropertyInfo; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -69,7 +70,7 @@ class JdbcConfiguration extends ConnectionConfiguration { private final String debugOut; // mutable ones - private TimeZone timeZone; + private ZoneId zoneId; public static JdbcConfiguration create(String u, Properties props, int loginTimeoutSeconds) throws JdbcSQLException { URI uri = parseUrl(u); @@ -148,7 +149,8 @@ class JdbcConfiguration extends ConnectionConfiguration { this.debug = parseValue(DEBUG, props.getProperty(DEBUG, DEBUG_DEFAULT), Boolean::parseBoolean); this.debugOut = props.getProperty(DEBUG_OUTPUT, DEBUG_OUTPUT_DEFAULT); - this.timeZone = parseValue(TIME_ZONE, props.getProperty(TIME_ZONE, TIME_ZONE_DEFAULT), TimeZone::getTimeZone); + this.zoneId = parseValue(TIME_ZONE, props.getProperty(TIME_ZONE, TIME_ZONE_DEFAULT), + s -> TimeZone.getTimeZone(s).toZoneId().normalized()); } @Override @@ -165,11 +167,11 @@ class JdbcConfiguration extends ConnectionConfiguration { } public TimeZone timeZone() { - return timeZone; + return zoneId != null ? TimeZone.getTimeZone(zoneId) : null; } public void timeZone(TimeZone timeZone) { - this.timeZone = timeZone; + this.zoneId = timeZone != null ? 
timeZone.toZoneId() : null; } public static boolean canAccept(String url) { @@ -186,4 +188,4 @@ class JdbcConfiguration extends ConnectionConfiguration { return info.toArray(new DriverPropertyInfo[info.size()]); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java index 2b90a7d41fa..aaa8c56323d 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/AbstractSqlQueryRequest.java @@ -21,10 +21,10 @@ import org.elasticsearch.xpack.sql.proto.RequestInfo; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.io.IOException; +import java.time.ZoneId; import java.util.Collections; import java.util.List; import java.util.Objects; -import java.util.TimeZone; import java.util.function.Supplier; /** @@ -33,7 +33,7 @@ import java.util.function.Supplier; public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest implements CompositeIndicesRequest, ToXContentFragment { private String query = ""; - private TimeZone timeZone = Protocol.TIME_ZONE; + private ZoneId zoneId = Protocol.TIME_ZONE; private int fetchSize = Protocol.FETCH_SIZE; private TimeValue requestTimeout = Protocol.REQUEST_TIMEOUT; private TimeValue pageTimeout = Protocol.PAGE_TIMEOUT; @@ -56,12 +56,12 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme super(); } - public AbstractSqlQueryRequest(String query, List params, QueryBuilder filter, TimeZone timeZone, + public AbstractSqlQueryRequest(String query, List params, QueryBuilder filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { super(requestInfo); this.query = query; this.params = params; - this.timeZone = timeZone; + this.zoneId = zoneId; this.fetchSize = fetchSize; this.requestTimeout = requestTimeout; this.pageTimeout = pageTimeout; @@ -76,7 +76,7 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme parser.declareString((request, mode) -> request.mode(Mode.fromString(mode)), MODE); parser.declareString((request, clientId) -> request.clientId(clientId), CLIENT_ID); parser.declareObjectArray(AbstractSqlQueryRequest::params, (p, c) -> SqlTypedParamValue.fromXContent(p), PARAMS); - parser.declareString((request, zoneId) -> request.timeZone(TimeZone.getTimeZone(zoneId)), TIME_ZONE); + parser.declareString((request, zoneId) -> request.zoneId(ZoneId.of(zoneId)), TIME_ZONE); parser.declareInt(AbstractSqlQueryRequest::fetchSize, FETCH_SIZE); parser.declareString((request, timeout) -> request.requestTimeout(TimeValue.parseTimeValue(timeout, Protocol.REQUEST_TIMEOUT, "request_timeout")), REQUEST_TIMEOUT); @@ -121,15 +121,15 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme /** * The client's time zone */ - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } - public AbstractSqlQueryRequest timeZone(TimeZone timeZone) { - if (timeZone == null) { + public AbstractSqlQueryRequest zoneId(ZoneId zoneId) { + if (zoneId == null) { throw new IllegalArgumentException("time zone may not be null."); } - this.timeZone = timeZone; + this.zoneId = zoneId; return this; } @@ -194,7 +194,7 @@ public abstract class 
AbstractSqlQueryRequest extends AbstractSqlRequest impleme super(in); query = in.readString(); params = in.readList(AbstractSqlQueryRequest::readSqlTypedParamValue); - timeZone = TimeZone.getTimeZone(in.readString()); + zoneId = ZoneId.of(in.readString()); fetchSize = in.readVInt(); requestTimeout = in.readTimeValue(); pageTimeout = in.readTimeValue(); @@ -218,7 +218,7 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme for (SqlTypedParamValue param: params) { writeSqlTypedParamValue(out, param); } - out.writeString(timeZone.getID()); + out.writeString(zoneId.getId()); out.writeVInt(fetchSize); out.writeTimeValue(requestTimeout); out.writeTimeValue(pageTimeout); @@ -240,7 +240,7 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme return fetchSize == that.fetchSize && Objects.equals(query, that.query) && Objects.equals(params, that.params) && - Objects.equals(timeZone, that.timeZone) && + Objects.equals(zoneId, that.zoneId) && Objects.equals(requestTimeout, that.requestTimeout) && Objects.equals(pageTimeout, that.pageTimeout) && Objects.equals(filter, that.filter); @@ -248,6 +248,6 @@ public abstract class AbstractSqlQueryRequest extends AbstractSqlRequest impleme @Override public int hashCode() { - return Objects.hash(super.hashCode(), query, timeZone, fetchSize, requestTimeout, pageTimeout, filter); + return Objects.hash(super.hashCode(), query, zoneId, fetchSize, requestTimeout, pageTimeout, filter); } } diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java index ec3e2b331f0..60c7b66352c 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequest.java @@ -18,9 +18,9 @@ import org.elasticsearch.xpack.sql.proto.RequestInfo; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import java.io.IOException; +import java.time.ZoneId; import java.util.List; import java.util.Objects; -import java.util.TimeZone; import static org.elasticsearch.action.ValidateActions.addValidationError; @@ -40,9 +40,9 @@ public class SqlQueryRequest extends AbstractSqlQueryRequest { super(); } - public SqlQueryRequest(String query, List params, QueryBuilder filter, TimeZone timeZone, + public SqlQueryRequest(String query, List params, QueryBuilder filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, String cursor, RequestInfo requestInfo) { - super(query, params, filter, timeZone, fetchSize, requestTimeout, pageTimeout, requestInfo); + super(query, params, filter, zoneId, fetchSize, requestTimeout, pageTimeout, requestInfo); this.cursor = cursor; } @@ -104,7 +104,7 @@ public class SqlQueryRequest extends AbstractSqlQueryRequest { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { // This is needed just to test round-trip compatibility with proto.SqlQueryRequest - return new org.elasticsearch.xpack.sql.proto.SqlQueryRequest(query(), params(), timeZone(), fetchSize(), requestTimeout(), + return new org.elasticsearch.xpack.sql.proto.SqlQueryRequest(query(), params(), zoneId(), fetchSize(), requestTimeout(), pageTimeout(), filter(), cursor(), requestInfo()).toXContent(builder, params); } diff --git 
a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java index e7a670afa72..5443f09c5eb 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlQueryRequestBuilder.java @@ -14,9 +14,9 @@ import org.elasticsearch.xpack.sql.proto.Protocol; import org.elasticsearch.xpack.sql.proto.RequestInfo; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; +import java.time.ZoneId; import java.util.Collections; import java.util.List; -import java.util.TimeZone; /** * The builder to build sql request @@ -29,9 +29,9 @@ public class SqlQueryRequestBuilder extends ActionRequestBuilder params, - QueryBuilder filter, TimeZone timeZone, int fetchSize, TimeValue requestTimeout, + QueryBuilder filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, String nextPageInfo, RequestInfo requestInfo) { - super(client, action, new SqlQueryRequest(query, params, filter, timeZone, fetchSize, requestTimeout, pageTimeout, nextPageInfo, + super(client, action, new SqlQueryRequest(query, params, filter, zoneId, fetchSize, requestTimeout, pageTimeout, nextPageInfo, requestInfo)); } @@ -60,8 +60,8 @@ public class SqlQueryRequestBuilder extends ActionRequestBuilder params, QueryBuilder filter, TimeZone timeZone, + public SqlTranslateRequest(String query, List params, QueryBuilder filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { - super(query, params, filter, timeZone, fetchSize, requestTimeout, pageTimeout, requestInfo); + super(query, params, filter, zoneId, fetchSize, requestTimeout, pageTimeout, requestInfo); } public SqlTranslateRequest(StreamInput in) throws IOException { @@ -64,7 +64,7 @@ public class SqlTranslateRequest extends AbstractSqlQueryRequest { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { // This is needed just to test parsing of SqlTranslateRequest, so we can reuse SqlQuerySerialization - return new SqlQueryRequest(query(), params(), timeZone(), fetchSize(), requestTimeout(), + return new SqlQueryRequest(query(), params(), zoneId(), fetchSize(), requestTimeout(), pageTimeout(), filter(), null, requestInfo()).toXContent(builder, params); } diff --git a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java index 408f2400ef4..fa96b8f5e89 100644 --- a/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java +++ b/x-pack/plugin/sql/sql-action/src/main/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestBuilder.java @@ -14,9 +14,9 @@ import org.elasticsearch.xpack.sql.proto.Protocol; import org.elasticsearch.xpack.sql.proto.RequestInfo; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; +import java.time.ZoneId; import java.util.Collections; import java.util.List; -import java.util.TimeZone; /** * Builder for the request for the sql action for translating SQL queries into ES requests @@ -28,10 +28,10 @@ public class SqlTranslateRequestBuilder extends ActionRequestBuilder params, TimeZone timeZone, int fetchSize, 
TimeValue requestTimeout, + List params, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { super(client, action, - new SqlTranslateRequest(query, params, filter, timeZone, fetchSize, requestTimeout, pageTimeout, requestInfo)); + new SqlTranslateRequest(query, params, filter, zoneId, fetchSize, requestTimeout, pageTimeout, requestInfo)); } public SqlTranslateRequestBuilder query(String query) { @@ -39,8 +39,8 @@ public class SqlTranslateRequestBuilder extends ActionRequestBuilder request.requestInfo(randomValueOtherThan(request.requestInfo(), this::randomRequestInfo)), request -> request.query(randomValueOtherThan(request.query(), () -> randomAlphaOfLength(5))), request -> request.params(randomValueOtherThan(request.params(), this::randomParameters)), - request -> request.timeZone(randomValueOtherThan(request.timeZone(), ESTestCase::randomTimeZone)), + request -> request.zoneId(randomValueOtherThan(request.zoneId(), ESTestCase::randomZone)), request -> request.fetchSize(randomValueOtherThan(request.fetchSize(), () -> between(1, Integer.MAX_VALUE))), request -> request.requestTimeout(randomValueOtherThan(request.requestTimeout(), this::randomTV)), request -> request.filter(randomValueOtherThan(request.filter(), @@ -112,7 +112,7 @@ public class SqlQueryRequestTests extends AbstractSerializingTestCase request.cursor(randomValueOtherThan(request.cursor(), SqlQueryResponseTests::randomStringCursor)) ); SqlQueryRequest newRequest = new SqlQueryRequest(instance.query(), instance.params(), instance.filter(), - instance.timeZone(), instance.fetchSize(), instance.requestTimeout(), instance.pageTimeout(), instance.cursor(), + instance.zoneId(), instance.fetchSize(), instance.requestTimeout(), instance.pageTimeout(), instance.cursor(), instance.requestInfo()); mutator.accept(newRequest); return newRequest; @@ -120,7 +120,7 @@ public class SqlQueryRequestTests extends AbstractSerializingTestCase sqlQueryRequest.timeZone(null)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> sqlQueryRequest.zoneId(null)); assertEquals("time zone may not be null.", e.getMessage()); } } diff --git a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java index f2153065cbd..4e41dddb46c 100644 --- a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlRequestParsersTests.java @@ -114,10 +114,10 @@ public class SqlRequestParsersTests extends ESTestCase { assertEquals("whatever", request.cursor()); assertEquals("select", request.query()); - List list = new ArrayList(1); + List list = new ArrayList<>(1); list.add(new SqlTypedParamValue("whatever", 123)); assertEquals(list, request.params()); - assertEquals("UTC", request.timeZone().getID()); + assertEquals("UTC", request.zoneId().getId()); assertEquals(TimeValue.parseTimeValue("5s", "request_timeout"), request.requestTimeout()); assertEquals(TimeValue.parseTimeValue("10s", "page_timeout"), request.pageTimeout()); } diff --git a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java index 3d48f7fc7a4..4b047914067 100644 --- 
a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTranslateRequestTests.java @@ -37,7 +37,7 @@ public class SqlTranslateRequestTests extends AbstractSerializingTestCase mutator = randomFrom( request -> request.query(randomValueOtherThan(request.query(), () -> randomAlphaOfLength(5))), - request -> request.timeZone(randomValueOtherThan(request.timeZone(), ESTestCase::randomTimeZone)), + request -> request.zoneId(randomValueOtherThan(request.zoneId(), ESTestCase::randomZone)), request -> request.fetchSize(randomValueOtherThan(request.fetchSize(), () -> between(1, Integer.MAX_VALUE))), request -> request.requestTimeout(randomValueOtherThan(request.requestTimeout(), this::randomTV)), request -> request.filter(randomValueOtherThan(request.filter(), () -> request.filter() == null ? randomFilter(random()) : randomFilterOrNull(random()))) ); SqlTranslateRequest newRequest = new SqlTranslateRequest(instance.query(), instance.params(), instance.filter(), - instance.timeZone(), instance.fetchSize(), instance.requestTimeout(), instance.pageTimeout(), instance.requestInfo()); + instance.zoneId(), instance.fetchSize(), instance.requestTimeout(), instance.pageTimeout(), instance.requestInfo()); mutator.accept(newRequest); return newRequest; } diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java index 096ebb64e52..4fe6a39820b 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java @@ -32,8 +32,8 @@ import java.io.InputStream; import java.security.AccessController; import java.security.PrivilegedAction; import java.sql.SQLException; +import java.time.ZoneId; import java.util.Collections; -import java.util.TimeZone; import java.util.function.Function; import static org.elasticsearch.xpack.sql.proto.RequestInfo.CLI; @@ -66,7 +66,7 @@ public class HttpClient { public SqlQueryResponse queryInit(String query, int fetchSize) throws SQLException { // TODO allow customizing the time zone - this is what session set/reset/get should be about // method called only from CLI. 
"client_id" is set to "cli" - SqlQueryRequest sqlRequest = new SqlQueryRequest(query, Collections.emptyList(), null, TimeZone.getTimeZone("UTC"), + SqlQueryRequest sqlRequest = new SqlQueryRequest(query, Collections.emptyList(), null, ZoneId.of("Z"), fetchSize, TimeValue.timeValueMillis(cfg.queryTimeout()), TimeValue.timeValueMillis(cfg.pageTimeout()), new RequestInfo(Mode.PLAIN, CLI)); return query(sqlRequest); diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java index 8080959e3c6..a6af79e0fba 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/Protocol.java @@ -8,13 +8,13 @@ package org.elasticsearch.xpack.sql.proto; import org.elasticsearch.common.unit.TimeValue; -import java.util.TimeZone; +import java.time.ZoneId; /** * Sql protocol defaults and end-points shared between JDBC and REST protocol implementations */ public final class Protocol { - public static final TimeZone TIME_ZONE = TimeZone.getTimeZone("UTC"); + public static final ZoneId TIME_ZONE = ZoneId.of("Z"); /** * Global choice for the default fetch size. diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java index 651dc468bb9..34b19faef78 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlQueryRequest.java @@ -12,10 +12,10 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; +import java.time.ZoneId; import java.util.Collections; import java.util.List; import java.util.Objects; -import java.util.TimeZone; /** * Sql query request for JDBC/CLI client @@ -24,7 +24,7 @@ public class SqlQueryRequest extends AbstractSqlRequest { @Nullable private final String cursor; private final String query; - private final TimeZone timeZone; + private final ZoneId zoneId; private final int fetchSize; private final TimeValue requestTimeout; private final TimeValue pageTimeout; @@ -33,12 +33,12 @@ public class SqlQueryRequest extends AbstractSqlRequest { private final List params; - public SqlQueryRequest(String query, List params, TimeZone timeZone, int fetchSize, + public SqlQueryRequest(String query, List params, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, ToXContent filter, String cursor, RequestInfo requestInfo) { super(requestInfo); this.query = query; this.params = params; - this.timeZone = timeZone; + this.zoneId = zoneId; this.fetchSize = fetchSize; this.requestTimeout = requestTimeout; this.pageTimeout = pageTimeout; @@ -46,9 +46,9 @@ public class SqlQueryRequest extends AbstractSqlRequest { this.cursor = cursor; } - public SqlQueryRequest(String query, List params, ToXContent filter, TimeZone timeZone, + public SqlQueryRequest(String query, List params, ToXContent filter, ZoneId zoneId, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { - this(query, params, timeZone, fetchSize, requestTimeout, pageTimeout, filter, null, requestInfo); + this(query, params, zoneId, fetchSize, requestTimeout, pageTimeout, filter, 
null, requestInfo); } public SqlQueryRequest(String cursor, TimeValue requestTimeout, TimeValue pageTimeout, RequestInfo requestInfo) { @@ -81,8 +81,8 @@ public class SqlQueryRequest extends AbstractSqlRequest { /** * The client's time zone */ - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } @@ -116,14 +116,20 @@ public class SqlQueryRequest extends AbstractSqlRequest { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - if (!super.equals(o)) return false; + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + if (!super.equals(o)) { + return false; + } SqlQueryRequest that = (SqlQueryRequest) o; return fetchSize == that.fetchSize && Objects.equals(query, that.query) && Objects.equals(params, that.params) && - Objects.equals(timeZone, that.timeZone) && + Objects.equals(zoneId, that.zoneId) && Objects.equals(requestTimeout, that.requestTimeout) && Objects.equals(pageTimeout, that.pageTimeout) && Objects.equals(filter, that.filter) && @@ -132,7 +138,7 @@ public class SqlQueryRequest extends AbstractSqlRequest { @Override public int hashCode() { - return Objects.hash(super.hashCode(), query, timeZone, fetchSize, requestTimeout, pageTimeout, filter, cursor); + return Objects.hash(super.hashCode(), query, zoneId, fetchSize, requestTimeout, pageTimeout, filter, cursor); } @Override @@ -151,8 +157,8 @@ public class SqlQueryRequest extends AbstractSqlRequest { } builder.endArray(); } - if (timeZone != null) { - builder.field("time_zone", timeZone.getID()); + if (zoneId != null) { + builder.field("time_zone", zoneId.getId()); } if (fetchSize != Protocol.FETCH_SIZE) { builder.field("fetch_size", fetchSize); @@ -172,5 +178,4 @@ public class SqlQueryRequest extends AbstractSqlRequest { } return builder; } - -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 8a0163df0bb..14d7fa57fff 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -263,7 +263,7 @@ public class Querier { private BucketExtractor createExtractor(FieldExtraction ref, BucketExtractor totalCount) { if (ref instanceof GroupByRef) { GroupByRef r = (GroupByRef) ref; - return new CompositeKeyExtractor(r.key(), r.property(), r.timeZone()); + return new CompositeKeyExtractor(r.key(), r.property(), r.zoneId()); } if (ref instanceof MetricAggRef) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java index c799ab27dca..0c374038953 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractor.java @@ -16,7 +16,6 @@ import java.io.IOException; import java.time.ZoneId; import java.util.Map; import java.util.Objects; -import java.util.TimeZone; public class CompositeKeyExtractor implements BucketExtractor { @@ -27,40 +26,37 @@ public class CompositeKeyExtractor implements 
BucketExtractor { private final String key; private final Property property; - private final TimeZone timeZone; private final ZoneId zoneId; /** * Constructs a new CompositeKeyExtractor instance. * The time-zone parameter is used to indicate a date key. */ - public CompositeKeyExtractor(String key, Property property, TimeZone timeZone) { + public CompositeKeyExtractor(String key, Property property, ZoneId zoneId) { this.key = key; this.property = property; - this.timeZone = timeZone; - this.zoneId = timeZone != null ? timeZone.toZoneId() : null; + this.zoneId = zoneId; } CompositeKeyExtractor(StreamInput in) throws IOException { key = in.readString(); property = in.readEnum(Property.class); if (in.readBoolean()) { - timeZone = TimeZone.getTimeZone(in.readString()); + zoneId = ZoneId.of(in.readString()); } else { - timeZone = null; + zoneId = null; } - this.zoneId = timeZone != null ? timeZone.toZoneId() : null; } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(key); out.writeEnum(property); - if (timeZone == null) { + if (zoneId == null) { out.writeBoolean(false); } else { out.writeBoolean(true); - out.writeString(timeZone.getID()); + out.writeString(zoneId.getId()); } } @@ -72,8 +68,8 @@ public class CompositeKeyExtractor implements BucketExtractor { return property; } - TimeZone timeZone() { - return timeZone; + ZoneId zoneId() { + return zoneId; } @Override @@ -95,7 +91,7 @@ public class CompositeKeyExtractor implements BucketExtractor { Object object = ((Map) m).get(key); - if (timeZone != null) { + if (zoneId != null) { if (object == null) { return object; } else if (object instanceof Long) { @@ -110,7 +106,7 @@ public class CompositeKeyExtractor implements BucketExtractor { @Override public int hashCode() { - return Objects.hash(key, property, timeZone); + return Objects.hash(key, property, zoneId); } @Override @@ -126,7 +122,7 @@ public class CompositeKeyExtractor implements BucketExtractor { CompositeKeyExtractor other = (CompositeKeyExtractor) obj; return Objects.equals(key, other.key) && Objects.equals(property, other.property) - && Objects.equals(timeZone, other.timeZone); + && Objects.equals(zoneId, other.zoneId); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java index 00581ffd84e..d6faf167322 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java @@ -100,6 +100,7 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.Check; +import java.time.ZoneId; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; @@ -108,7 +109,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Map.Entry; -import java.util.TimeZone; import java.util.function.BiFunction; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -441,13 +441,13 @@ public class FunctionRegistry { if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children.get(0), cfg.timeZone()); + return ctorRef.build(location, children.get(0), cfg.zoneId()); }; return def(function, builder, true, names); } 
interface DatetimeUnaryFunctionBuilder { - T build(Location location, Expression target, TimeZone tz); + T build(Location location, Expression target, ZoneId zi); } /** @@ -463,13 +463,13 @@ public class FunctionRegistry { if (distinct) { throw new IllegalArgumentException("does not support DISTINCT yet it was specified"); } - return ctorRef.build(location, children.get(0), children.get(1), cfg.timeZone()); + return ctorRef.build(location, children.get(0), children.get(1), cfg.zoneId()); }; return def(function, builder, false, names); } interface DatetimeBinaryFunctionBuilder { - T build(Location location, Expression lhs, Expression rhs, TimeZone tz); + T build(Location location, Expression lhs, Expression rhs, ZoneId zi); } /** diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java index 200682d980a..4c1b761b1a0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/grouping/Histogram.java @@ -15,26 +15,26 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; +import java.time.ZoneId; import java.util.Objects; -import java.util.TimeZone; public class Histogram extends GroupingFunction { private final Literal interval; - private final TimeZone timeZone; + private final ZoneId zoneId; - public Histogram(Location location, Expression field, Expression interval, TimeZone timeZone) { + public Histogram(Location location, Expression field, Expression interval, ZoneId zoneId) { super(location, field); this.interval = (Literal) interval; - this.timeZone = timeZone; + this.zoneId = zoneId; } public Literal interval() { return interval; } - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } @Override @@ -54,7 +54,7 @@ public class Histogram extends GroupingFunction { @Override protected GroupingFunction replaceChild(Expression newChild) { - return new Histogram(location(), newChild, interval, timeZone); + return new Histogram(location(), newChild, interval, zoneId); } @Override @@ -64,12 +64,12 @@ public class Histogram extends GroupingFunction { @Override protected NodeInfo info() { - return NodeInfo.create(this, Histogram::new, field(), interval, timeZone); + return NodeInfo.create(this, Histogram::new, field(), interval, zoneId); } @Override public int hashCode() { - return Objects.hash(field(), interval, timeZone); + return Objects.hash(field(), interval, zoneId); } @Override @@ -77,7 +77,7 @@ public class Histogram extends GroupingFunction { if (super.equals(obj)) { Histogram other = (Histogram) obj; return Objects.equals(interval, other.interval) - && Objects.equals(timeZone, other.timeZone); + && Objects.equals(zoneId, other.zoneId); } return false; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java index cfee964b01e..1ac143c2a02 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java @@ -16,40 +16,37 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.Objects; -import java.util.TimeZone; abstract class BaseDateTimeFunction extends UnaryScalarFunction { - private final TimeZone timeZone; private final ZoneId zoneId; private final String name; - BaseDateTimeFunction(Location location, Expression field, TimeZone timeZone) { + BaseDateTimeFunction(Location location, Expression field, ZoneId zoneId) { super(location, field); - this.timeZone = timeZone; - this.zoneId = timeZone != null ? timeZone.toZoneId() : null; + this.zoneId = zoneId; StringBuilder sb = new StringBuilder(super.name()); // add timezone as last argument - sb.insert(sb.length() - 1, " [" + timeZone.getID() + "]"); + sb.insert(sb.length() - 1, " [" + zoneId.getId() + "]"); this.name = sb.toString(); } @Override protected final NodeInfo info() { - return NodeInfo.create(this, ctorForInfo(), field(), timeZone()); + return NodeInfo.create(this, ctorForInfo(), field(), zoneId()); } - protected abstract NodeInfo.NodeCtor2 ctorForInfo(); + protected abstract NodeInfo.NodeCtor2 ctorForInfo(); @Override protected TypeResolution resolveType() { return Expressions.typeMustBeDate(field(), functionName(), ParamOrdinal.DEFAULT); } - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } @Override @@ -82,11 +79,11 @@ abstract class BaseDateTimeFunction extends UnaryScalarFunction { } BaseDateTimeFunction other = (BaseDateTimeFunction) obj; return Objects.equals(other.field(), field()) - && Objects.equals(other.timeZone(), timeZone()); + && Objects.equals(other.zoneId(), zoneId()); } @Override public int hashCode() { - return Objects.hash(field(), timeZone()); + return Objects.hash(field(), zoneId()); } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java index ce6bd1ad470..608057cf235 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java @@ -14,30 +14,26 @@ import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import java.io.IOException; import java.time.ZoneId; import java.time.ZonedDateTime; -import java.util.TimeZone; public abstract class BaseDateTimeProcessor implements Processor { - private final TimeZone timeZone; private final ZoneId zoneId; - BaseDateTimeProcessor(TimeZone timeZone) { - this.timeZone = timeZone; - this.zoneId = timeZone.toZoneId(); + BaseDateTimeProcessor(ZoneId zoneId) { + this.zoneId = zoneId; } BaseDateTimeProcessor(StreamInput in) throws IOException { - timeZone = TimeZone.getTimeZone(in.readString()); - zoneId = timeZone.toZoneId(); + zoneId = ZoneId.of(in.readString()); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeString(timeZone.getID()); + out.writeString(zoneId.getId()); } - TimeZone timeZone() { - return timeZone; + ZoneId zoneId() { + return zoneId; } @Override diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java index 0b429fdf1a1..1ad00c8785f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.sql.type.DataType; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.temporal.ChronoField; -import java.util.TimeZone; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; @@ -24,8 +23,8 @@ public abstract class DateTimeFunction extends BaseDateTimeFunction { private final DateTimeExtractor extractor; - DateTimeFunction(Location location, Expression field, TimeZone timeZone, DateTimeExtractor extractor) { - super(location, field, timeZone); + DateTimeFunction(Location location, Expression field, ZoneId zoneId, DateTimeExtractor extractor) { + super(location, field, zoneId); this.extractor = extractor; } @@ -50,7 +49,7 @@ public abstract class DateTimeFunction extends BaseDateTimeFunction { ScriptTemplate script = super.asScript(); String template = formatTemplate("{sql}.dateTimeChrono(" + script.template() + ", {}, {})"); params.script(script.params()) - .variable(timeZone().getID()) + .variable(zoneId().getId()) .variable(extractor.chronoField().name()); return new ScriptTemplate(template, params.build(), dataType()); @@ -59,7 +58,7 @@ public abstract class DateTimeFunction extends BaseDateTimeFunction { @Override protected Processor makeProcessor() { - return new DateTimeProcessor(extractor, timeZone()); + return new DateTimeProcessor(extractor, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java index 1a60ba66f48..0a59c4d52ea 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeHistogramFunction.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; import org.elasticsearch.xpack.sql.tree.Location; -import java.util.TimeZone; +import java.time.ZoneId; /** * DateTimeFunctions that can be mapped as histogram. 
This means the dates order is maintained @@ -17,8 +17,8 @@ import java.util.TimeZone; */ public abstract class DateTimeHistogramFunction extends DateTimeFunction { - DateTimeHistogramFunction(Location location, Expression field, TimeZone timeZone, DateTimeExtractor extractor) { - super(location, field, timeZone, extractor); + DateTimeHistogramFunction(Location location, Expression field, ZoneId zoneId, DateTimeExtractor extractor) { + super(location, field, zoneId, extractor); } /** diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java index c248b50b51d..5357462fdd6 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java @@ -9,10 +9,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; +import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.temporal.ChronoField; import java.util.Objects; -import java.util.TimeZone; public class DateTimeProcessor extends BaseDateTimeProcessor { @@ -46,8 +46,8 @@ public class DateTimeProcessor extends BaseDateTimeProcessor { public static final String NAME = "dt"; private final DateTimeExtractor extractor; - public DateTimeProcessor(DateTimeExtractor extractor, TimeZone timeZone) { - super(timeZone); + public DateTimeProcessor(DateTimeExtractor extractor, ZoneId zoneId) { + super(zoneId); this.extractor = extractor; } @@ -78,7 +78,7 @@ public class DateTimeProcessor extends BaseDateTimeProcessor { @Override public int hashCode() { - return Objects.hash(extractor, timeZone()); + return Objects.hash(extractor, zoneId()); } @Override @@ -88,7 +88,7 @@ public class DateTimeProcessor extends BaseDateTimeProcessor { } DateTimeProcessor other = (DateTimeProcessor) obj; return Objects.equals(extractor, other.extractor) - && Objects.equals(timeZone(), other.timeZone()); + && Objects.equals(zoneId(), other.zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java index 8d6e12544d0..b5144020e63 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDate import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the day of the week from a datetime in text format (Monday, Tuesday etc.) 
*/ public class DayName extends NamedDateTimeFunction { - public DayName(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, NameExtractor.DAY_NAME); + public DayName(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, NameExtractor.DAY_NAME); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return DayName::new; } @Override protected DayName replaceChild(Expression newChild) { - return new DayName(location(), newChild, timeZone()); + return new DayName(location(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java index 3c402ef2f4a..837779888f2 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the day of the month from a datetime. */ public class DayOfMonth extends DateTimeFunction { - public DayOfMonth(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.DAY_OF_MONTH); + public DayOfMonth(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.DAY_OF_MONTH); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return DayOfMonth::new; } @Override protected DayOfMonth replaceChild(Expression newChild) { - return new DayOfMonth(location(), newChild, timeZone()); + return new DayOfMonth(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java index 9b03ed0548a..5bc54654bdf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDat import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the day of the week from a datetime in non-ISO format. 1 is Sunday, 2 is Monday, etc. 
*/ public class DayOfWeek extends NonIsoDateTimeFunction { - public DayOfWeek(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, NonIsoDateTimeExtractor.DAY_OF_WEEK); + public DayOfWeek(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, NonIsoDateTimeExtractor.DAY_OF_WEEK); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return DayOfWeek::new; } @Override protected DayOfWeek replaceChild(Expression newChild) { - return new DayOfWeek(location(), newChild, timeZone()); + return new DayOfWeek(location(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java index a6b843bd0bd..9cacb78b342 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java @@ -11,24 +11,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the day of the year from a datetime. */ public class DayOfYear extends DateTimeFunction { - public DayOfYear(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.DAY_OF_YEAR); + public DayOfYear(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.DAY_OF_YEAR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return DayOfYear::new; } @Override protected UnaryScalarFunction replaceChild(Expression newChild) { - return new DayOfYear(location(), newChild, timeZone()); + return new DayOfYear(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java index 193a14c0932..490ec721042 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the hour of the day from a datetime. 
*/ public class HourOfDay extends DateTimeFunction { - public HourOfDay(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.HOUR_OF_DAY); + public HourOfDay(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.HOUR_OF_DAY); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return HourOfDay::new; } @Override protected HourOfDay replaceChild(Expression newChild) { - return new HourOfDay(location(), newChild, timeZone()); + return new HourOfDay(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java index 16a3a0098ae..ff02f6490d0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoDayOfWeek.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the day of the week (following the ISO standard) from a datetime. 1 is Monday, 2 is Tuesday, etc. */ public class IsoDayOfWeek extends DateTimeFunction { - public IsoDayOfWeek(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.ISO_DAY_OF_WEEK); + public IsoDayOfWeek(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.ISO_DAY_OF_WEEK); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return IsoDayOfWeek::new; } @Override protected IsoDayOfWeek replaceChild(Expression newChild) { - return new IsoDayOfWeek(location(), newChild, timeZone()); + return new IsoDayOfWeek(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java index 5e540e5b846..f50deec9fe0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/IsoWeekOfYear.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the week of the year from a datetime following the ISO standard. 
 */
 public class IsoWeekOfYear extends DateTimeFunction {

-    public IsoWeekOfYear(Location location, Expression field, TimeZone timeZone) {
-        super(location, field, timeZone, DateTimeExtractor.ISO_WEEK_OF_YEAR);
+    public IsoWeekOfYear(Location location, Expression field, ZoneId zoneId) {
+        super(location, field, zoneId, DateTimeExtractor.ISO_WEEK_OF_YEAR);
     }

     @Override
-    protected NodeCtor2<Expression, TimeZone, IsoWeekOfYear> ctorForInfo() {
+    protected NodeCtor2<Expression, ZoneId, IsoWeekOfYear> ctorForInfo() {
         return IsoWeekOfYear::new;
     }

     @Override
     protected IsoWeekOfYear replaceChild(Expression newChild) {
-        return new IsoWeekOfYear(location(), newChild, timeZone());
+        return new IsoWeekOfYear(location(), newChild, zoneId());
     }

     @Override
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java
index 25ef41a18ca..e16e0caa836 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java
@@ -10,25 +10,25 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2;

-import java.util.TimeZone;
+import java.time.ZoneId;

 /**
  * Extract the minute of the day from a datetime.
  */
 public class MinuteOfDay extends DateTimeFunction {

-    public MinuteOfDay(Location location, Expression field, TimeZone timeZone) {
-        super(location, field, timeZone, DateTimeExtractor.MINUTE_OF_DAY);
+    public MinuteOfDay(Location location, Expression field, ZoneId zoneId) {
+        super(location, field, zoneId, DateTimeExtractor.MINUTE_OF_DAY);
     }

     @Override
-    protected NodeCtor2<Expression, TimeZone, MinuteOfDay> ctorForInfo() {
+    protected NodeCtor2<Expression, ZoneId, MinuteOfDay> ctorForInfo() {
         return MinuteOfDay::new;
     }

     @Override
     protected MinuteOfDay replaceChild(Expression newChild) {
-        return new MinuteOfDay(location(), newChild, timeZone());
+        return new MinuteOfDay(location(), newChild, zoneId());
     }

     @Override
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java
index 798b7007237..0a49bb042f9 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java
@@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2;

-import java.util.TimeZone;
+import java.time.ZoneId;

 /**
  * Extract the minute of the hour from a datetime.
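MinuteOfDay above and MinuteOfHour below differ only in the ChronoField they read; MINUTE_OF_DAY folds the hour in. A one-liner to make the distinction concrete:

    import java.time.LocalTime;
    import java.time.temporal.ChronoField;

    class MinuteFields {
        public static void main(String[] args) {
            LocalTime t = LocalTime.of(14, 5);
            System.out.println(t.get(ChronoField.MINUTE_OF_DAY));  // 845 = 14 * 60 + 5
            System.out.println(t.get(ChronoField.MINUTE_OF_HOUR)); // 5
        }
    }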
*/ public class MinuteOfHour extends DateTimeFunction { - public MinuteOfHour(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.MINUTE_OF_HOUR); + public MinuteOfHour(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.MINUTE_OF_HOUR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return MinuteOfHour::new; } @Override protected MinuteOfHour replaceChild(Expression newChild) { - return new MinuteOfHour(location(), newChild, timeZone()); + return new MinuteOfHour(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java index 7a951281015..570a4a2ea2d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDate import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the month from a datetime in text format (January, February etc.) */ public class MonthName extends NamedDateTimeFunction { - public MonthName(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, NameExtractor.MONTH_NAME); + public MonthName(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, NameExtractor.MONTH_NAME); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return MonthName::new; } @Override protected MonthName replaceChild(Expression newChild) { - return new MonthName(location(), newChild, timeZone()); + return new MonthName(location(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java index 9231987b5ad..88c025a7231 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the month of the year from a datetime. 
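A few hunks below, NamedDateTimeFunction starts passing zoneId().getId() instead of timeZone().getID() into the script parameters. The swap is safe because a ZoneId round-trips losslessly through its string id, as this small sketch shows:

    import java.time.ZoneId;

    class ZoneIdRoundTrip {
        public static void main(String[] args) {
            ZoneId zone = ZoneId.of("Europe/Bucharest");
            String wire = zone.getId();                       // what the script receives
            System.out.println(ZoneId.of(wire).equals(zone)); // true
        }
    }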
*/ public class MonthOfYear extends DateTimeFunction { - public MonthOfYear(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.MONTH_OF_YEAR); + public MonthOfYear(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.MONTH_OF_YEAR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return MonthOfYear::new; } @Override protected MonthOfYear replaceChild(Expression newChild) { - return new MonthOfYear(location(), newChild, timeZone()); + return new MonthOfYear(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java index 4ec42def0eb..d42c18ce88c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java @@ -14,9 +14,9 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; +import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.Locale; -import java.util.TimeZone; import static java.lang.String.format; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; @@ -28,8 +28,8 @@ abstract class NamedDateTimeFunction extends BaseDateTimeFunction { private final NameExtractor nameExtractor; - NamedDateTimeFunction(Location location, Expression field, TimeZone timeZone, NameExtractor nameExtractor) { - super(location, field, timeZone); + NamedDateTimeFunction(Location location, Expression field, ZoneId zoneId, NameExtractor nameExtractor) { + super(location, field, zoneId); this.nameExtractor = nameExtractor; } @@ -45,13 +45,13 @@ abstract class NamedDateTimeFunction extends BaseDateTimeFunction { StringUtils.underscoreToLowerCamelCase(nameExtractor.name()))), paramsBuilder() .variable(field.name()) - .variable(timeZone().getID()).build(), + .variable(zoneId().getId()).build(), dataType()); } @Override protected Processor makeProcessor() { - return new NamedDateTimeProcessor(nameExtractor, timeZone()); + return new NamedDateTimeProcessor(nameExtractor, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java index a0707d2a65e..7a23b40be78 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java @@ -14,7 +14,6 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.Locale; import java.util.Objects; -import java.util.TimeZone; import java.util.function.Function; public class NamedDateTimeProcessor extends BaseDateTimeProcessor { @@ -46,8 +45,8 @@ public class NamedDateTimeProcessor extends BaseDateTimeProcessor { private final NameExtractor extractor; - public 
NamedDateTimeProcessor(NameExtractor extractor, TimeZone timeZone) { - super(timeZone); + public NamedDateTimeProcessor(NameExtractor extractor, ZoneId zoneId) { + super(zoneId); this.extractor = extractor; } @@ -78,7 +77,7 @@ public class NamedDateTimeProcessor extends BaseDateTimeProcessor { @Override public int hashCode() { - return Objects.hash(extractor, timeZone()); + return Objects.hash(extractor, zoneId()); } @Override @@ -88,7 +87,7 @@ public class NamedDateTimeProcessor extends BaseDateTimeProcessor { } NamedDateTimeProcessor other = (NamedDateTimeProcessor) obj; return Objects.equals(extractor, other.extractor) - && Objects.equals(timeZone(), other.timeZone()); + && Objects.equals(zoneId(), other.zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java index b6d28f16a57..82af7380d53 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeFunction.java @@ -14,9 +14,9 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; +import java.time.ZoneId; import java.time.ZonedDateTime; import java.util.Locale; -import java.util.TimeZone; import static java.lang.String.format; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; @@ -28,8 +28,8 @@ abstract class NonIsoDateTimeFunction extends BaseDateTimeFunction { private final NonIsoDateTimeExtractor extractor; - NonIsoDateTimeFunction(Location location, Expression field, TimeZone timeZone, NonIsoDateTimeExtractor extractor) { - super(location, field, timeZone); + NonIsoDateTimeFunction(Location location, Expression field, ZoneId zoneId, NonIsoDateTimeExtractor extractor) { + super(location, field, zoneId); this.extractor = extractor; } @@ -45,13 +45,13 @@ abstract class NonIsoDateTimeFunction extends BaseDateTimeFunction { StringUtils.underscoreToLowerCamelCase(extractor.name()))), paramsBuilder() .variable(field.name()) - .variable(timeZone().getID()).build(), + .variable(zoneId().getId()).build(), dataType()); } @Override protected Processor makeProcessor() { - return new NonIsoDateTimeProcessor(extractor, timeZone()); + return new NonIsoDateTimeProcessor(extractor, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessor.java index e6d4d452169..714c7c86927 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessor.java @@ -60,8 +60,8 @@ public class NonIsoDateTimeProcessor extends BaseDateTimeProcessor { private final NonIsoDateTimeExtractor extractor; - public NonIsoDateTimeProcessor(NonIsoDateTimeExtractor extractor, TimeZone timeZone) { - super(timeZone); + public NonIsoDateTimeProcessor(NonIsoDateTimeExtractor extractor, ZoneId zoneId) { + 
super(zoneId); this.extractor = extractor; } @@ -92,7 +92,7 @@ public class NonIsoDateTimeProcessor extends BaseDateTimeProcessor { @Override public int hashCode() { - return Objects.hash(extractor, timeZone()); + return Objects.hash(extractor, zoneId()); } @Override @@ -102,7 +102,7 @@ public class NonIsoDateTimeProcessor extends BaseDateTimeProcessor { } NonIsoDateTimeProcessor other = (NonIsoDateTimeProcessor) obj; return Objects.equals(extractor, other.extractor) - && Objects.equals(timeZone(), other.timeZone()); + && Objects.equals(zoneId(), other.zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java index 4da5c94626e..63455c76ba0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java @@ -14,16 +14,16 @@ import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; import org.elasticsearch.xpack.sql.type.DataType; +import java.time.ZoneId; import java.time.ZonedDateTime; -import java.util.TimeZone; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor.quarter; import static org.elasticsearch.xpack.sql.expression.gen.script.ParamsBuilder.paramsBuilder; public class Quarter extends BaseDateTimeFunction { - public Quarter(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone); + public Quarter(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId); } @Override @@ -36,24 +36,24 @@ public class Quarter extends BaseDateTimeFunction { return new ScriptTemplate(formatTemplate("{sql}.quarter(doc[{}].value, {})"), paramsBuilder() .variable(field.name()) - .variable(timeZone().getID()) + .variable(zoneId().getId()) .build(), dataType()); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return Quarter::new; } @Override protected Quarter replaceChild(Expression newChild) { - return new Quarter(location(), newChild, timeZone()); + return new Quarter(location(), newChild, zoneId()); } @Override protected Processor makeProcessor() { - return new QuarterProcessor(timeZone()); + return new QuarterProcessor(zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java index d2a20de84d3..7d09093d35f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java @@ -14,12 +14,11 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.Locale; import java.util.Objects; -import java.util.TimeZone; public class QuarterProcessor extends BaseDateTimeProcessor { - public QuarterProcessor(TimeZone timeZone) { - super(timeZone); + public QuarterProcessor(ZoneId zoneId) { + super(zoneId); } public QuarterProcessor(StreamInput in) throws IOException { @@ -49,7 +48,7 @@ public class QuarterProcessor extends 
BaseDateTimeProcessor { @Override public int hashCode() { - return Objects.hash(timeZone()); + return Objects.hash(zoneId()); } @Override @@ -58,6 +57,6 @@ public class QuarterProcessor extends BaseDateTimeProcessor { return false; } DateTimeProcessor other = (DateTimeProcessor) obj; - return Objects.equals(timeZone(), other.timeZone()); + return Objects.equals(zoneId(), other.zoneId()); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java index 3702c4beb3f..c06d48ba287 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the second of the minute from a datetime. */ public class SecondOfMinute extends DateTimeFunction { - public SecondOfMinute(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.SECOND_OF_MINUTE); + public SecondOfMinute(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.SECOND_OF_MINUTE); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return SecondOfMinute::new; } @Override protected SecondOfMinute replaceChild(Expression newChild) { - return new SecondOfMinute(location(), newChild, timeZone()); + return new SecondOfMinute(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java index 1d64eec447d..a3d8a128fbc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java @@ -10,24 +10,24 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDat import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; /** * Extract the week of the year from a datetime following the non-ISO standard. 
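For the non-ISO variant, weeks are Sunday-based and week 1 is simply the week containing January 1st. java.time's WeekFields can model that, giving a rough reference for eyeballing WeekOfYear results (again only a sketch; the actual NonIsoDateTimeExtractor is not shown in this patch):

    import java.time.DayOfWeek;
    import java.time.LocalDate;
    import java.time.temporal.WeekFields;

    class NonIsoWeekSketch {
        public static void main(String[] args) {
            // Weeks start on Sunday; week 1 needs only one day of the new year.
            WeekFields nonIso = WeekFields.of(DayOfWeek.SUNDAY, 1);
            System.out.println(LocalDate.of(2016, 1, 1).get(nonIso.weekOfYear())); // 1 (a Friday, week of Jan 1)
            System.out.println(LocalDate.of(2016, 1, 3).get(nonIso.weekOfYear())); // 2 (Sunday starts a new week)
        }
    }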
*/ public class WeekOfYear extends NonIsoDateTimeFunction { - public WeekOfYear(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, NonIsoDateTimeExtractor.WEEK_OF_YEAR); + public WeekOfYear(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, NonIsoDateTimeExtractor.WEEK_OF_YEAR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return WeekOfYear::new; } @Override protected WeekOfYear replaceChild(Expression newChild) { - return new WeekOfYear(location(), newChild, timeZone()); + return new WeekOfYear(location(), newChild, zoneId()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java index 0ba4c47058d..0f78cf4d78a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java @@ -10,7 +10,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeP import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; -import java.util.TimeZone; +import java.time.ZoneId; import java.util.concurrent.TimeUnit; /** @@ -20,18 +20,18 @@ public class Year extends DateTimeHistogramFunction { private static long YEAR_IN_MILLIS = TimeUnit.DAYS.toMillis(1) * 365L; - public Year(Location location, Expression field, TimeZone timeZone) { - super(location, field, timeZone, DateTimeExtractor.YEAR); + public Year(Location location, Expression field, ZoneId zoneId) { + super(location, field, zoneId, DateTimeExtractor.YEAR); } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return Year::new; } @Override protected Year replaceChild(Expression newChild) { - return new Year(location(), newChild, timeZone()); + return new Year(location(), newChild, zoneId()); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java index 35ba50ab75a..20aad3f2f9a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryFolder.java @@ -62,11 +62,12 @@ import org.elasticsearch.xpack.sql.rule.RuleExecutor; import org.elasticsearch.xpack.sql.session.EmptyExecutable; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.Check; +import org.elasticsearch.xpack.sql.util.DateUtils; +import java.time.ZoneId; import java.util.Arrays; import java.util.LinkedHashMap; import java.util.Map; -import java.util.TimeZone; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.sql.planner.QueryTranslator.and; @@ -77,7 +78,6 @@ import static org.elasticsearch.xpack.sql.planner.QueryTranslator.toQuery; * Folds the PhysicalPlan into a {@link Query}. 
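QueryFolder below drops its private UTC TimeZone constant in favor of the shared DateUtils.UTC, which this patch defines as ZoneId.of("Z"). The exact id matters: java.time treats "Z" and "UTC" as distinct ids, which is why Configuration normalizes the zone it receives and why the verifier test expectations further down change from [YEAR(date [UTC])] to [YEAR(date [Z])]. A short demonstration:

    import java.time.ZoneId;
    import java.time.ZoneOffset;

    class UtcIdsDemo {
        public static void main(String[] args) {
            ZoneId z = ZoneId.of("Z");     // resolves to ZoneOffset.UTC
            ZoneId utc = ZoneId.of("UTC"); // a region id wrapping the same rules
            System.out.println(z.equals(utc));              // false
            System.out.println(utc.normalized().equals(z)); // true
        }
    }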
*/ class QueryFolder extends RuleExecutor { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); PhysicalPlan fold(PhysicalPlan plan) { return execute(plan); @@ -283,7 +283,7 @@ class QueryFolder extends RuleExecutor { if (matchingGroup != null) { if (exp instanceof Attribute || exp instanceof ScalarFunction) { Processor action = null; - TimeZone tz = DataType.DATE == exp.dataType() ? UTC : null; + ZoneId zi = DataType.DATE == exp.dataType() ? DateUtils.UTC : null; /* * special handling of dates since aggs return the typed Date object which needs * extraction instead of handling this in the scroller, the folder handles this @@ -291,9 +291,9 @@ class QueryFolder extends RuleExecutor { */ if (exp instanceof DateTimeHistogramFunction) { action = ((UnaryPipe) p).action(); - tz = ((DateTimeFunction) exp).timeZone(); + zi = ((DateTimeFunction) exp).zoneId(); } - return new AggPathInput(exp.location(), exp, new GroupByRef(matchingGroup.id(), null, tz), action); + return new AggPathInput(exp.location(), exp, new GroupByRef(matchingGroup.id(), null, zi), action); } } // or found an aggregate expression (which has to work on an attribute used for grouping) @@ -334,8 +334,8 @@ class QueryFolder extends RuleExecutor { // check if the field is a date - if so mark it as such to interpret the long as a date // UTC is used since that's what the server uses and there's no conversion applied // (like for date histograms) - TimeZone dt = DataType.DATE == child.dataType() ? UTC : null; - queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, dt)); + ZoneId zi = DataType.DATE == child.dataType() ? DateUtils.UTC : null; + queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi)); } // handle histogram else if (child instanceof GroupingFunction) { @@ -358,8 +358,8 @@ class QueryFolder extends RuleExecutor { matchingGroup = groupingContext.groupFor(ne); Check.notNull(matchingGroup, "Cannot find group [{}]", Expressions.name(ne)); - TimeZone dt = DataType.DATE == ne.dataType() ? UTC : null; - queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, dt)); + ZoneId zi = DataType.DATE == ne.dataType() ? 
DateUtils.UTC : null; + queryC = queryC.addColumn(new GroupByRef(matchingGroup.id(), null, zi)); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index 23352af790d..a757bde89e8 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -259,7 +259,7 @@ final class QueryTranslator { // dates are handled differently because of date histograms if (exp instanceof DateTimeHistogramFunction) { DateTimeHistogramFunction dthf = (DateTimeHistogramFunction) exp; - key = new GroupByDateHistogram(aggId, nameOf(exp), dthf.interval(), dthf.timeZone()); + key = new GroupByDateHistogram(aggId, nameOf(exp), dthf.interval(), dthf.zoneId()); } // all other scalar functions become a script else if (exp instanceof ScalarFunction) { @@ -277,9 +277,9 @@ final class QueryTranslator { long intervalAsMillis = Intervals.inMillis(h.interval()); // TODO: set timezone if (field instanceof FieldAttribute || field instanceof DateTimeHistogramFunction) { - key = new GroupByDateHistogram(aggId, nameOf(field), intervalAsMillis, h.timeZone()); + key = new GroupByDateHistogram(aggId, nameOf(field), intervalAsMillis, h.zoneId()); } else if (field instanceof Function) { - key = new GroupByDateHistogram(aggId, ((Function) field).asScript(), intervalAsMillis, h.timeZone()); + key = new GroupByDateHistogram(aggId, ((Function) field).asScript(), intervalAsMillis, h.zoneId()); } } // numeric histogram diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index 989f94672df..cce721e78fd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -45,7 +45,7 @@ public class TransportSqlClearCursorAction extends HandledTransportAction listener) { Cursor cursor = Cursors.decodeFromString(request.getCursor()); planExecutor.cleanCursor( - new Configuration(DateUtils.UTC_TZ, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, + new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, request.mode(), "", ""), cursor, ActionListener.wrap( success -> listener.onResponse(new SqlClearCursorResponse(success)), listener::onFailure)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 5a794572b90..738cd77af1e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -71,7 +71,7 @@ public class TransportSqlQueryAction extends HandledTransportAction createSourceBuilder() { return new DateHistogramValuesSourceBuilder(id()) .interval(interval) - .timeZone(DateTimeZone.forTimeZone(timeZone)); + .timeZone(DateUtils.zoneIdToDateTimeZone(zoneId)); } @Override protected GroupByKey copy(String id, String fieldName, ScriptTemplate script, Direction direction) { - 
return new GroupByDateHistogram(id, fieldName, script, direction, interval, timeZone); + return new GroupByDateHistogram(id, fieldName, script, direction, interval, zoneId); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), interval, timeZone); + return Objects.hash(super.hashCode(), interval, zoneId); } @Override @@ -60,7 +60,7 @@ public class GroupByDateHistogram extends GroupByKey { if (super.equals(obj)) { GroupByDateHistogram other = (GroupByDateHistogram) obj; return Objects.equals(interval, other.interval) - && Objects.equals(timeZone, other.timeZone); + && Objects.equals(zoneId, other.zoneId); } return false; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupByRef.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupByRef.java index 66c05a1339d..95ab6b3b410 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupByRef.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/GroupByRef.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.querydsl.container; import org.elasticsearch.xpack.sql.execution.search.AggRef; -import java.util.TimeZone; +import java.time.ZoneId; /** * Reference to a GROUP BY agg (typically this gets translated to a composite key). @@ -20,12 +20,12 @@ public class GroupByRef extends AggRef { private final String key; private final Property property; - private final TimeZone timeZone; + private final ZoneId zoneId; - public GroupByRef(String key, Property property, TimeZone timeZone) { + public GroupByRef(String key, Property property, ZoneId zoneId) { this.key = key; this.property = property == null ? Property.VALUE : property; - this.timeZone = timeZone; + this.zoneId = zoneId; } public String key() { @@ -36,8 +36,8 @@ public class GroupByRef extends AggRef { return property; } - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java index 4e2965809f2..6eb6ad19ad4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java @@ -10,12 +10,12 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.sql.proto.Mode; +import java.time.ZoneId; import java.time.ZonedDateTime; -import java.util.TimeZone; // Typed object holding properties for a given query public class Configuration { - private final TimeZone timeZone; + private final ZoneId zoneId; private final int pageSize; private final TimeValue requestTimeout; private final TimeValue pageTimeout; @@ -27,9 +27,9 @@ public class Configuration { @Nullable private QueryBuilder filter; - public Configuration(TimeZone tz, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter, Mode mode, + public Configuration(ZoneId zi, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter, Mode mode, String username, String clusterName) { - this.timeZone = tz; + this.zoneId = zi.normalized(); this.pageSize = pageSize; this.requestTimeout = requestTimeout; this.pageTimeout = pageTimeout; @@ -37,11 +37,11 @@ public class Configuration { this.mode = mode 
== null ? Mode.PLAIN : mode; this.username = username; this.clusterName = clusterName; - this.now = ZonedDateTime.now(timeZone.toZoneId().normalized()); + this.now = ZonedDateTime.now(zoneId); } - public TimeZone timeZone() { - return timeZone; + public ZoneId zoneId() { + return zoneId; } public int pageSize() { @@ -74,4 +74,4 @@ public class Configuration { public ZonedDateTime now() { return now; } -} +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java index 8e774bf6a4f..6aa56914a63 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/DateUtils.java @@ -16,15 +16,13 @@ import java.time.LocalDateTime; import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; -import java.util.TimeZone; public class DateUtils { // TODO: do we have a java.time based parser we can use instead? private static final DateTimeFormatter UTC_DATE_FORMATTER = ISODateTimeFormat.dateOptionalTimeParser().withZoneUTC(); - public static TimeZone UTC_TZ = TimeZone.getTimeZone("UTC"); - public static ZoneId UTC_ZI = ZoneId.of("Z"); + public static ZoneId UTC = ZoneId.of("Z"); private DateUtils() {} @@ -33,7 +31,7 @@ public class DateUtils { * Creates a date from the millis since epoch (thus the time-zone is UTC). */ public static ZonedDateTime of(long millis) { - return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC_ZI); + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC); } /** diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java index d0c1c06239d..cd6fa79cb55 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/TestUtils.java @@ -15,7 +15,7 @@ public class TestUtils { private TestUtils() {} - public static final Configuration TEST_CFG = new Configuration(DateUtils.UTC_TZ, Protocol.FETCH_SIZE, + public static final Configuration TEST_CFG = new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, Mode.PLAIN, null, null); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index 18f544767fa..fcb46d7f8d4 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -212,7 +212,7 @@ public class VerifierErrorMessagesTests extends ESTestCase { } public void testGroupByOrderByScalarOverNonGrouped() { - assertEquals("1:50: Cannot order by non-grouped column [YEAR(date [UTC])], expected [text]", + assertEquals("1:50: Cannot order by non-grouped column [YEAR(date [Z])], expected [text]", error("SELECT MAX(int) FROM test GROUP BY text ORDER BY YEAR(date)")); } @@ -222,7 +222,7 @@ public class VerifierErrorMessagesTests extends ESTestCase { } public void testGroupByOrderByScalarOverNonGrouped_WithHaving() { - assertEquals("1:71: Cannot order by non-grouped column [YEAR(date [UTC])], expected 
[text]", + assertEquals("1:71: Cannot order by non-grouped column [YEAR(date [Z])], expected [text]", error("SELECT MAX(int) FROM test GROUP BY text HAVING MAX(int) > 10 ORDER BY YEAR(date)")); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java index c0125a365aa..135ae74dd20 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/CompositeKeyExtractorTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.xpack.sql.querydsl.container.GroupByRef.Property; import org.elasticsearch.xpack.sql.util.DateUtils; import java.io.IOException; -import java.util.TimeZone; +import java.time.ZoneId; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; @@ -24,7 +24,7 @@ import static java.util.Collections.singletonMap; public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase { public static CompositeKeyExtractor randomCompositeKeyExtractor() { - return new CompositeKeyExtractor(randomAlphaOfLength(16), randomFrom(asList(Property.values())), randomSafeTimeZone()); + return new CompositeKeyExtractor(randomAlphaOfLength(16), randomFrom(asList(Property.values())), randomSafeZone()); } @Override @@ -39,13 +39,13 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase< @Override protected CompositeKeyExtractor mutateInstance(CompositeKeyExtractor instance) throws IOException { - return new CompositeKeyExtractor(instance.key() + "mutated", instance.property(), instance.timeZone()); + return new CompositeKeyExtractor(instance.key() + "mutated", instance.property(), instance.zoneId()); } public void testExtractBucketCount() { Bucket bucket = new TestBucket(emptyMap(), randomLong(), new Aggregations(emptyList())); CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.COUNT, - randomTimeZone()); + randomZone()); assertEquals(bucket.getDocCount(), extractor.extract(bucket)); } @@ -58,15 +58,15 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase< } public void testExtractDate() { - CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomSafeTimeZone()); + CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomSafeZone()); long millis = System.currentTimeMillis(); Bucket bucket = new TestBucket(singletonMap(extractor.key(), millis), randomLong(), new Aggregations(emptyList())); - assertEquals(DateUtils.of(millis, extractor.timeZone().toZoneId()), extractor.extract(bucket)); + assertEquals(DateUtils.of(millis, extractor.zoneId()), extractor.extract(bucket)); } public void testExtractIncorrectDateKey() { - CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomTimeZone()); + CompositeKeyExtractor extractor = new CompositeKeyExtractor(randomAlphaOfLength(16), Property.VALUE, randomZone()); Object value = new Object(); Bucket bucket = new TestBucket(singletonMap(extractor.key(), value), randomLong(), new Aggregations(emptyList())); @@ -79,7 +79,7 @@ public class CompositeKeyExtractorTests extends AbstractWireSerializingTestCase< * back to DateTimeZone 
which we currently still need to do internally, * e.g. in bwc serialization and in the extract() method */ - private static TimeZone randomSafeTimeZone() { - return randomValueOtherThanMany(tz -> tz.getID().startsWith("SystemV"), () -> randomTimeZone()); + private static ZoneId randomSafeZone() { + return randomValueOtherThanMany(zi -> zi.getId().startsWith("SystemV"), () -> randomZone()); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java index cbd2bf8bfde..a23fbff4a99 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistryTests.java @@ -20,9 +20,9 @@ import org.elasticsearch.xpack.sql.tree.LocationTests; import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; +import java.time.ZoneId; import java.util.Arrays; import java.util.List; -import java.util.TimeZone; import static java.util.Collections.emptyList; import static org.elasticsearch.xpack.sql.expression.function.FunctionRegistry.def; @@ -104,10 +104,10 @@ public class FunctionRegistryTests extends ESTestCase { public void testDateTimeFunction() { boolean urIsExtract = randomBoolean(); UnresolvedFunction ur = uf(urIsExtract ? EXTRACT : STANDARD, mock(Expression.class)); - TimeZone providedTimeZone = randomTimeZone(); + ZoneId providedTimeZone = randomZone().normalized(); Configuration providedConfiguration = randomConfiguration(providedTimeZone); - FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e, TimeZone tz) -> { - assertEquals(providedTimeZone, tz); + FunctionRegistry r = new FunctionRegistry(def(DummyFunction.class, (Location l, Expression e, ZoneId zi) -> { + assertEquals(providedTimeZone, zi); assertSame(e, ur.children().get(0)); return new DummyFunction(l); }, "DUMMY_FUNCTION")); @@ -232,7 +232,7 @@ public class FunctionRegistryTests extends ESTestCase { } private Configuration randomConfiguration() { - return new Configuration(randomTimeZone(), + return new Configuration(randomZone(), randomIntBetween(0, 1000), new TimeValue(randomNonNegativeLong()), new TimeValue(randomNonNegativeLong()), @@ -242,8 +242,8 @@ public class FunctionRegistryTests extends ESTestCase { randomAlphaOfLength(10)); } - private Configuration randomConfiguration(TimeZone providedTimeZone) { - return new Configuration(providedTimeZone, + private Configuration randomConfiguration(ZoneId providedZoneId) { + return new Configuration(providedZoneId, randomIntBetween(0, 1000), new TimeValue(randomNonNegativeLong()), new TimeValue(randomNonNegativeLong()), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java index e61690decdf..de2fc69a263 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/DatabaseFunctionTests.java @@ -19,8 +19,7 @@ import org.elasticsearch.xpack.sql.proto.Protocol; import org.elasticsearch.xpack.sql.session.Configuration; import 
org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.type.TypesTests; - -import java.util.TimeZone; +import org.elasticsearch.xpack.sql.util.DateUtils; public class DatabaseFunctionTests extends ESTestCase { @@ -29,7 +28,7 @@ public class DatabaseFunctionTests extends ESTestCase { SqlParser parser = new SqlParser(); EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-basic.json", true)); Analyzer analyzer = new Analyzer( - new Configuration(TimeZone.getTimeZone("UTC"), Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, + new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, randomFrom(Mode.values()), null, clusterName), new FunctionRegistry(), IndexResolution.valid(test), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java index 047c2a01842..7b1e86af5d5 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/UserFunctionTests.java @@ -19,8 +19,7 @@ import org.elasticsearch.xpack.sql.proto.Protocol; import org.elasticsearch.xpack.sql.session.Configuration; import org.elasticsearch.xpack.sql.stats.Metrics; import org.elasticsearch.xpack.sql.type.TypesTests; - -import java.util.TimeZone; +import org.elasticsearch.xpack.sql.util.DateUtils; public class UserFunctionTests extends ESTestCase { @@ -28,7 +27,7 @@ public class UserFunctionTests extends ESTestCase { SqlParser parser = new SqlParser(); EsIndex test = new EsIndex("test", TypesTests.loadMapping("mapping-basic.json", true)); Analyzer analyzer = new Analyzer( - new Configuration(TimeZone.getTimeZone("UTC"), Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, + new Configuration(DateUtils.UTC, Protocol.FETCH_SIZE, Protocol.REQUEST_TIMEOUT, Protocol.PAGE_TIMEOUT, null, randomFrom(Mode.values()), null, randomAlphaOfLengthBetween(1, 15)), new FunctionRegistry(), IndexResolution.valid(test), diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java index 30c5fa6cb4e..03f9c949d29 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessorTests.java @@ -10,12 +10,11 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor; import java.io.IOException; -import java.util.TimeZone; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; public class DateTimeProcessorTests extends AbstractWireSerializingTestCase { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); public static DateTimeProcessor randomDateTimeProcessor() { return new DateTimeProcessor(randomFrom(DateTimeExtractor.values()), UTC); diff --git 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java index 305fa528e1f..164fe1fe931 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeTestUtils.java @@ -20,7 +20,7 @@ public class DateTimeTestUtils { public static ZonedDateTime dateTime(int year, int month, int day, int hour, int minute) { DateTime dateTime = new DateTime(year, month, day, hour, minute, DateTimeZone.UTC); - ZonedDateTime zdt = ZonedDateTime.of(year, month, day, hour, minute, 0, 0, DateUtils.UTC_ZI); + ZonedDateTime zdt = ZonedDateTime.of(year, month, day, hour, minute, 0, 0, DateUtils.UTC); assertEquals(dateTime.getMillis() / 1000, zdt.toEpochSecond()); return zdt; } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java index c134446a2c3..6bd4a8fe1ba 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYearTests.java @@ -9,24 +9,24 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.type.DataType; -import java.util.TimeZone; +import java.time.ZoneId; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; public class DayOfYearTests extends ESTestCase { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); public void testAsColumnProcessor() { assertEquals(1, extract(dateTime(0), UTC)); - assertEquals(1, extract(dateTime(0), TimeZone.getTimeZone("GMT+01:00"))); - assertEquals(365, extract(dateTime(0), TimeZone.getTimeZone("GMT-01:00"))); + assertEquals(1, extract(dateTime(0), ZoneId.of("GMT+01:00"))); + assertEquals(365, extract(dateTime(0), ZoneId.of("GMT-01:00"))); } - private Object extract(Object value, TimeZone timeZone) { - return build(value, timeZone).asPipe().asProcessor().process(value); + private Object extract(Object value, ZoneId zoneId) { + return build(value, zoneId).asPipe().asProcessor().process(value); } - private DayOfYear build(Object value, TimeZone timeZone) { - return new DayOfYear(null, new Literal(null, value, DataType.DATE), timeZone); + private DayOfYear build(Object value, ZoneId zoneId) { + return new DayOfYear(null, new Literal(null, value, DataType.DATE), zoneId); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java index 379cf5f7e09..ae152bba5d8 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java @@ 
-13,14 +13,13 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDate import org.junit.Assume; import java.io.IOException; -import java.util.TimeZone; +import java.time.ZoneId; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; public class NamedDateTimeProcessorTests extends AbstractWireSerializingTestCase { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); - public static NamedDateTimeProcessor randomNamedDateTimeProcessor() { return new NamedDateTimeProcessor(randomFrom(NameExtractor.values()), UTC); } @@ -42,7 +41,6 @@ public class NamedDateTimeProcessorTests extends AbstractWireSerializingTestCase } public void testValidDayNamesInUTC() { - assumeJava9PlusAndCompatLocaleProviderSetting(); NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.DAY_NAME, UTC); assertEquals("Thursday", proc.process(dateTime(0L))); assertEquals("Saturday", proc.process(dateTime(-64164233612338L))); @@ -56,7 +54,7 @@ public class NamedDateTimeProcessorTests extends AbstractWireSerializingTestCase public void testValidDayNamesWithNonUTCTimeZone() { assumeJava9PlusAndCompatLocaleProviderSetting(); - NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.DAY_NAME, TimeZone.getTimeZone("GMT-10:00")); + NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.DAY_NAME, ZoneId.of("GMT-10:00")); assertEquals("Wednesday", proc.process(dateTime(0))); assertEquals("Friday", proc.process(dateTime(-64164233612338L))); assertEquals("Monday", proc.process(dateTime(64164233612338L))); @@ -83,7 +81,7 @@ public class NamedDateTimeProcessorTests extends AbstractWireSerializingTestCase public void testValidMonthNamesWithNonUTCTimeZone() { assumeJava9PlusAndCompatLocaleProviderSetting(); - NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.MONTH_NAME, TimeZone.getTimeZone("GMT-3:00")); + NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.MONTH_NAME, ZoneId.of("GMT-03:00")); assertEquals("December", proc.process(dateTime(0))); assertEquals("August", proc.process(dateTime(-64165813612338L))); // GMT: Tuesday, September 1, -0064 2:53:07.662 AM assertEquals("April", proc.process(dateTime(64164233612338L))); // GMT: Monday, April 14, 4003 2:13:32.338 PM diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessorTests.java index 23cffe514b9..6fb007e4321 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NonIsoDateTimeProcessorTests.java @@ -10,13 +10,13 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NonIsoDateTimeProcessor.NonIsoDateTimeExtractor; import java.io.IOException; -import java.util.TimeZone; +import java.time.ZoneId; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; public class NonIsoDateTimeProcessorTests extends AbstractWireSerializingTestCase { - private static final TimeZone 
UTC = TimeZone.getTimeZone("UTC"); public static NonIsoDateTimeProcessor randomNonISODateTimeProcessor() { return new NonIsoDateTimeProcessor(randomFrom(NonIsoDateTimeExtractor.values()), UTC); @@ -52,7 +52,7 @@ public class NonIsoDateTimeProcessorTests extends AbstractWireSerializingTestCas } public void testNonISOWeekOfYearInNonUTCTimeZone() { - NonIsoDateTimeProcessor proc = new NonIsoDateTimeProcessor(NonIsoDateTimeExtractor.WEEK_OF_YEAR, TimeZone.getTimeZone("GMT-10:00")); + NonIsoDateTimeProcessor proc = new NonIsoDateTimeProcessor(NonIsoDateTimeExtractor.WEEK_OF_YEAR, ZoneId.of("GMT-10:00")); assertEquals(2, proc.process(dateTime(568372930000L))); assertEquals(5, proc.process(dateTime(981278530000L))); assertEquals(7, proc.process(dateTime(224241730000L))); @@ -78,7 +78,7 @@ public class NonIsoDateTimeProcessorTests extends AbstractWireSerializingTestCas } public void testNonISODayOfWeekInNonUTCTimeZone() { - NonIsoDateTimeProcessor proc = new NonIsoDateTimeProcessor(NonIsoDateTimeExtractor.DAY_OF_WEEK, TimeZone.getTimeZone("GMT-10:00")); + NonIsoDateTimeProcessor proc = new NonIsoDateTimeProcessor(NonIsoDateTimeExtractor.DAY_OF_WEEK, ZoneId.of("GMT-10:00")); assertEquals(2, proc.process(dateTime(568372930000L))); assertEquals(7, proc.process(dateTime(981278530000L))); assertEquals(2, proc.process(dateTime(224241730000L))); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java index 29e5d31db21..353fe0834a3 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java @@ -8,14 +8,13 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; import org.elasticsearch.test.ESTestCase; -import java.util.TimeZone; +import java.time.ZoneId; import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeTestUtils.dateTime; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; public class QuarterProcessorTests extends ESTestCase { - private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); - public void testQuarterWithUTCTimezone() { QuarterProcessor proc = new QuarterProcessor(UTC); @@ -30,12 +29,12 @@ public class QuarterProcessorTests extends ESTestCase { } public void testValidDayNamesWithNonUTCTimeZone() { - QuarterProcessor proc = new QuarterProcessor(TimeZone.getTimeZone("GMT-10:00")); + QuarterProcessor proc = new QuarterProcessor(ZoneId.of("GMT-10:00")); assertEquals(4, proc.process(dateTime(0L))); assertEquals(4, proc.process(dateTime(-5400, 1, 1, 5, 0))); assertEquals(1, proc.process(dateTime(30, 4, 1, 9, 59))); - proc = new QuarterProcessor(TimeZone.getTimeZone("GMT+10:00")); + proc = new QuarterProcessor(ZoneId.of("GMT+10:00")); assertEquals(4, proc.process(dateTime(10902, 9, 30, 14, 1))); assertEquals(3, proc.process(dateTime(10902, 9, 30, 13, 59))); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java index 748718d0a3a..2618392a067 100644 --- 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/predicate/operator/arithmetic/BinaryArithmeticTests.java @@ -78,7 +78,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testAddYearMonthIntervalToDate() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); Literal r = interval(t, INTERVAL_HOUR); @@ -87,7 +87,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testAddDayTimeIntervalToDate() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); @@ -96,7 +96,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testAddDayTimeIntervalToDateReverse() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); @@ -125,7 +125,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testSubYearMonthIntervalToDate() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); Literal r = interval(t, INTERVAL_HOUR); @@ -134,7 +134,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testSubYearMonthIntervalToDateIllegal() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Period.ofYears(100).plusMonths(50); Literal r = interval(t, INTERVAL_HOUR); @@ -149,7 +149,7 @@ public class BinaryArithmeticTests extends ESTestCase { } public void testSubDayTimeIntervalToDate() { - ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime now = ZonedDateTime.now(DateUtils.UTC); Literal l = L(now); TemporalAmount t = Duration.ofHours(2); Literal r = interval(Duration.ofHours(2), INTERVAL_HOUR); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 514c36ddf72..7094b1c88b5 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -23,8 +23,8 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfYear; -import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.IsoWeekOfYear; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthOfYear; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Year; import 
org.elasticsearch.xpack.sql.expression.function.scalar.math.ACos; import org.elasticsearch.xpack.sql.expression.function.scalar.math.ASin; @@ -91,7 +91,6 @@ import org.elasticsearch.xpack.sql.util.CollectionUtils; import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.TimeZone; import static java.util.Arrays.asList; import static java.util.Collections.emptyList; @@ -99,6 +98,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.sql.expression.Literal.NULL; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; +import static org.elasticsearch.xpack.sql.util.DateUtils.UTC; import static org.hamcrest.Matchers.contains; public class OptimizerTests extends ESTestCase { @@ -327,7 +327,6 @@ public class OptimizerTests extends ESTestCase { } public void testConstantFoldingDatetime() { - final TimeZone UTC = TimeZone.getTimeZone("UTC"); Expression cast = new Cast(EMPTY, Literal.of(EMPTY, "2018-01-19T10:23:27Z"), DataType.DATE); assertEquals(2018, foldFunction(new Year(EMPTY, cast, UTC))); assertEquals(1, foldFunction(new MonthOfYear(EMPTY, cast, UTC))); @@ -407,7 +406,7 @@ public class OptimizerTests extends ESTestCase { public void testGenericNullableExpression() { FoldNull rule = new FoldNull(); // date-time - assertNullLiteral(rule.rule(new DayName(EMPTY, Literal.NULL, randomTimeZone()))); + assertNullLiteral(rule.rule(new DayName(EMPTY, Literal.NULL, randomZone()))); // math function assertNullLiteral(rule.rule(new Cos(EMPTY, Literal.NULL))); // string function diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java index 064014a321d..ffe68e1765f 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/type/DataTypeConversionTests.java @@ -111,7 +111,7 @@ public class DataTypeConversionTests extends ESTestCase { assertEquals(dateTime(18000000L), conversion.convert("1970-01-01T00:00:00-05:00")); // double check back and forth conversion - ZonedDateTime dt = ZonedDateTime.now(DateUtils.UTC_ZI); + ZonedDateTime dt = ZonedDateTime.now(DateUtils.UTC); Conversion forward = conversionFor(DATE, KEYWORD); Conversion back = conversionFor(KEYWORD, DATE); assertEquals(dt, back.convert(forward.convert(dt))); From 2ed6ab9648cb6d93d6c27fe9b0db3e5a5c57e91e Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Mon, 17 Dec 2018 14:00:56 +0200 Subject: [PATCH 04/26] SQL: Concat should be always not nullable (#36601) --- .../qa/src/main/resources/functions.csv-spec | 27 +++++++++++++++++++ .../function/scalar/string/Concat.java | 2 +- .../xpack/sql/optimizer/OptimizerTests.java | 9 +++++++ .../xpack/sql/planner/QueryFolderTests.java | 10 +++++++ 4 files changed, 47 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/sql/qa/src/main/resources/functions.csv-spec b/x-pack/plugin/sql/qa/src/main/resources/functions.csv-spec index 930a15f9438..6fec225df0c 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/functions.csv-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/functions.csv-spec @@ -68,6 +68,33 @@ cct:s AlejandroMcAlpine ; +selectConcatWithNullValues +SELECT first_name, CONCAT(first_name,null),last_name, CONCAT(null,null), LENGTH(CONCAT(null,null)) FROM test_emp ORDER BY first_name DESC LIMIT 20; + + first_name:s 
|CONCAT(first_name,null):s| last_name:s |CONCAT(null,null):s|LENGTH(CONCAT(null,null)):i +---------------+-------------------------+----------------+-------------------+------------------------- +null | |Demeyer | |0 +null | |Joslin | |0 +null | |Reistad | |0 +null | |Merlo | |0 +null | |Swan | |0 +null | |Chappelet | |0 +null | |Portugali | |0 +null | |Makrucki | |0 +null | |Lortz | |0 +null | |Brender | |0 +Zvonko |Zvonko |Nyanchama | |0 +Zhongwei |Zhongwei |Rosen | |0 +Yongqiao |Yongqiao |Berztiss | |0 +Yishay |Yishay |Tzvieli | |0 +Yinghua |Yinghua |Dredge | |0 +Xinglin |Xinglin |Eugenio | |0 +Weiyi |Weiyi |Meriste | |0 +Vishv |Vishv |Zockler | |0 +Valter |Valter |Sullins | |0 +Valdiodio |Valdiodio |Niizuma | |0 +; + selectAsciiOfConcatWithGroupByOrderByCount SELECT ASCII(CONCAT("first_name","last_name")) ascii, COUNT(*) count FROM "test_emp" GROUP BY ASCII(CONCAT("first_name","last_name")) ORDER BY ASCII(CONCAT("first_name","last_name")) DESC LIMIT 10; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java index 3bd03986eb5..d89d8fe6efb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Concat.java @@ -51,7 +51,7 @@ public class Concat extends BinaryScalarFunction { @Override public boolean nullable() { - return left().nullable() && right().nullable(); + return false; } @Override diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 7094b1c88b5..8c8a64c79f2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.Cos; import org.elasticsearch.xpack.sql.expression.function.scalar.math.E; import org.elasticsearch.xpack.sql.expression.function.scalar.math.Floor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Ascii; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.Concat; import org.elasticsearch.xpack.sql.expression.function.scalar.string.Repeat; import org.elasticsearch.xpack.sql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.sql.expression.predicate.Range; @@ -87,6 +88,7 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.EsField; import org.elasticsearch.xpack.sql.util.CollectionUtils; +import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.Arrays; import java.util.Collections; @@ -519,6 +521,13 @@ public class OptimizerTests extends ESTestCase { assertEquals(ONE, e.children().get(0)); assertEquals(TWO, e.children().get(1)); } + + public void testConcatFoldingIsNotNull() { + FoldNull foldNull = new FoldNull(); + assertEquals(1, foldNull.rule(new Concat(EMPTY, Literal.NULL, ONE)).fold()); + assertEquals(1, foldNull.rule(new Concat(EMPTY, ONE, Literal.NULL)).fold()); + assertEquals(StringUtils.EMPTY, foldNull.rule(new Concat(EMPTY, Literal.NULL, Literal.NULL)).fold()); + } // // Logical simplifications diff --git 
a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java index 617a4634826..bb85921369a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryFolderTests.java @@ -263,4 +263,14 @@ public class QueryFolderTests extends ESTestCase { assertThat(ee.output().get(0).toString(), startsWith("COUNT(1){a->")); assertThat(ee.output().get(1).toString(), startsWith("a{s->")); } + + public void testConcatIsNotFoldedForNull() { + PhysicalPlan p = plan("SELECT keyword FROM test WHERE CONCAT(keyword, null) IS NULL"); + assertEquals(LocalExec.class, p.getClass()); + LocalExec le = (LocalExec) p; + assertEquals(EmptyExecutable.class, le.executable().getClass()); + EmptyExecutable ee = (EmptyExecutable) le.executable(); + assertEquals(1, ee.output().size()); + assertThat(ee.output().get(0).toString(), startsWith("keyword{f}#")); + } } From 6f038997e14b9427399df77f8ff6415fa8cfb67a Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Mon, 17 Dec 2018 14:45:55 +0200 Subject: [PATCH 05/26] Watcher accounts constructed lazily (#36656) This fixes two bugs in watcher notifications: * registering accounts that had only secure settings was not possible before; such accounts are particularly practical for Slack and PagerDuty integrations. * it removes the limitation that, for an account with both secure and cluster settings, the admin had to first change/add the secure settings and only then add the dependent dynamic cluster settings. The reverse order would trigger a SettingsException for an incomplete account. The workaround is to instantiate account objects lazily, so that by the time an account is first used all the required settings are in place. Previously, the approach was to greedily validate all the account settings by constructing the account objects, even if they would never be used by actions. This made sense in a world where all the settings were set by a single API. But given that accounts have dependent settings (that must be used together) which have to be changed using different APIs (POST _nodes/reload_secure_settings and PUT _cluster/settings), the settings group would technically be in an invalid state between the calls. This fix builds account objects, and validates their settings, only when they are needed by actions.
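To make the lazy-construction idea concrete, here is a minimal sketch of the pattern, assuming a plain Supplier-based factory; the LazyAccount class is an illustrative stand-in for the LazyInitializable utility the patch actually wires in below.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Supplier;

    // Sketch: account settings are captured eagerly, but the validating factory
    // only runs on first use, so an incomplete account fails when an action needs
    // it, not while the admin is still between the two settings APIs.
    final class LazyAccount<T> {
        private final Supplier<T> factory; // may throw if the settings are incomplete
        private volatile T value;

        LazyAccount(Supplier<T> factory) {
            this.factory = factory;
        }

        T getOrCompute() {
            T result = value;
            if (result == null) {
                synchronized (this) {
                    result = value;
                    if (result == null) {
                        value = result = factory.get(); // validation happens here, lazily
                    }
                }
            }
            return result;
        }

        public static void main(String[] args) {
            Map<String, LazyAccount<String>> accounts = new HashMap<>();
            // Registering never validates; the factory call is deferred.
            accounts.put("slack", new LazyAccount<>(() -> "validated slack account"));
            // Only here is the account actually built (and its settings validated).
            System.out.println(accounts.get("slack").getOrCompute());
        }
    }

The double-checked locking keeps the common path lock-free once an account has been built, which suits the read-mostly access pattern of notification actions.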
--- .../notification/NotificationService.java | 33 ++-- .../NotificationServiceTests.java | 164 +++++++++++++++++- .../hipchat/HipChatServiceTests.java | 2 +- 3 files changed, 180 insertions(+), 19 deletions(-) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java index f62de14b931..c2a079e519f 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; +import org.elasticsearch.common.util.LazyInitializable; import java.io.IOException; import java.io.InputStream; @@ -35,8 +36,8 @@ public abstract class NotificationService { private final Settings bootSettings; private final List> pluginSecureSettings; // all are guarded by this - private volatile Map accounts; - private volatile Account defaultAccount; + private volatile Map> accounts; + private volatile LazyInitializable defaultAccount; // cached cluster setting, required when recreating the notification clients // using the new "reloaded" secure settings private volatile Settings cachedClusterSettings; @@ -59,7 +60,7 @@ public abstract class NotificationService { this.pluginSecureSettings = pluginSecureSettings; } - private synchronized void clusterSettingsConsumer(Settings settings) { + protected synchronized void clusterSettingsConsumer(Settings settings) { // update cached cluster settings this.cachedClusterSettings = settings; // use these new dynamic cluster settings together with the previously cached @@ -102,13 +103,13 @@ public abstract class NotificationService { public Account getAccount(String name) { // note this is not final since we mock it in tests and that causes // trouble since final methods can't be mocked... - final Map accounts; - final Account defaultAccount; + final Map> accounts; + final LazyInitializable defaultAccount; synchronized (this) { // must read under sync block otherwise it might be inconsistent accounts = this.accounts; defaultAccount = this.defaultAccount; } - Account theAccount = accounts.getOrDefault(name, defaultAccount); + LazyInitializable theAccount = accounts.getOrDefault(name, defaultAccount); if (theAccount == null && name == null) { throw new IllegalArgumentException("no accounts of type [" + type + "] configured. " + "Please set up an account using the [xpack.notification." 
+ type +"] settings"); @@ -116,7 +117,7 @@ public abstract class NotificationService { if (theAccount == null) { throw new IllegalArgumentException("no account found for name: [" + name + "]"); } - return theAccount; + return theAccount.getOrCompute(); } private String getNotificationsAccountPrefix() { @@ -124,27 +125,27 @@ public abstract class NotificationService { } private Set getAccountNames(Settings settings) { - // secure settings are not responsible for the client names - final Settings noSecureSettings = Settings.builder().put(settings, false).build(); - return noSecureSettings.getByPrefix(getNotificationsAccountPrefix()).names(); + return settings.getByPrefix(getNotificationsAccountPrefix()).names(); } private @Nullable String getDefaultAccountName(Settings settings) { return settings.get("xpack.notification." + type + ".default_account"); } - private Map createAccounts(Settings settings, Set accountNames, + private Map> createAccounts(Settings settings, Set accountNames, BiFunction accountFactory) { - final Map accounts = new HashMap<>(); + final Map> accounts = new HashMap<>(); for (final String accountName : accountNames) { final Settings accountSettings = settings.getAsSettings(getNotificationsAccountPrefix() + accountName); - final Account account = accountFactory.apply(accountName, accountSettings); - accounts.put(accountName, account); + accounts.put(accountName, new LazyInitializable<>(() -> { + return accountFactory.apply(accountName, accountSettings); + })); } return Collections.unmodifiableMap(accounts); } - private @Nullable Account findDefaultAccountOrNull(Settings settings, Map accounts) { + private @Nullable LazyInitializable findDefaultAccountOrNull(Settings settings, + Map> accounts) { final String defaultAccountName = getDefaultAccountName(settings); if (defaultAccountName == null) { if (accounts.isEmpty()) { @@ -153,7 +154,7 @@ public abstract class NotificationService { return accounts.values().iterator().next(); } } else { - final Account account = accounts.get(defaultAccountName); + final LazyInitializable account = accounts.get(defaultAccountName); if (account == null) { throw new SettingsException("could not find default account [" + defaultAccountName + "]"); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java index 184ff56c213..efbefdd6408 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java @@ -5,12 +5,27 @@ */ package org.elasticsearch.xpack.watcher.notification; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureSettings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.watcher.notification.NotificationService; +import java.io.IOException; +import java.io.InputStream; +import java.security.GeneralSecurityException; +import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import 
java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.is; @@ -25,6 +40,7 @@ public class NotificationServiceTests extends ESTestCase { assertThat(service.getAccount(accountName), is(accountName)); // single account, this will also be the default assertThat(service.getAccount("non-existing"), is(accountName)); + assertThat(service.getAccount(null), is(accountName)); } public void testMultipleAccountsWithExistingDefault() { @@ -80,16 +96,160 @@ public class NotificationServiceTests extends ESTestCase { is("no accounts of type [test] configured. Please set up an account using the [xpack.notification.test] settings")); } + public void testAccountWithSecureSettings() throws Exception { + final Setting secureSetting1 = SecureSetting.secureString("xpack.notification.test.account.secure_only", null); + final Setting secureSetting2 = SecureSetting.secureString("xpack.notification.test.account.mixed.secure", null); + final Map secureSettingsMap = new HashMap<>(); + secureSettingsMap.put(secureSetting1.getKey(), "secure_only".toCharArray()); + secureSettingsMap.put(secureSetting2.getKey(), "mixed_secure".toCharArray()); + Settings settings = Settings.builder() + .put("xpack.notification.test.account.unsecure_only", "bar") + .put("xpack.notification.test.account.mixed.unsecure", "mixed_unsecure") + .setSecureSettings(secureSettingsFromMap(secureSettingsMap)) + .build(); + TestNotificationService service = new TestNotificationService(settings, Arrays.asList(secureSetting1, secureSetting2)); + assertThat(service.getAccount("secure_only"), is("secure_only")); + assertThat(service.getAccount("unsecure_only"), is("unsecure_only")); + assertThat(service.getAccount("mixed"), is("mixed")); + assertThat(service.getAccount(null), anyOf(is("secure_only"), is("unsecure_only"), is("mixed"))); + } + + public void testAccountCreationCached() { + String accountName = randomAlphaOfLength(10); + Settings settings = Settings.builder().put("xpack.notification.test.account." 
+ accountName, "bar").build(); + final AtomicInteger validationInvocationCount = new AtomicInteger(0); + + TestNotificationService service = new TestNotificationService(settings, (String name, Settings accountSettings) -> { + validationInvocationCount.incrementAndGet(); + }); + assertThat(validationInvocationCount.get(), is(0)); + assertThat(service.getAccount(accountName), is(accountName)); + assertThat(validationInvocationCount.get(), is(1)); + if (randomBoolean()) { + assertThat(service.getAccount(accountName), is(accountName)); + } else { + assertThat(service.getAccount(null), is(accountName)); + } + // counter is still 1 because the account is cached + assertThat(validationInvocationCount.get(), is(1)); + } + + public void testAccountUpdateSettings() throws Exception { + final Setting secureSetting = SecureSetting.secureString("xpack.notification.test.account.x.secure", null); + final Setting setting = Setting.simpleString("xpack.notification.test.account.x.dynamic", Setting.Property.Dynamic, + Setting.Property.NodeScope); + final AtomicReference secureSettingValue = new AtomicReference(randomAlphaOfLength(4)); + final AtomicReference settingValue = new AtomicReference(randomAlphaOfLength(4)); + final Map secureSettingsMap = new HashMap<>(); + final AtomicInteger validationInvocationCount = new AtomicInteger(0); + secureSettingsMap.put(secureSetting.getKey(), secureSettingValue.get().toCharArray()); + final Settings.Builder settingsBuilder = Settings.builder() + .put(setting.getKey(), settingValue.get()) + .setSecureSettings(secureSettingsFromMap(secureSettingsMap)); + final TestNotificationService service = new TestNotificationService(settingsBuilder.build(), Arrays.asList(secureSetting), + (String name, Settings accountSettings) -> { + assertThat(accountSettings.get("dynamic"), is(settingValue.get())); + assertThat(SecureSetting.secureString("secure", null).get(accountSettings), is(secureSettingValue.get())); + validationInvocationCount.incrementAndGet(); + }); + assertThat(validationInvocationCount.get(), is(0)); + service.getAccount(null); + assertThat(validationInvocationCount.get(), is(1)); + // update secure setting only + updateSecureSetting(secureSettingValue, secureSetting, secureSettingsMap, settingsBuilder, service); + assertThat(validationInvocationCount.get(), is(1)); + service.getAccount(null); + assertThat(validationInvocationCount.get(), is(2)); + updateDynamicClusterSetting(settingValue, setting, settingsBuilder, service); + assertThat(validationInvocationCount.get(), is(2)); + service.getAccount(null); + assertThat(validationInvocationCount.get(), is(3)); + // update both + if (randomBoolean()) { + // update secure first + updateSecureSetting(secureSettingValue, secureSetting, secureSettingsMap, settingsBuilder, service); + // update cluster second + updateDynamicClusterSetting(settingValue, setting, settingsBuilder, service); + } else { + // update cluster first + updateDynamicClusterSetting(settingValue, setting, settingsBuilder, service); + // update secure second + updateSecureSetting(secureSettingValue, secureSetting, secureSettingsMap, settingsBuilder, service); + } + assertThat(validationInvocationCount.get(), is(3)); + service.getAccount(null); + assertThat(validationInvocationCount.get(), is(4)); + } + + private static void updateDynamicClusterSetting(AtomicReference settingValue, Setting setting, + Settings.Builder settingsBuilder, TestNotificationService service) { + settingValue.set(randomAlphaOfLength(4)); + settingsBuilder.put(setting.getKey(), 
settingValue.get()); + service.clusterSettingsConsumer(settingsBuilder.build()); + } + + private static void updateSecureSetting(AtomicReference secureSettingValue, Setting secureSetting, + Map secureSettingsMap, Settings.Builder settingsBuilder, TestNotificationService service) { + secureSettingValue.set(randomAlphaOfLength(4)); + secureSettingsMap.put(secureSetting.getKey(), secureSettingValue.get().toCharArray()); + service.reload(settingsBuilder.build()); + } + private static class TestNotificationService extends NotificationService { - TestNotificationService(Settings settings) { - super("test", settings, Collections.emptyList()); + private final BiConsumer validator; + + TestNotificationService(Settings settings, List> secureSettings, BiConsumer validator) { + super("test", settings, secureSettings); + this.validator = validator; reload(settings); } + TestNotificationService(Settings settings, List> secureSettings) { + this(settings, secureSettings, (x, y) -> {}); + } + + TestNotificationService(Settings settings) { + this(settings, Collections.emptyList(), (x, y) -> {}); + } + + TestNotificationService(Settings settings, BiConsumer validator) { + this(settings, Collections.emptyList(), validator); + } + @Override protected String createAccount(String name, Settings accountSettings) { + validator.accept(name, accountSettings); return name; } } + + private static SecureSettings secureSettingsFromMap(Map secureSettingsMap) { + return new SecureSettings() { + + @Override + public boolean isLoaded() { + return true; + } + + @Override + public SecureString getString(String setting) throws GeneralSecurityException { + return new SecureString(secureSettingsMap.get(setting)); + } + + @Override + public Set getSettingNames() { + return secureSettingsMap.keySet(); + } + + @Override + public InputStream getFile(String setting) throws GeneralSecurityException { + return null; + } + + @Override + public void close() throws IOException { + } + }; + } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java index 7d3960a9344..7b5d6c7f081 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatServiceTests.java @@ -128,7 +128,7 @@ public class HipChatServiceTests extends ESTestCase { .put("xpack.notification.hipchat.account." + accountName + ".auth_token", "_token"); SettingsException e = expectThrows(SettingsException.class, () -> new HipChatService(settingsBuilder.build(), httpClient, - new ClusterSettings(settingsBuilder.build(), new HashSet<>(HipChatService.getSettings())))); + new ClusterSettings(settingsBuilder.build(), new HashSet<>(HipChatService.getSettings()))).getAccount(null)); assertThat(e.getMessage(), containsString("missing required [room] setting for [integration] account profile")); } From a181a25226ce2c9de532934762c5f48d76298c69 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 17 Dec 2018 14:14:56 +0100 Subject: [PATCH 06/26] [CCR] Add time since last auto follow fetch to auto follow stats (#36542) For each remote cluster, the auto follow coordinator starts an auto follower that checks the remote cluster state and determines whether an index needs to be auto followed.
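A minimal sketch of the timing mechanism this change introduces, assuming a relative-time supplier such as the threadPool::relativeTimeInMillis the patch passes into the coordinator; the AutoFollowTimer name is illustrative, and the real AutoFollower tracks the stamp as a field alongside its last seen metadata version.

    import java.util.function.LongSupplier;

    // Sketch: each auto follower stamps the relative time when a check starts;
    // the stats call turns that stamp into a "time since last check" value,
    // with -1 meaning the follower has not run yet.
    final class AutoFollowTimer {
        private final LongSupplier relativeTimeProvider;
        private volatile long lastAutoFollowTimeInMillis = -1;

        AutoFollowTimer(LongSupplier relativeTimeProvider) {
            this.relativeTimeProvider = relativeTimeProvider;
        }

        void onCheckStarted() {
            lastAutoFollowTimeInMillis = relativeTimeProvider.getAsLong();
        }

        long timeSinceLastCheckMillis() {
            long last = lastAutoFollowTimeInMillis;
            return last == -1 ? -1 : relativeTimeProvider.getAsLong() - last;
        }

        public static void main(String[] args) {
            AutoFollowTimer timer = new AutoFollowTimer(System::currentTimeMillis);
            timer.onCheckStarted();
            System.out.println("time since last check: " + timer.timeSinceLastCheckMillis() + " ms");
        }
    }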
The time since last auto follow is reported per remote cluster and gives insight into whether the auto follow process is alive. Relates to #33007 Originates from #35895 --- .../client/ccr/AutoFollowStats.java | 47 ++++++- .../client/ccr/CcrStatsResponseTests.java | 21 +++- .../reference/ccr/apis/get-ccr-stats.asciidoc | 4 +- .../java/org/elasticsearch/xpack/ccr/Ccr.java | 2 +- .../ccr/action/AutoFollowCoordinator.java | 36 +++++- .../action/AutoFollowCoordinatorTests.java | 102 +++++++++++++-- .../action/AutoFollowStatsResponseTests.java | 4 +- .../ccr/action/AutoFollowStatsTests.java | 20 ++- .../AutoFollowStatsMonitoringDocTests.java | 43 ++++++- .../xpack/core/ccr/AutoFollowStats.java | 119 ++++++++++++++++-- .../src/main/resources/monitoring-es.json | 14 +++ 11 files changed, 379 insertions(+), 33 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java index 09b57e68ff5..b442336ca4d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ccr/AutoFollowStats.java @@ -39,6 +39,10 @@ public final class AutoFollowStats { static final ParseField RECENT_AUTO_FOLLOW_ERRORS = new ParseField("recent_auto_follow_errors"); static final ParseField LEADER_INDEX = new ParseField("leader_index"); static final ParseField AUTO_FOLLOW_EXCEPTION = new ParseField("auto_follow_exception"); + static final ParseField AUTO_FOLLOWED_CLUSTERS = new ParseField("auto_followed_clusters"); + static final ParseField CLUSTER_NAME = new ParseField("cluster_name"); + static final ParseField TIME_SINCE_LAST_CHECK_MILLIS = new ParseField("time_since_last_check_millis"); + static final ParseField LAST_SEEN_METADATA_VERSION = new ParseField("last_seen_metadata_version"); @SuppressWarnings("unchecked") static final ConstructingObjectParser STATS_PARSER = new ConstructingObjectParser<>("auto_follow_stats", @@ -48,6 +52,10 @@ public final class AutoFollowStats { (Long) args[2], new TreeMap<>( ((List>) args[3]) .stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))), + new TreeMap<>( + ((List>) args[4]) .stream() .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) )); @@ -57,6 +65,11 @@ public final class AutoFollowStats { "auto_follow_stats_errors", args -> new AbstractMap.SimpleEntry<>((String) args[0], (ElasticsearchException) args[1])); + private static final ConstructingObjectParser, Void> AUTO_FOLLOWED_CLUSTERS_PARSER = + new ConstructingObjectParser<>( + "auto_followed_clusters", + args -> new AbstractMap.SimpleEntry<>((String) args[0], new AutoFollowedCluster((Long) args[1], (Long) args[2]))); + static { AUTO_FOLLOW_EXCEPTIONS_PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_INDEX); AUTO_FOLLOW_EXCEPTIONS_PARSER.declareObject( @@ -64,26 +77,35 @@ public final class AutoFollowStats { (p, c) -> ElasticsearchException.fromXContent(p), AUTO_FOLLOW_EXCEPTION); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareString(ConstructingObjectParser.constructorArg(), CLUSTER_NAME); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_SINCE_LAST_CHECK_MILLIS); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_SEEN_METADATA_VERSION); + STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED);
STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS); STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED); STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOW_EXCEPTIONS_PARSER, RECENT_AUTO_FOLLOW_ERRORS); + STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOWED_CLUSTERS_PARSER, + AUTO_FOLLOWED_CLUSTERS); } private final long numberOfFailedFollowIndices; private final long numberOfFailedRemoteClusterStateRequests; private final long numberOfSuccessfulFollowIndices; private final NavigableMap recentAutoFollowErrors; + private final NavigableMap autoFollowedClusters; AutoFollowStats(long numberOfFailedFollowIndices, long numberOfFailedRemoteClusterStateRequests, long numberOfSuccessfulFollowIndices, - NavigableMap recentAutoFollowErrors) { + NavigableMap recentAutoFollowErrors, + NavigableMap autoFollowedClusters) { this.numberOfFailedFollowIndices = numberOfFailedFollowIndices; this.numberOfFailedRemoteClusterStateRequests = numberOfFailedRemoteClusterStateRequests; this.numberOfSuccessfulFollowIndices = numberOfSuccessfulFollowIndices; this.recentAutoFollowErrors = recentAutoFollowErrors; + this.autoFollowedClusters = autoFollowedClusters; } public long getNumberOfFailedFollowIndices() { @@ -102,4 +124,27 @@ public final class AutoFollowStats { return recentAutoFollowErrors; } + public NavigableMap getAutoFollowedClusters() { + return autoFollowedClusters; + } + + public static class AutoFollowedCluster { + + private final long timeSinceLastCheckMillis; + private final long lastSeenMetadataVersion; + + public AutoFollowedCluster(long timeSinceLastCheckMillis, long lastSeenMetadataVersion) { + this.timeSinceLastCheckMillis = timeSinceLastCheckMillis; + this.lastSeenMetadataVersion = lastSeenMetadataVersion; + } + + public long getTimeSinceLastCheckMillis() { + return timeSinceLastCheckMillis; + } + + public long getLastSeenMetadataVersion() { + return lastSeenMetadataVersion; + } + } + } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java index 039e31151c4..8d53b5cde08 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ccr/CcrStatsResponseTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.client.ccr; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.client.ccr.AutoFollowStats.AutoFollowedCluster; import org.elasticsearch.client.ccr.IndicesFollowStats.ShardFollowStats; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -185,6 +186,19 @@ public class CcrStatsResponseTests extends ESTestCase { builder.endObject(); } builder.endArray(); + builder.startArray(AutoFollowStats.AUTO_FOLLOWED_CLUSTERS.getPreferredName()); + for (Map.Entry entry : autoFollowStats.getAutoFollowedClusters().entrySet()) { + builder.startObject(); + { + builder.field(AutoFollowStats.CLUSTER_NAME.getPreferredName(), entry.getKey()); + builder.field(AutoFollowStats.TIME_SINCE_LAST_CHECK_MILLIS.getPreferredName(), + entry.getValue().getTimeSinceLastCheckMillis()); + builder.field(AutoFollowStats.LAST_SEEN_METADATA_VERSION.getPreferredName(), + entry.getValue().getLastSeenMetadataVersion()); + 
} + builder.endObject(); + } + builder.endArray(); } builder.endObject(); @@ -315,11 +329,16 @@ public class CcrStatsResponseTests extends ESTestCase { for (int i = 0; i < count; i++) { readExceptions.put("" + i, new ElasticsearchException(new IllegalStateException("index [" + i + "]"))); } + final NavigableMap autoFollowClusters = new TreeMap<>(); + for (int i = 0; i < count; i++) { + autoFollowClusters.put("" + i, new AutoFollowedCluster(randomLong(), randomNonNegativeLong())); + } return new AutoFollowStats( randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), - readExceptions + readExceptions, + autoFollowClusters ); } diff --git a/docs/reference/ccr/apis/get-ccr-stats.asciidoc b/docs/reference/ccr/apis/get-ccr-stats.asciidoc index b8491e8a601..d849a99c459 100644 --- a/docs/reference/ccr/apis/get-ccr-stats.asciidoc +++ b/docs/reference/ccr/apis/get-ccr-stats.asciidoc @@ -105,7 +105,8 @@ The API returns the following results: "number_of_failed_follow_indices" : 0, "number_of_failed_remote_cluster_state_requests" : 0, "number_of_successful_follow_indices" : 1, - "recent_auto_follow_errors" : [] + "recent_auto_follow_errors" : [], + "auto_followed_clusters" : [] }, "follow_stats" : { "indices" : [ @@ -151,6 +152,7 @@ The API returns the following results: // TESTRESPONSE[s/"number_of_failed_remote_cluster_state_requests" : 0/"number_of_failed_remote_cluster_state_requests" : $body.auto_follow_stats.number_of_failed_remote_cluster_state_requests/] // TESTRESPONSE[s/"number_of_successful_follow_indices" : 1/"number_of_successful_follow_indices" : $body.auto_follow_stats.number_of_successful_follow_indices/] // TESTRESPONSE[s/"recent_auto_follow_errors" : \[\]/"recent_auto_follow_errors" : $body.auto_follow_stats.recent_auto_follow_errors/] +// TESTRESPONSE[s/"auto_followed_clusters" : \[\]/"auto_followed_clusters" : $body.auto_follow_stats.auto_followed_clusters/] // TESTRESPONSE[s/"leader_global_checkpoint" : 1024/"leader_global_checkpoint" : $body.follow_stats.indices.0.shards.0.leader_global_checkpoint/] // TESTRESPONSE[s/"leader_max_seq_no" : 1536/"leader_max_seq_no" : $body.follow_stats.indices.0.shards.0.leader_max_seq_no/] // TESTRESPONSE[s/"follower_global_checkpoint" : 768/"follower_global_checkpoint" : $body.follow_stats.indices.0.shards.0.follower_global_checkpoint/] diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index b25bd71c67f..70d4905d943 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -156,7 +156,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E return Arrays.asList( ccrLicenseChecker, - new AutoFollowCoordinator(client, clusterService, ccrLicenseChecker) + new AutoFollowCoordinator(client, clusterService, ccrLicenseChecker, threadPool::relativeTimeInMillis) ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 7900351105c..4888b0367fd 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -52,9 +52,12 @@ import java.util.TreeMap; import java.util.function.BiConsumer; import 
java.util.function.Consumer; import java.util.function.Function; +import java.util.function.LongSupplier; import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster; + /** * A component that runs only on the elected master node and follows leader indices automatically * if they match with a auto follow pattern that is defined in {@link AutoFollowMetadata}. @@ -67,6 +70,7 @@ public class AutoFollowCoordinator implements ClusterStateListener { private final Client client; private final ClusterService clusterService; private final CcrLicenseChecker ccrLicenseChecker; + private final LongSupplier relativeMillisTimeProvider; private volatile Map autoFollowers = Collections.emptyMap(); @@ -79,10 +83,13 @@ public class AutoFollowCoordinator implements ClusterStateListener { public AutoFollowCoordinator( Client client, ClusterService clusterService, - CcrLicenseChecker ccrLicenseChecker) { + CcrLicenseChecker ccrLicenseChecker, + LongSupplier relativeMillisTimeProvider) { + this.client = client; this.clusterService = clusterService; this.ccrLicenseChecker = Objects.requireNonNull(ccrLicenseChecker, "ccrLicenseChecker"); + this.relativeMillisTimeProvider = relativeMillisTimeProvider; clusterService.addListener(this); this.recentAutoFollowErrors = new LinkedHashMap() { @Override @@ -93,11 +100,26 @@ public class AutoFollowCoordinator implements ClusterStateListener { } public synchronized AutoFollowStats getStats() { + final Map autoFollowers = this.autoFollowers; + final TreeMap timesSinceLastAutoFollowPerRemoteCluster = new TreeMap<>(); + for (Map.Entry entry : autoFollowers.entrySet()) { + long lastAutoFollowTimeInMillis = entry.getValue().lastAutoFollowTimeInMillis; + long lastSeenMetadataVersion = entry.getValue().metadataVersion; + if (lastAutoFollowTimeInMillis != -1) { + long timeSinceLastCheckInMillis = relativeMillisTimeProvider.getAsLong() - lastAutoFollowTimeInMillis; + timesSinceLastAutoFollowPerRemoteCluster.put(entry.getKey(), + new AutoFollowedCluster(timeSinceLastCheckInMillis, lastSeenMetadataVersion)); + } else { + timesSinceLastAutoFollowPerRemoteCluster.put(entry.getKey(), new AutoFollowedCluster(-1L, lastSeenMetadataVersion)); + } + } + return new AutoFollowStats( numberOfFailedIndicesAutoFollowed, numberOfFailedRemoteClusterStateRequests, numberOfSuccessfulIndicesAutoFollowed, - new TreeMap<>(recentAutoFollowErrors) + new TreeMap<>(recentAutoFollowErrors), + timesSinceLastAutoFollowPerRemoteCluster ); } @@ -146,7 +168,8 @@ public class AutoFollowCoordinator implements ClusterStateListener { Map newAutoFollowers = new HashMap<>(newRemoteClusters.size()); for (String remoteCluster : newRemoteClusters) { - AutoFollower autoFollower = new AutoFollower(remoteCluster, this::updateStats, clusterService::state) { + AutoFollower autoFollower = + new AutoFollower(remoteCluster, this::updateStats, clusterService::state, relativeMillisTimeProvider) { @Override void getRemoteClusterState(final String remoteCluster, @@ -239,20 +262,25 @@ public class AutoFollowCoordinator implements ClusterStateListener { private final String remoteCluster; private final Consumer> statsUpdater; private final Supplier followerClusterStateSupplier; + private final LongSupplier relativeTimeProvider; + private volatile long lastAutoFollowTimeInMillis = -1; private volatile long metadataVersion = 0; private volatile CountDown autoFollowPatternsCountDown; private volatile AtomicArray autoFollowResults; 
AutoFollower(final String remoteCluster, final Consumer> statsUpdater, - final Supplier followerClusterStateSupplier) { + final Supplier followerClusterStateSupplier, + LongSupplier relativeTimeProvider) { this.remoteCluster = remoteCluster; this.statsUpdater = statsUpdater; this.followerClusterStateSupplier = followerClusterStateSupplier; + this.relativeTimeProvider = relativeTimeProvider; } void start() { + lastAutoFollowTimeInMillis = relativeTimeProvider.getAsLong(); final ClusterState clusterState = followerClusterStateSupplier.get(); final AutoFollowMetadata autoFollowMetadata = clusterState.metaData().custom(AutoFollowMetadata.TYPE); if (autoFollowMetadata == null) { diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 534397a0a9a..7228acaacf1 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -89,7 +89,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101")); assertThat(entries.get(0).getValue(), nullValue()); }; - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(currentState)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(currentState), () -> 1L) { @Override void getRemoteClusterState(String remoteCluster, long metadataVersion, @@ -154,7 +154,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(results.get(0).clusterStateFetchException, sameInstance(failure)); assertThat(results.get(0).autoFollowExecutionResults.entrySet().size(), equalTo(0)); }; - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) { @Override void getRemoteClusterState(String remoteCluster, long metadataVersion, @@ -209,7 +209,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101")); assertThat(entries.get(0).getValue(), sameInstance(failure)); }; - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) { @Override void getRemoteClusterState(String remoteCluster, long metadataVersion, @@ -266,7 +266,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101")); assertThat(entries.get(0).getValue(), sameInstance(failure)); }; - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(clusterState), () -> 1L) { @Override void getRemoteClusterState(String remoteCluster, long metadataVersion, @@ -532,8 +532,8 @@ public class AutoFollowCoordinatorTests extends ESTestCase { AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator( null, mock(ClusterService.class), - new CcrLicenseChecker(() -> true, () -> false) - ); + new CcrLicenseChecker(() -> true, () 
-> false), + () -> 1L); autoFollowCoordinator.updateStats(Collections.singletonList( new AutoFollowCoordinator.AutoFollowResult("_alias1")) @@ -585,6 +585,92 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias2:index2").getCause().getMessage(), equalTo("error")); } + public void testUpdateAutoFollowers() { + ClusterService clusterService = mock(ClusterService.class); + // Return a cluster state with no patterns so that the auto followers never really execute: + ClusterState followerState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))) + .build(); + when(clusterService.state()).thenReturn(followerState); + AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator( + null, + clusterService, + new CcrLicenseChecker(() -> true, () -> false), + () -> 1L); + // Add 3 patterns: + Map patterns = new HashMap<>(); + patterns.put("pattern1", new AutoFollowPattern("remote1", Collections.singletonList("logs-*"), null, null, null, + null, null, null, null, null, null, null, null)); + patterns.put("pattern2", new AutoFollowPattern("remote2", Collections.singletonList("logs-*"), null, null, null, + null, null, null, null, null, null, null, null)); + patterns.put("pattern3", new AutoFollowPattern("remote2", Collections.singletonList("metrics-*"), null, null, null, + null, null, null, null, null, null, null, null)); + ClusterState clusterState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap()))) + .build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(2)); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote1"), notNullValue()); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue()); + // Remove patterns 1 and 3: + patterns.remove("pattern1"); + patterns.remove("pattern3"); + clusterState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap()))) + .build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(1)); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue()); + // Add pattern 4: + patterns.put("pattern4", new AutoFollowPattern("remote1", Collections.singletonList("metrics-*"), null, null, null, + null, null, null, null, null, null, null, null)); + clusterState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap()))) + .build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(2)); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote1"), notNullValue()); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().get("remote2"), notNullValue()); + // Remove 
patterns 2 and 4: + patterns.remove("pattern2"); + patterns.remove("pattern4"); + clusterState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(patterns, Collections.emptyMap(), Collections.emptyMap()))) + .build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0)); + } + + public void testUpdateAutoFollowersNoPatterns() { + AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator( + null, + mock(ClusterService.class), + new CcrLicenseChecker(() -> true, () -> false), + () -> 1L); + ClusterState clusterState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))) + .build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0)); + } + + public void testUpdateAutoFollowersNoAutoFollowMetadata() { + AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator( + null, + mock(ClusterService.class), + new CcrLicenseChecker(() -> true, () -> false), + () -> 1L); + ClusterState clusterState = ClusterState.builder(new ClusterName("remote")).build(); + autoFollowCoordinator.updateAutoFollowers(clusterState); + assertThat(autoFollowCoordinator.getStats().getAutoFollowedClusters().size(), equalTo(0)); + } + public void testWaitForMetadataVersion() { Client client = mock(Client.class); when(client.getRemoteClusterClient(anyString())).thenReturn(client); @@ -611,7 +697,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { List allResults = new ArrayList<>(); Consumer> handler = allResults::addAll; - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states), () -> 1L) { long previousRequestedMetadataVersion = 0; @@ -669,7 +755,7 @@ public class AutoFollowCoordinatorTests extends ESTestCase { fail("should not be invoked"); }; AtomicInteger counter = new AtomicInteger(); - AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states)) { + AutoFollower autoFollower = new AutoFollower("remote", handler, localClusterStateSupplier(states), () -> 1L) { long previousRequestedMetadataVersion = 0; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java index c651cca5b6a..41e771ac97e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsResponseTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction; import org.elasticsearch.xpack.core.ccr.action.CcrStatsAction; import static org.elasticsearch.xpack.ccr.action.AutoFollowStatsTests.randomReadExceptions; +import static org.elasticsearch.xpack.ccr.action.AutoFollowStatsTests.randomTrackingClusters; import static org.elasticsearch.xpack.ccr.action.StatsResponsesTests.createStatsResponse; public class AutoFollowStatsResponseTests extends AbstractWireSerializingTestCase { @@ 
-27,7 +28,8 @@ public class AutoFollowStatsResponseTests extends AbstractWireSerializingTestCas randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), - randomReadExceptions() + randomReadExceptions(), + randomTrackingClusters() ); FollowStatsAction.StatsResponses statsResponse = createStatsResponse(); return new CcrStatsAction.Response(autoFollowStats, statsResponse); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java index c4a61529f49..61b92b485c1 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ccr.AutoFollowStats; +import org.elasticsearch.xpack.core.ccr.AutoFollowStats.AutoFollowedCluster; import java.io.IOException; import java.util.Map; @@ -34,7 +35,8 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase randomTrackingClusters() { + final int count = randomIntBetween(0, 16); + final NavigableMap readExceptions = new TreeMap<>(); + for (int i = 0; i < count; i++) { + readExceptions.put("" + i, new AutoFollowedCluster(randomLong(), randomNonNegativeLong())); + } + return readExceptions; + } + @Override protected Writeable.Reader instanceReader() { return AutoFollowStats::new; @@ -56,6 +67,11 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase entry : newInstance.getRecentAutoFollowErrors().entrySet()) { @@ -68,6 +84,8 @@ public class AutoFollowStatsTests extends AbstractSerializingTestCase(Collections.singletonMap( randomAlphaOfLength(4), new ElasticsearchException("cannot follow index"))); + + final NavigableMap trackingClusters = + new TreeMap<>(Collections.singletonMap( + randomAlphaOfLength(4), + new AutoFollowedCluster(1L, 1L))); final AutoFollowStats autoFollowStats = - new AutoFollowStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), recentAutoFollowExceptions); + new AutoFollowStats(randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong(), recentAutoFollowExceptions, + trackingClusters); final AutoFollowStatsMonitoringDoc document = new AutoFollowStatsMonitoringDoc("_cluster", timestamp, intervalMillis, node, autoFollowStats); @@ -99,7 +107,7 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase + "\"ccr_auto_follow_stats\":{" + "\"number_of_failed_follow_indices\":" + autoFollowStats.getNumberOfFailedFollowIndices() + "," + "\"number_of_failed_remote_cluster_state_requests\":" + - autoFollowStats.getNumberOfFailedRemoteClusterStateRequests() + "," + autoFollowStats.getNumberOfFailedRemoteClusterStateRequests() + "," + "\"number_of_successful_follow_indices\":" + autoFollowStats.getNumberOfSuccessfulFollowIndices() + "," + "\"recent_auto_follow_errors\":[" + "{" @@ -109,6 +117,15 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase + "\"reason\":\"cannot follow index\"" + "}" + "}" + + "]," + + "\"auto_followed_clusters\":[" + + "{" + + "\"cluster_name\":\"" + trackingClusters.keySet().iterator().next() + "\"," + + "\"time_since_last_check_millis\":" + + 
trackingClusters.values().iterator().next().getTimeSinceLastCheckMillis() + "," + + "\"last_seen_metadata_version\":" + + trackingClusters.values().iterator().next().getLastSeenMetadataVersion() + + "}" + "]" + "}" + "}")); @@ -117,7 +134,11 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase public void testShardFollowNodeTaskStatusFieldsMapped() throws IOException { final NavigableMap fetchExceptions = new TreeMap<>(Collections.singletonMap("leader_index", new ElasticsearchException("cannot follow index"))); - final AutoFollowStats status = new AutoFollowStats(1, 0, 2, fetchExceptions); + final NavigableMap trackingClusters = + new TreeMap<>(Collections.singletonMap( + randomAlphaOfLength(4), + new AutoFollowedCluster(1L, 1L))); + final AutoFollowStats status = new AutoFollowStats(1, 0, 2, fetchExceptions, trackingClusters); XContentBuilder builder = jsonBuilder(); builder.value(status); Map serializedStatus = XContentHelper.convertToMap(XContentType.JSON.xContent(), Strings.toString(builder), false); @@ -142,18 +163,28 @@ public class AutoFollowStatsMonitoringDocTests extends BaseMonitoringDocTestCase assertThat("expected keyword field type for field [" + fieldName + "]", fieldType, anyOf(equalTo("keyword"), equalTo("text"))); } else { + Map innerFieldValue = (Map) ((List) fieldValue).get(0); // Manual test specific object fields and if not just fail: if (fieldName.equals("recent_auto_follow_errors")) { assertThat(fieldType, equalTo("nested")); - assertThat(((Map) fieldMapping.get("properties")).size(), equalTo(2)); + assertThat(((Map) fieldMapping.get("properties")).size(), equalTo(innerFieldValue.size())); assertThat(XContentMapValues.extractValue("properties.leader_index.type", fieldMapping), equalTo("keyword")); assertThat(XContentMapValues.extractValue("properties.auto_follow_exception.type", fieldMapping), equalTo("object")); + innerFieldValue = (Map) innerFieldValue.get("auto_follow_exception"); Map exceptionFieldMapping = (Map) XContentMapValues.extractValue("properties.auto_follow_exception.properties", fieldMapping); - assertThat(exceptionFieldMapping.size(), equalTo(2)); + assertThat(exceptionFieldMapping.size(), equalTo(innerFieldValue.size())); assertThat(XContentMapValues.extractValue("type.type", exceptionFieldMapping), equalTo("keyword")); assertThat(XContentMapValues.extractValue("reason.type", exceptionFieldMapping), equalTo("text")); + } else if (fieldName.equals("auto_followed_clusters")) { + assertThat(fieldType, equalTo("nested")); + Map innerFieldMapping = ((Map) fieldMapping.get("properties")); + assertThat(innerFieldMapping.size(), equalTo(innerFieldValue.size())); + + assertThat(XContentMapValues.extractValue("cluster_name.type", innerFieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("time_since_last_check_millis.type", innerFieldMapping), equalTo("long")); + assertThat(XContentMapValues.extractValue("last_seen_metadata_version.type", innerFieldMapping), equalTo("long")); } else { fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]"); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java index 6f28c450f04..032cedbdcdf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java @@ -6,6 +6,7 @@ 
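/*
 * The `() -> 1L` argument that the tests above now pass to AutoFollowCoordinator
 * and AutoFollower is a time supplier. A plausible sketch of how such a supplier
 * can drive the per-cluster time_since_last_check_millis statistic (the field and
 * method names in this sketch are illustrative assumptions, not code from this
 * patch):
 *
 *     private final LongSupplier relativeTimeProvider;   // () -> 1L in the tests
 *     private long lastCheckTimeMillis = -1;             // no check completed yet
 *
 *     long timeSinceLastCheckMillis() {
 *         return lastCheckTimeMillis == -1 ? -1
 *             : relativeTimeProvider.getAsLong() - lastCheckTimeMillis;
 *     }
 *
 * AutoFollowedCluster.writeTo(), further below, serializes this value with
 * writeZLong rather than writeVLong, which fits a value that can be negative
 * before the first check has completed.
 */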
package org.elasticsearch.xpack.core.ccr; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -17,6 +18,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.AbstractMap; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.NavigableMap; @@ -33,6 +35,10 @@ public class AutoFollowStats implements Writeable, ToXContentObject { private static final ParseField RECENT_AUTO_FOLLOW_ERRORS = new ParseField("recent_auto_follow_errors"); private static final ParseField LEADER_INDEX = new ParseField("leader_index"); private static final ParseField AUTO_FOLLOW_EXCEPTION = new ParseField("auto_follow_exception"); + private static final ParseField AUTO_FOLLOWED_CLUSTERS = new ParseField("auto_followed_clusters"); + private static final ParseField CLUSTER_NAME = new ParseField("cluster_name"); + private static final ParseField TIME_SINCE_LAST_CHECK_MILLIS = new ParseField("time_since_last_check_millis"); + private static final ParseField LAST_SEEN_METADATA_VERSION = new ParseField("last_seen_metadata_version"); @SuppressWarnings("unchecked") private static final ConstructingObjectParser STATS_PARSER = new ConstructingObjectParser<>("auto_follow_stats", @@ -43,26 +49,39 @@ public class AutoFollowStats implements Writeable, ToXContentObject { new TreeMap<>( ((List>) args[3]) .stream() - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) - )); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))), + new TreeMap<>( + ((List>) args[4]) + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))))); private static final ConstructingObjectParser, Void> AUTO_FOLLOW_EXCEPTIONS_PARSER = new ConstructingObjectParser<>( "auto_follow_stats_errors", args -> new AbstractMap.SimpleEntry<>((String) args[0], (ElasticsearchException) args[1])); + private static final ConstructingObjectParser, Void> AUTO_FOLLOWED_CLUSTERS_PARSER = + new ConstructingObjectParser<>( + "auto_followed_clusters", + args -> new AbstractMap.SimpleEntry<>((String) args[0], new AutoFollowedCluster((Long) args[1], (Long) args[2]))); + static { AUTO_FOLLOW_EXCEPTIONS_PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_INDEX); AUTO_FOLLOW_EXCEPTIONS_PARSER.declareObject( ConstructingObjectParser.constructorArg(), (p, c) -> ElasticsearchException.fromXContent(p), AUTO_FOLLOW_EXCEPTION); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareString(ConstructingObjectParser.constructorArg(), CLUSTER_NAME); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_SINCE_LAST_CHECK_MILLIS); + AUTO_FOLLOWED_CLUSTERS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), LAST_SEEN_METADATA_VERSION); STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED); STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS); STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED); STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOW_EXCEPTIONS_PARSER, RECENT_AUTO_FOLLOW_ERRORS); + STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOWED_CLUSTERS_PARSER, + 
AUTO_FOLLOWED_CLUSTERS); } public static AutoFollowStats fromXContent(final XContentParser parser) { @@ -73,24 +92,32 @@ public class AutoFollowStats implements Writeable, ToXContentObject { private final long numberOfFailedRemoteClusterStateRequests; private final long numberOfSuccessfulFollowIndices; private final NavigableMap recentAutoFollowErrors; + private final NavigableMap autoFollowedClusters; public AutoFollowStats( - long numberOfFailedFollowIndices, - long numberOfFailedRemoteClusterStateRequests, - long numberOfSuccessfulFollowIndices, - NavigableMap recentAutoFollowErrors + long numberOfFailedFollowIndices, + long numberOfFailedRemoteClusterStateRequests, + long numberOfSuccessfulFollowIndices, + NavigableMap recentAutoFollowErrors, + NavigableMap autoFollowedClusters ) { this.numberOfFailedFollowIndices = numberOfFailedFollowIndices; this.numberOfFailedRemoteClusterStateRequests = numberOfFailedRemoteClusterStateRequests; this.numberOfSuccessfulFollowIndices = numberOfSuccessfulFollowIndices; this.recentAutoFollowErrors = recentAutoFollowErrors; + this.autoFollowedClusters = autoFollowedClusters; } public AutoFollowStats(StreamInput in) throws IOException { numberOfFailedFollowIndices = in.readVLong(); numberOfFailedRemoteClusterStateRequests = in.readVLong(); numberOfSuccessfulFollowIndices = in.readVLong(); - recentAutoFollowErrors= new TreeMap<>(in.readMap(StreamInput::readString, StreamInput::readException)); + recentAutoFollowErrors = new TreeMap<>(in.readMap(StreamInput::readString, StreamInput::readException)); + if (in.getVersion().onOrAfter(Version.V_6_6_0)) { + autoFollowedClusters = new TreeMap<>(in.readMap(StreamInput::readString, AutoFollowedCluster::new)); + } else { + autoFollowedClusters = Collections.emptyNavigableMap(); + } } @Override @@ -99,6 +126,9 @@ public class AutoFollowStats implements Writeable, ToXContentObject { out.writeVLong(numberOfFailedRemoteClusterStateRequests); out.writeVLong(numberOfSuccessfulFollowIndices); out.writeMap(recentAutoFollowErrors, StreamOutput::writeString, StreamOutput::writeException); + if (out.getVersion().onOrAfter(Version.V_6_6_0)) { + out.writeMap(autoFollowedClusters, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); + } } public long getNumberOfFailedFollowIndices() { @@ -117,6 +147,10 @@ public class AutoFollowStats implements Writeable, ToXContentObject { return recentAutoFollowErrors; } + public NavigableMap getAutoFollowedClusters() { + return autoFollowedClusters; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -148,6 +182,19 @@ public class AutoFollowStats implements Writeable, ToXContentObject { } } builder.endArray(); + builder.startArray(AUTO_FOLLOWED_CLUSTERS.getPreferredName()); + { + for (final Map.Entry entry : autoFollowedClusters.entrySet()) { + builder.startObject(); + { + builder.field(CLUSTER_NAME.getPreferredName(), entry.getKey()); + builder.field(TIME_SINCE_LAST_CHECK_MILLIS.getPreferredName(), entry.getValue().getTimeSinceLastCheckMillis()); + builder.field(LAST_SEEN_METADATA_VERSION.getPreferredName(), entry.getValue().getLastSeenMetadataVersion()); + } + builder.endObject(); + } + } + builder.endArray(); return builder; } @@ -165,7 +212,8 @@ public class AutoFollowStats implements Writeable, ToXContentObject { * keys. 
              */
             recentAutoFollowErrors.keySet().equals(that.recentAutoFollowErrors.keySet()) &&
-            getFetchExceptionMessages(this).equals(getFetchExceptionMessages(that));
+            getFetchExceptionMessages(this).equals(getFetchExceptionMessages(that)) &&
+            Objects.equals(autoFollowedClusters, that.autoFollowedClusters);
     }
@@ -179,7 +227,8 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
              * messages. Note that we are relying on the fact that the auto follow exceptions are ordered by keys.
              */
             recentAutoFollowErrors.keySet(),
-            getFetchExceptionMessages(this)
+            getFetchExceptionMessages(this),
+            autoFollowedClusters
         );
     }
@@ -194,6 +243,58 @@ public class AutoFollowStats implements Writeable, ToXContentObject {
             ", numberOfFailedRemoteClusterStateRequests=" + numberOfFailedRemoteClusterStateRequests +
             ", numberOfSuccessfulFollowIndices=" + numberOfSuccessfulFollowIndices +
             ", recentAutoFollowErrors=" + recentAutoFollowErrors +
+            ", autoFollowedClusters=" + autoFollowedClusters +
             '}';
     }
+
+    public static class AutoFollowedCluster implements Writeable {
+
+        private final long timeSinceLastCheckMillis;
+        private final long lastSeenMetadataVersion;
+
+        public AutoFollowedCluster(long timeSinceLastCheckMillis, long lastSeenMetadataVersion) {
+            this.timeSinceLastCheckMillis = timeSinceLastCheckMillis;
+            this.lastSeenMetadataVersion = lastSeenMetadataVersion;
+        }
+
+        public AutoFollowedCluster(StreamInput in) throws IOException {
+            this(in.readZLong(), in.readVLong());
+        }
+
+        public long getTimeSinceLastCheckMillis() {
+            return timeSinceLastCheckMillis;
+        }
+
+        public long getLastSeenMetadataVersion() {
+            return lastSeenMetadataVersion;
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeZLong(timeSinceLastCheckMillis);
+            out.writeVLong(lastSeenMetadataVersion);
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            AutoFollowedCluster that = (AutoFollowedCluster) o;
+            return timeSinceLastCheckMillis == that.timeSinceLastCheckMillis &&
+                lastSeenMetadataVersion == that.lastSeenMetadataVersion;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(timeSinceLastCheckMillis, lastSeenMetadataVersion);
+        }
+
+        @Override
+        public String toString() {
+            return "AutoFollowedCluster{" +
+                "timeSinceLastCheckMillis=" + timeSinceLastCheckMillis +
+                ", lastSeenMetadataVersion=" + lastSeenMetadataVersion +
+                '}';
+        }
+    }
 }
diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json
index 1e6d3ec892a..c34fed37516 100644
--- a/x-pack/plugin/core/src/main/resources/monitoring-es.json
+++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json
@@ -1060,6 +1060,20 @@
             }
           }
         }
+        },
+        "auto_followed_clusters": {
+          "type": "nested",
+          "properties": {
+            "cluster_name": {
+              "type": "keyword"
+            },
+            "time_since_last_check_millis": {
+              "type": "long"
+            },
+            "last_seen_metadata_version": {
+              "type": "long"
+            }
+          }
+        }
       }
     }
   }

From e356b8cb958cba03050fa698865c465b3bf77267 Mon Sep 17 00:00:00 2001
From: Boaz Leskes
Date: Mon, 17 Dec 2018 15:22:13 +0100
Subject: [PATCH 07/26] Add doc's sequence number + primary term to GetResult
 and use it for updates (#36680)

This commit adds the sequence number and primary term of the last operation
that modified a document to `GetResult` and uses it to power the Update API.
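At the API level the change is additive: `GetResult` gains `_seq_no` and
`_primary_term` fields, its constructor takes them alongside the version, and a
result for a missing document uses sentinel values. A minimal usage sketch
against the constructor as changed in this patch (the literal values are
illustrative):

[source,java]
--------------------------------------------------
// Found document: carries the seqNo/primaryTerm of the operation that
// last changed it.
GetResult found = new GetResult("index", "_doc", "1",
    0L, 1L, 1L, true, new BytesArray("{\"user\":\"kimchy\"}"), Collections.emptyMap());

// Missing document: uses the sentinel pair UNASSIGNED_SEQ_NO / 0.
GetResult missing = new GetResult("index", "_doc", "2",
    SequenceNumbers.UNASSIGNED_SEQ_NO, 0L, -1L, false, null, null);

assert found.getSeqNo() == 0L && found.getPrimaryTerm() == 1L;
assert missing.getSeqNo() == SequenceNumbers.UNASSIGNED_SEQ_NO;
--------------------------------------------------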
Relates #36148 Relates #10708 --- docs/plugins/ingest-attachment.asciidoc | 16 +++-- docs/plugins/ingest-geoip.asciidoc | 12 +++- docs/plugins/ingest-user-agent.asciidoc | 4 +- docs/reference/docs/get.asciidoc | 12 +++- docs/reference/docs/reindex.asciidoc | 4 +- docs/reference/getting-started.asciidoc | 6 +- docs/reference/ingest/ingest-node.asciidoc | 24 ++++--- .../PercolateQueryBuilderTests.java | 6 +- .../action/bulk/TransportShardBulkAction.java | 4 +- .../elasticsearch/action/get/GetResponse.java | 14 ++++ .../action/update/TransportUpdateAction.java | 19 +++--- .../action/update/UpdateHelper.java | 14 ++-- .../action/update/UpdateResponse.java | 5 +- .../elasticsearch/index/get/GetResult.java | 58 +++++++++++++++-- .../index/get/ShardGetService.java | 8 ++- .../action/explain/ExplainResponseTests.java | 6 +- .../action/get/GetResponseTests.java | 22 ++++--- .../action/get/MultiGetResponseTests.java | 2 +- .../action/update/UpdateRequestTests.java | 23 +++---- .../action/update/UpdateResponseTests.java | 5 +- .../index/get/GetResultTests.java | 65 ++++++++++++------- .../query/GeoShapeQueryBuilderTests.java | 3 +- .../index/query/TermsQueryBuilderTests.java | 3 +- .../document/RestGetSourceActionTests.java | 10 ++- .../versioning/SimpleVersioningIT.java | 12 ++-- .../authc/esnative/NativeUsersStoreTests.java | 7 +- .../store/NativePrivilegeStoreTests.java | 5 +- .../execution/ExecutionServiceTests.java | 9 ++- .../ack/TransportAckWatchActionTests.java | 3 +- 29 files changed, 262 insertions(+), 119 deletions(-) diff --git a/docs/plugins/ingest-attachment.asciidoc b/docs/plugins/ingest-attachment.asciidoc index 2f9564294d0..a3d716ff2d9 100644 --- a/docs/plugins/ingest-attachment.asciidoc +++ b/docs/plugins/ingest-attachment.asciidoc @@ -63,6 +63,8 @@ Returns this: "_type": "_doc", "_id": "my_id", "_version": 1, + "_seq_no": 22, + "_primary_term": 1, "_source": { "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=", "attachment": { @@ -74,7 +76,7 @@ Returns this: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] To specify only some fields to be extracted: @@ -146,6 +148,8 @@ Returns this: "_type": "_doc", "_id": "my_id", "_version": 1, + "_seq_no": 35, + "_primary_term": 1, "_source": { "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=", "attachment": { @@ -157,7 +161,7 @@ Returns this: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] [source,js] @@ -194,6 +198,8 @@ Returns this: "_type": "_doc", "_id": "my_id_2", "_version": 1, + "_seq_no": 40, + "_primary_term": 1, "_source": { "data": "e1xydGYxXGFuc2kNCkxvcmVtIGlwc3VtIGRvbG9yIHNpdCBhbWV0DQpccGFyIH0=", "max_size": 5, @@ -206,7 +212,7 @@ Returns this: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] [[ingest-attachment-with-arrays]] @@ -285,6 +291,8 @@ Returns this: "_type" : "_doc", "_id" : "my_id", "_version" : 1, + "_seq_no" : 50, + "_primary_term" : 1, "found" : true, "_source" : { "attachments" : [ @@ -312,7 +320,7 @@ Returns this: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : 
$body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Note that the `target_field` needs to be set, otherwise the diff --git a/docs/plugins/ingest-geoip.asciidoc b/docs/plugins/ingest-geoip.asciidoc index 5d22a31baa8..f4795f6620a 100644 --- a/docs/plugins/ingest-geoip.asciidoc +++ b/docs/plugins/ingest-geoip.asciidoc @@ -75,6 +75,8 @@ Which returns: "_type": "_doc", "_id": "my_id", "_version": 1, + "_seq_no": 55, + "_primary_term": 1, "_source": { "ip": "8.8.8.8", "geoip": { @@ -85,7 +87,7 @@ Which returns: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term":1/"_primary_term" : $body._primary_term/] Here is an example that uses the default country database and adds the geographical information to the `geo` field based on the `ip` field`. Note that @@ -124,6 +126,8 @@ returns this: "_type": "_doc", "_id": "my_id", "_version": 1, + "_seq_no": 65, + "_primary_term": 1, "_source": { "ip": "8.8.8.8", "geo": { @@ -133,7 +137,7 @@ returns this: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Not all IP addresses find geo information from the database, When this @@ -174,13 +178,15 @@ Which returns: "_type" : "_doc", "_id" : "my_id", "_version" : 1, + "_seq_no" : 71, + "_primary_term": 1, "found" : true, "_source" : { "ip" : "80.231.5.0" } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] [[ingest-geoip-mappings-note]] ===== Recognizing Location as a Geopoint diff --git a/docs/plugins/ingest-user-agent.asciidoc b/docs/plugins/ingest-user-agent.asciidoc index 57594eab573..a0e6d3257f3 100644 --- a/docs/plugins/ingest-user-agent.asciidoc +++ b/docs/plugins/ingest-user-agent.asciidoc @@ -57,6 +57,8 @@ Which returns "_type": "_doc", "_id": "my_id", "_version": 1, + "_seq_no": 22, + "_primary_term": 1, "_source": { "agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36", "user_agent": { @@ -73,7 +75,7 @@ Which returns } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term": 1/"_primary_term" : $body._primary_term/] ===== Using a custom regex file To use a custom regex file for parsing the user agents, that file has to be put into the `config/ingest-user-agent` directory and diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc index ec6ef28534f..3ab5fa11fa1 100644 --- a/docs/reference/docs/get.asciidoc +++ b/docs/reference/docs/get.asciidoc @@ -21,6 +21,8 @@ The result of the above get operation is: "_type" : "_doc", "_id" : "0", "_version" : 1, + "_seq_no" : 10, + "_primary_term" : 1, "found": true, "_source" : { "user" : "kimchy", @@ -30,7 +32,7 @@ The result of the above get operation is: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] The above result includes the `_index`, `_type`, `_id` and `_version` of the document we wish to retrieve, including the actual `_source` @@ -156,6 +158,8 @@ The result of the above get operation is: "_type": "_doc", "_id": 
"1", "_version": 1, + "_seq_no" : 22, + "_primary_term" : 1, "found": true, "fields": { "tags": [ @@ -164,7 +168,7 @@ The result of the above get operation is: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Field values fetched from the document itself are always returned as an array. @@ -199,6 +203,8 @@ The result of the above get operation is: "_type": "_doc", "_id": "2", "_version": 1, + "_seq_no" : 13, + "_primary_term" : 1, "_routing": "user1", "found": true, "fields": { @@ -208,7 +214,7 @@ The result of the above get operation is: } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Also only leaf fields can be returned via the `stored_field` option. So object fields can't be returned and such requests will fail. diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index 3dd4a98e99f..642bcb20518 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -870,13 +870,15 @@ which will return: "_index": "test2", "_type": "_doc", "_version": 1, + "_seq_no": 44, + "_primary_term": 1, "_source": { "text": "words words", "tag": "foo" } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term": 1/"_primary_term" : $body._primary_term/] [float] [[docs-reindex-slice]] diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index a0466290696..ff1bdd6b7a6 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -421,7 +421,7 @@ And the response: "_primary_term" : 1 } -------------------------------------------------- -// TESTRESPONSE[s/"_seq_no" : 0/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] From the above, we can see that a new customer document was successfully created inside the customer index. The document also has an internal id of 1 which we specified at index time. @@ -445,11 +445,13 @@ And the response: "_type" : "_doc", "_id" : "1", "_version" : 1, + "_seq_no" : 25, + "_primary_term" : 1, "found" : true, "_source" : { "name": "John Doe" } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Nothing out of the ordinary here other than a field, `found`, stating that we found a document with the requested ID 1 and another field, `_source`, which returns the full JSON document that we indexed from the previous step. 
diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 584a2c77315..2f422a1f4f7 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -730,13 +730,15 @@ GET test/_doc/2 "_type": "_doc", "_id": "2", "_version": 1, + "_seq_no": 22, + "_primary_term": 1, "found": true, "_source": { "foo": "bar" } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term": 1/"_primary_term" : $body._primary_term/] //// The source document can also use dot delimited fields to represent nested fields. @@ -967,6 +969,8 @@ GET test/_doc/2 "_type": "_doc", "_id": "2", "_version": 1, + "_seq_no": 34, + "_primary_term": 1, "found": true, "_source": { "tags": [ @@ -976,7 +980,7 @@ GET test/_doc/2 } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] //// @@ -1088,6 +1092,8 @@ GET test/_doc/1 "_type": "_doc", "_id": "1", "_version": 1, + "_seq_no": 60, + "_primary_term": 1, "found": true, "_source": { "href": { @@ -1097,7 +1103,7 @@ GET test/_doc/1 } } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Regular expressions can be expensive and should be avoided if viable @@ -1548,11 +1554,11 @@ PUT /myindex/_doc/1?pipeline=monthlyindex "successful" : 1, "failed" : 0 }, - "_seq_no" : 0, + "_seq_no" : 55, "_primary_term" : 1 } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] The above request will not index this document into the `myindex` index, but into the `myindex-2016-04-01` index because @@ -2787,11 +2793,11 @@ Response from the index request: "successful": 1, "failed": 0 }, - "_seq_no": 0, + "_seq_no": 66, "_primary_term": 1, } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] Indexed document: @@ -2963,11 +2969,11 @@ The response from the above index request: "successful": 1, "failed": 0 }, - "_seq_no": 0, + "_seq_no": 89, "_primary_term": 1, } -------------------------------------------------- -// TESTRESPONSE +// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/] In the above response, you can see that our document was actually indexed into `my_index` instead of `any_index`. 
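The test updates in the remaining files are mostly mechanical: every
`GetResult` construction gains a sequence number and primary term. Which
combinations are legal is pinned down by the assertions this patch adds to the
`GetResult` constructor (quoted from the hunk further below):

[source,java]
--------------------------------------------------
assert (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && primaryTerm == 0) || (seqNo >= 0 && primaryTerm >= 1) :
    "seqNo: " + seqNo + " primaryTerm: " + primaryTerm;
assert exists || (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && primaryTerm == 0) :
    "doc not found but seqNo/primaryTerm are set";
--------------------------------------------------

In other words, a missing document must use `UNASSIGNED_SEQ_NO` and `0`, while
a found document normally carries `seqNo >= 0` and `primaryTerm >= 1`; the
sentinel pair remains legal for a found document (for example, one read from an
old node), which is exactly what tests such as `RestGetSourceActionTests` and
`UpdateRequestTests` exercise below.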
This type of manipulation is often convenient in pipelines that have various branches of transformation, diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index be9c3f83f3f..d4fe0fe1ddd 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -54,6 +54,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.Matchers.equalTo; public class PercolateQueryBuilderTests extends AbstractQueryTestCase { @@ -152,12 +153,13 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true, updateIndexRequest.getContentType()); updateResponse.setGetResult(UpdateHelper.extractGetResult(updateRequest, concreteIndex, + indexResponse.getSeqNo(), indexResponse.getPrimaryTerm(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes)); } } else if (translatedResult == DocWriteResponse.Result.DELETED) { @@ -315,7 +316,8 @@ public class TransportShardBulkAction extends TransportWriteAction> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true, upsertRequest.getContentType()); - update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), - sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes)); + update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), + response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), sourceAndContent.v2(), + sourceAndContent.v1(), upsertSourceBytes)); } else { update.setGetResult(null); } @@ -205,7 +206,8 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), response.getResult()); - update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), + update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), + response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes)); update.setForcedRefresh(response.forcedRefresh()); listener.onResponse(update); @@ -216,10 +218,11 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio DeleteRequest deleteRequest = result.action(); client.bulk(toSingleItemBulkRequest(deleteRequest), wrapBulkResponse( ActionListener.wrap(response -> { - UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), - response.getType(), response.getId(), response.getSeqNo(), response.getPrimaryTerm(), - response.getVersion(), response.getResult()); - update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), + UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), + response.getId(), response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), + 
response.getResult()); + update.setGetResult(UpdateHelper.extractGetResult(request, request.concreteIndex(), + response.getSeqNo(), response.getPrimaryTerm(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null)); update.setForcedRefresh(response.forcedRefresh()); listener.onResponse(update); diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 3ef89b997a1..255161c8f32 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -209,8 +209,8 @@ public class UpdateHelper { if (detectNoop && noop) { UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); - update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, - updateSourceContentType, getResult.internalSourceRef())); + update.setGetResult(extractGetResult(request, request.index(), getResult.getSeqNo(), getResult.getPrimaryTerm(), + getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType); } else { final IndexRequest finalIndexRequest = Requests.indexRequest(request.index()) @@ -270,10 +270,9 @@ public class UpdateHelper { // If it was neither an INDEX or DELETE operation, treat it as a noop UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), DocWriteResponse.Result.NOOP); - update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, - updateSourceContentType, getResult.internalSourceRef())); + update.setGetResult(extractGetResult(request, request.index(), getResult.getSeqNo(), getResult.getPrimaryTerm(), + getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef())); return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType); - } } @@ -293,7 +292,7 @@ public class UpdateHelper { /** * Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response. 
*/ - public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long version, + public static GetResult extractGetResult(final UpdateRequest request, String concreteIndex, long seqNo, long primaryTerm, long version, final Map source, XContentType sourceContentType, @Nullable final BytesReference sourceAsBytes) { if (request.fetchSource() == null || request.fetchSource().fetchSource() == false) { @@ -318,7 +317,8 @@ public class UpdateHelper { } // TODO when using delete/none, we can still return the source as bytes by generating it (using the sourceContentType) - return new GetResult(concreteIndex, request.type(), request.id(), version, true, sourceFilteredAsBytes, Collections.emptyMap()); + return new GetResult(concreteIndex, request.type(), request.id(), seqNo, primaryTerm, version, true, sourceFilteredAsBytes, + Collections.emptyMap()); } public static class Result { diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java index 9e33e62622a..03d721b26fe 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateResponse.java @@ -162,8 +162,9 @@ public class UpdateResponse extends DocWriteResponse { update = new UpdateResponse(shardId, type, id, version, result); } if (getResult != null) { - update.setGetResult(new GetResult(update.getIndex(), update.getType(), update.getId(), update.getVersion(), - getResult.isExists(),getResult.internalSourceRef(), getResult.getFields())); + update.setGetResult(new GetResult(update.getIndex(), update.getType(), update.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), update.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); } update.setForcedRefresh(forcedRefresh); return update; diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index ba70c703550..b98d766dd4e 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.get; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressorFactory; @@ -33,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; @@ -53,6 +55,8 @@ public class GetResult implements Streamable, Iterable, ToXConten public static final String _TYPE = "_type"; public static final String _ID = "_id"; private static final String _VERSION = "_version"; + private static final String _SEQ_NO = "_seq_no"; + private static final String _PRIMARY_TERM = "_primary_term"; private static final String FOUND = "found"; private static final String FIELDS = "fields"; @@ -60,6 +64,8 @@ public class GetResult implements Streamable, Iterable, ToXConten private String type; private String id; private long version; + private long seqNo; + private long primaryTerm; 
private boolean exists; private Map fields; private Map sourceAsMap; @@ -69,11 +75,17 @@ public class GetResult implements Streamable, Iterable, ToXConten GetResult() { } - public GetResult(String index, String type, String id, long version, boolean exists, BytesReference source, - Map fields) { + public GetResult(String index, String type, String id, long seqNo, long primaryTerm, long version, boolean exists, + BytesReference source, Map fields) { this.index = index; this.type = type; this.id = id; + this.seqNo = seqNo; + this.primaryTerm = primaryTerm; + assert (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && primaryTerm == 0) || (seqNo >= 0 && primaryTerm >= 1) : + "seqNo: " + seqNo + " primaryTerm: " + primaryTerm; + assert exists || (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO && primaryTerm == 0) : + "doc not found but seqNo/primaryTerm are set"; this.version = version; this.exists = exists; this.source = source; @@ -118,6 +130,20 @@ public class GetResult implements Streamable, Iterable, ToXConten return version; } + /** + * The sequence number assigned to the last operation to have changed this document, if found. + */ + public long getSeqNo() { + return seqNo; + } + + /** + * The primary term of the last primary that has changed this document, if found. + */ + public long getPrimaryTerm() { + return primaryTerm; + } + /** * The source of the document if exists. */ @@ -213,6 +239,11 @@ public class GetResult implements Streamable, Iterable, ToXConten } public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params) throws IOException { + if (seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { // seqNo may not be assigned if read from an old node + builder.field(_SEQ_NO, seqNo); + builder.field(_PRIMARY_TERM, primaryTerm); + } + List metaFields = new ArrayList<>(); List otherFields = new ArrayList<>(); if (fields != null && !fields.isEmpty()) { @@ -282,6 +313,8 @@ public class GetResult implements Streamable, Iterable, ToXConten String currentFieldName = parser.currentName(); long version = -1; + long seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + long primaryTerm = 0; Boolean found = null; BytesReference source = null; Map fields = new HashMap<>(); @@ -297,6 +330,10 @@ public class GetResult implements Streamable, Iterable, ToXConten id = parser.text(); } else if (_VERSION.equals(currentFieldName)) { version = parser.longValue(); + } else if (_SEQ_NO.equals(currentFieldName)) { + seqNo = parser.longValue(); + } else if (_PRIMARY_TERM.equals(currentFieldName)) { + primaryTerm = parser.longValue(); } else if (FOUND.equals(currentFieldName)) { found = parser.booleanValue(); } else { @@ -326,7 +363,7 @@ public class GetResult implements Streamable, Iterable, ToXConten } } } - return new GetResult(index, type, id, version, found, source, fields); + return new GetResult(index, type, id, seqNo, primaryTerm, version, found, source, fields); } public static GetResult fromXContent(XContentParser parser) throws IOException { @@ -347,6 +384,13 @@ public class GetResult implements Streamable, Iterable, ToXConten index = in.readString(); type = in.readOptionalString(); id = in.readString(); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + seqNo = in.readZLong(); + primaryTerm = in.readVLong(); + } else { + seqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + primaryTerm = 0L; + } version = in.readLong(); exists = in.readBoolean(); if (exists) { @@ -372,6 +416,10 @@ public class GetResult implements Streamable, Iterable, ToXConten out.writeString(index); out.writeOptionalString(type); 
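        // BWC note: the seqNo/primaryTerm writes a few lines below are gated on
        // Version.V_7_0_0, mirroring readFrom() above: when the other side of the
        // stream is older, the two values are simply absent, and readers fall back
        // to SequenceNumbers.UNASSIGNED_SEQ_NO and a primary term of 0.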
out.writeString(id); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeZLong(seqNo); + out.writeVLong(primaryTerm); + } out.writeLong(version); out.writeBoolean(exists); if (exists) { @@ -397,6 +445,8 @@ public class GetResult implements Streamable, Iterable, ToXConten } GetResult getResult = (GetResult) o; return version == getResult.version && + seqNo == getResult.seqNo && + primaryTerm == getResult.primaryTerm && exists == getResult.exists && Objects.equals(index, getResult.index) && Objects.equals(type, getResult.type) && @@ -407,7 +457,7 @@ public class GetResult implements Streamable, Iterable, ToXConten @Override public int hashCode() { - return Objects.hash(version, exists, index, type, id, fields, sourceAsMap()); + return Objects.hash(version, seqNo, primaryTerm, exists, index, type, id, fields, sourceAsMap()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index fc1796dfcc5..6d58b981ddc 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -45,6 +45,7 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; @@ -112,7 +113,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { public GetResult get(Engine.GetResult engineGetResult, String id, String type, String[] fields, FetchSourceContext fetchSourceContext) { if (!engineGetResult.exists()) { - return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null); + return new GetResult(shardId.getIndexName(), type, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, -1, false, null, null); } currentMetric.inc(); @@ -168,7 +169,7 @@ public final class ShardGetService extends AbstractIndexShardComponent { } if (get == null || get.exists() == false) { - return new GetResult(shardId.getIndexName(), type, id, -1, false, null, null); + return new GetResult(shardId.getIndexName(), type, id, SequenceNumbers.UNASSIGNED_SEQ_NO, 0, -1, false, null, null); } try { @@ -233,7 +234,8 @@ public final class ShardGetService extends AbstractIndexShardComponent { } } - return new GetResult(shardId.getIndexName(), type, id, get.version(), get.exists(), source, fields); + return new GetResult(shardId.getIndexName(), type, id, get.docIdAndVersion().seqNo, get.docIdAndVersion().primaryTerm, + get.version(), get.exists(), source, fields); } private static FieldsVisitor buildFieldsVisitors(String[] fields, FetchSourceContext fetchSourceContext) { diff --git a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java index ca5c35ccab3..2a04a976677 100644 --- a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java @@ -65,7 +65,7 @@ public class ExplainResponseTests extends AbstractStreamableXContentTestCase nowInMillis); Streamable action = result.action(); assertThat(action, 
instanceOf(IndexRequest.class)); @@ -372,7 +373,7 @@ public class UpdateRequestTests extends ESTestCase { .script(mockInlineScript("ctx._timestamp = ctx._now")) .scriptedUpsert(true); // We simulate that the document is not existing yet - GetResult getResult = new GetResult("test", "type1", "2", 0, true, new BytesArray("{}"), null); + GetResult getResult = new GetResult("test", "type1", "2", 0, 1, 0, true, new BytesArray("{}"), null); UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> 42L); Streamable action = result.action(); assertThat(action, instanceOf(IndexRequest.class)); @@ -381,7 +382,7 @@ public class UpdateRequestTests extends ESTestCase { public void testIndexTimeout() { final GetResult getResult = - new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null); + new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null); final UpdateRequest updateRequest = new UpdateRequest("test", "type", "1") .script(mockInlineScript("return")) @@ -391,7 +392,7 @@ public class UpdateRequestTests extends ESTestCase { public void testDeleteTimeout() { final GetResult getResult = - new GetResult("test", "type", "1", 0, true, new BytesArray("{\"f\":\"v\"}"), null); + new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"f\":\"v\"}"), null); final UpdateRequest updateRequest = new UpdateRequest("test", "type", "1") .script(mockInlineScript("ctx.op = delete")) @@ -402,7 +403,7 @@ public class UpdateRequestTests extends ESTestCase { public void testUpsertTimeout() throws IOException { final boolean exists = randomBoolean(); final BytesReference source = exists ? new BytesArray("{\"f\":\"v\"}") : null; - final GetResult getResult = new GetResult("test", "type", "1", 0, exists, source, null); + final GetResult getResult = new GetResult("test", "type", "1", UNASSIGNED_SEQ_NO, 0, 0, exists, source, null); final XContentBuilder sourceBuilder = jsonBuilder(); sourceBuilder.startObject(); { @@ -535,7 +536,7 @@ public class UpdateRequestTests extends ESTestCase { } public void testRoutingExtraction() throws Exception { - GetResult getResult = new GetResult("test", "type", "1", 0, false, null, null); + GetResult getResult = new GetResult("test", "type", "1", UNASSIGNED_SEQ_NO, 0, 0, false, null, null); IndexRequest indexRequest = new IndexRequest("test", "type", "1"); // There is no routing and parent because the document doesn't exist @@ -545,7 +546,7 @@ public class UpdateRequestTests extends ESTestCase { assertNull(UpdateHelper.calculateRouting(getResult, indexRequest)); // Doc exists but has no source or fields - getResult = new GetResult("test", "type", "1", 0, true, null, null); + getResult = new GetResult("test", "type", "1", 0, 1, 0, true, null, null); // There is no routing and parent on either request assertNull(UpdateHelper.calculateRouting(getResult, indexRequest)); @@ -554,7 +555,7 @@ public class UpdateRequestTests extends ESTestCase { fields.put("_routing", new DocumentField("_routing", Collections.singletonList("routing1"))); // Doc exists and has the parent and routing fields - getResult = new GetResult("test", "type", "1", 0, true, null, fields); + getResult = new GetResult("test", "type", "1", 0, 1, 0, true, null, fields); // Use the get result parent and routing assertThat(UpdateHelper.calculateRouting(getResult, indexRequest), equalTo("routing1")); @@ -563,7 +564,7 @@ public class UpdateRequestTests extends ESTestCase { @SuppressWarnings("deprecated") // 
VersionType.FORCE is deprecated public void testCalculateUpdateVersion() throws Exception { long randomVersion = randomIntBetween(0, 100); - GetResult getResult = new GetResult("test", "type", "1", randomVersion, true, new BytesArray("{}"), null); + GetResult getResult = new GetResult("test", "type", "1", 0, 1, randomVersion, true, new BytesArray("{}"), null); UpdateRequest request = new UpdateRequest("test", "type1", "1"); long version = UpdateHelper.calculateUpdateVersion(request, getResult); @@ -580,7 +581,7 @@ public class UpdateRequestTests extends ESTestCase { public void testNoopDetection() throws Exception { ShardId shardId = new ShardId("test", "", 0); - GetResult getResult = new GetResult("test", "type", "1", 0, true, + GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"body\": \"foo\"}"), null); @@ -611,7 +612,7 @@ public class UpdateRequestTests extends ESTestCase { public void testUpdateScript() throws Exception { ShardId shardId = new ShardId("test", "", 0); - GetResult getResult = new GetResult("test", "type", "1", 0, true, + GetResult getResult = new GetResult("test", "type", "1", 0, 1, 0, true, new BytesArray("{\"body\": \"bar\"}"), null); diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java index c8d63f73732..8ec0423b406 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateResponseTests.java @@ -74,11 +74,12 @@ public class UpdateResponseTests extends ESTestCase { UpdateResponse updateResponse = new UpdateResponse(new ReplicationResponse.ShardInfo(3, 2), new ShardId("books", "books_uuid", 2), "book", "1", 7, 17, 2, UPDATED); - updateResponse.setGetResult(new GetResult("books", "book", "1", 2, true, source, fields)); + updateResponse.setGetResult(new GetResult("books", "book", "1",0, 1, 2, true, source, fields)); String output = Strings.toString(updateResponse); assertEquals("{\"_index\":\"books\",\"_type\":\"book\",\"_id\":\"1\",\"_version\":2,\"result\":\"updated\"," + - "\"_shards\":{\"total\":3,\"successful\":2,\"failed\":0},\"_seq_no\":7,\"_primary_term\":17,\"get\":{\"found\":true," + + "\"_shards\":{\"total\":3,\"successful\":2,\"failed\":0},\"_seq_no\":7,\"_primary_term\":17,\"get\":{" + + "\"_seq_no\":0,\"_primary_term\":1,\"found\":true," + "\"_source\":{\"title\":\"Book title\",\"isbn\":\"ABC-123\"},\"fields\":{\"isbn\":[\"ABC-123\"],\"title\":[\"Book " + "title\"]}}}", output); } diff --git a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java index 1cc2612041f..0dc6b2573ea 100644 --- a/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java +++ b/server/src/test/java/org/elasticsearch/index/get/GetResultTests.java @@ -44,6 +44,7 @@ import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; import static org.elasticsearch.index.get.DocumentFieldTests.randomDocumentField; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; @@ -72,15 +73,16 @@ 
public class GetResultTests extends ESTestCase { public void testToXContent() throws IOException { { - GetResult getResult = new GetResult("index", "type", "id", 1, true, new BytesArray("{ \"field1\" : " + + GetResult getResult = new GetResult("index", "type", "id", 0, 1, 1, true, new BytesArray("{ \"field1\" : " + "\"value1\", \"field2\":\"value2\"}"), singletonMap("field1", new DocumentField("field1", singletonList("value1")))); String output = Strings.toString(getResult); - assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"found\":true,\"_source\":{ \"field1\" " + - ": \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", output); + assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"_version\":1,\"_seq_no\":0,\"_primary_term\":1," + + "\"found\":true,\"_source\":{ \"field1\" : \"value1\", \"field2\":\"value2\"},\"fields\":{\"field1\":[\"value1\"]}}", + output); } { - GetResult getResult = new GetResult("index", "type", "id", 1, false, null, null); + GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null); String output = Strings.toString(getResult); assertEquals("{\"_index\":\"index\",\"_type\":\"type\",\"_id\":\"id\",\"found\":false}", output); } @@ -92,7 +94,7 @@ public class GetResultTests extends ESTestCase { GetResult getResult = tuple.v1(); // We don't expect to retrieve the index/type/id of the GetResult because they are not rendered // by the toXContentEmbedded method. - GetResult expectedGetResult = new GetResult(null, null, null, -1, + GetResult expectedGetResult = new GetResult(null, null, null, tuple.v2().getSeqNo(), tuple.v2().getPrimaryTerm(), -1, tuple.v2().isExists(), tuple.v2().sourceRef(), tuple.v2().getFields()); boolean humanReadable = randomBoolean(); @@ -118,16 +120,16 @@ public class GetResultTests extends ESTestCase { fields.put("foo", new DocumentField("foo", singletonList("bar"))); fields.put("baz", new DocumentField("baz", Arrays.asList("baz_0", "baz_1"))); - GetResult getResult = new GetResult("index", "type", "id", 2, true, + GetResult getResult = new GetResult("index", "type", "id", 0, 1, 2, true, new BytesArray("{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}"), fields); BytesReference originalBytes = toXContentEmbedded(getResult, XContentType.JSON, false); - assertEquals("{\"found\":true,\"_source\":{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}," + + assertEquals("{\"_seq_no\":0,\"_primary_term\":1,\"found\":true,\"_source\":{\"foo\":\"bar\",\"baz\":[\"baz_0\",\"baz_1\"]}," + "\"fields\":{\"foo\":[\"bar\"],\"baz\":[\"baz_0\",\"baz_1\"]}}", originalBytes.utf8ToString()); } public void testToXContentEmbeddedNotFound() throws IOException { - GetResult getResult = new GetResult("index", "type", "id", 1, false, null, null); + GetResult getResult = new GetResult("index", "type", "id", UNASSIGNED_SEQ_NO, 0, 1, false, null, null); BytesReference originalBytes = toXContentEmbedded(getResult, XContentType.JSON, false); assertEquals("{\"found\":false}", originalBytes.utf8ToString()); @@ -149,25 +151,34 @@ public class GetResultTests extends ESTestCase { } public static GetResult copyGetResult(GetResult getResult) { - return new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), - getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()); + return new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), 
getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields()); } public static GetResult mutateGetResult(GetResult getResult) { List<Supplier<GetResult>> mutations = new ArrayList<>(); - mutations.add(() -> new GetResult(randomUnicodeOfLength(15), getResult.getType(), getResult.getId(), getResult.getVersion(), + mutations.add(() -> new GetResult(randomUnicodeOfLength(15), getResult.getType(), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), randomUnicodeOfLength(15), getResult.getId(), getResult.getVersion(), - getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), randomUnicodeOfLength(15), getResult.getVersion(), - getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), randomNonNegativeLong(), - getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), - getResult.isExists() == false, getResult.internalSourceRef(), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), - getResult.isExists(), RandomObjects.randomSource(random()), getResult.getFields())); - mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), getResult.getVersion(), + mutations.add(() -> new GetResult(getResult.getIndex(), randomUnicodeOfLength(15), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), randomUnicodeOfLength(15), + getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), randomNonNegativeLong(), + getResult.isExists(), getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), + getResult.isExists() ? UNASSIGNED_SEQ_NO : getResult.getSeqNo(), + getResult.isExists() ? 
0 : getResult.getPrimaryTerm(), + getResult.getVersion(), getResult.isExists() == false, getResult.internalSourceRef(), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), getResult.isExists(), + RandomObjects.randomSource(random()), getResult.getFields())); + mutations.add(() -> new GetResult(getResult.getIndex(), getResult.getType(), getResult.getId(), + getResult.getSeqNo(), getResult.getPrimaryTerm(), getResult.getVersion(), getResult.isExists(), getResult.internalSourceRef(), randomDocumentFields(XContentType.JSON).v1())); return randomFrom(mutations).get(); } @@ -177,12 +188,16 @@ public class GetResultTests extends ESTestCase { final String type = randomAlphaOfLengthBetween(3, 10); final String id = randomAlphaOfLengthBetween(3, 10); final long version; + final long seqNo; + final long primaryTerm; final boolean exists; BytesReference source = null; Map<String, DocumentField> fields = null; Map<String, DocumentField> expectedFields = null; if (frequently()) { version = randomNonNegativeLong(); + seqNo = randomNonNegativeLong(); + primaryTerm = randomLongBetween(1, 100); exists = true; if (frequently()) { source = RandomObjects.randomSource(random()); @@ -193,11 +208,13 @@ public class GetResultTests extends ESTestCase { expectedFields = tuple.v2(); } } else { + seqNo = UNASSIGNED_SEQ_NO; + primaryTerm = 0; version = -1; exists = false; } - GetResult getResult = new GetResult(index, type, id, version, exists, source, fields); - GetResult expectedGetResult = new GetResult(index, type, id, version, exists, source, expectedFields); + GetResult getResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, fields); + GetResult expectedGetResult = new GetResult(index, type, id, seqNo, primaryTerm, version, exists, source, expectedFields); return Tuple.tuple(getResult, expectedGetResult); } diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index ca9a21973aa..bcd2b4ef144 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -131,7 +131,8 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase listener.buildResponse(response)); @@ -69,7 +72,8 @@ public class RestGetSourceActionTests extends ESTestCase { } public void testRestGetSourceActionWithMissingDocumentSource() { - final GetResponse response = new GetResponse(new GetResult("index1", "_doc", "1", -1, true, null, emptyMap())); + final GetResponse response = + new GetResponse(new GetResult("index1", "_doc", "1", UNASSIGNED_SEQ_NO, 0, -1, true, null, emptyMap())); final ResourceNotFoundException exception = expectThrows(ResourceNotFoundException.class, () -> listener.buildResponse(response)); diff --git a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java index 9de70f4339f..0c253f1446c 100644 --- a/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java +++ b/server/src/test/java/org/elasticsearch/versioning/SimpleVersioningIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkResponse; import 
org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -311,12 +312,11 @@ public class SimpleVersioningIT extends ESIntegTestCase { assertThrows(client().prepareDelete("test", "type", "1").setIfMatch(1, 2).execute(), VersionConflictEngineException.class); client().admin().indices().prepareRefresh().execute().actionGet(); - // TODO: Enable once get response returns seqNo -// for (int i = 0; i < 10; i++) { -// final GetResponse response = client().prepareGet("test", "type", "1").get(); -// assertThat(response.getSeqNo(), equalTo(1L)); -// assertThat(response.getPrimaryTerm(), equalTo(1L)); -// } + for (int i = 0; i < 10; i++) { + final GetResponse response = client().prepareGet("test", "type", "1").get(); + assertThat(response.getSeqNo(), equalTo(1L)); + assertThat(response.getPrimaryTerm(), equalTo(1L)); + } // search with versioning for (int i = 0; i < 10; i++) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index 41bd8bfc6e6..3d13119292b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ -45,6 +45,7 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Consumer; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; @@ -112,7 +113,7 @@ public class NativeUsersStoreTests extends ESTestCase { SecurityIndexManager.SECURITY_INDEX_NAME, NativeUsersStore.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.RESERVED_USER_TYPE, randomAlphaOfLength(12)), - 1L, + 0, 1, 1L, true, BytesReference.bytes(jsonBuilder().map(values)), Collections.emptyMap()); @@ -181,7 +182,7 @@ public class NativeUsersStoreTests extends ESTestCase { SecurityIndexManager.SECURITY_INDEX_NAME, NativeUsersStore.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.USER_DOC_TYPE, username), - 1L, + UNASSIGNED_SEQ_NO, 0, 1L, false, null, Collections.emptyMap()); @@ -223,7 +224,7 @@ public class NativeUsersStoreTests extends ESTestCase { SecurityIndexManager.SECURITY_INDEX_NAME, NativeUsersStore.INDEX_TYPE, NativeUsersStore.getIdForUser(NativeUsersStore.USER_DOC_TYPE, username), - 1L, + 0, 1, 1L, true, source, Collections.emptyMap()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index c95204ddfdf..e2acbb81560 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -56,6 +56,7 @@ import java.util.function.Consumer; import static java.util.Collections.emptyMap; import 
static org.elasticsearch.common.util.set.Sets.newHashSet; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -129,7 +130,7 @@ public class NativePrivilegeStoreTests extends ESTestCase { final String docSource = Strings.toString(sourcePrivilege); listener.get().onResponse(new GetResponse( - new GetResult(request.index(), request.type(), request.id(), 1L, true, new BytesArray(docSource), emptyMap()) + new GetResult(request.index(), request.type(), request.id(), 0, 1, 1L, true, new BytesArray(docSource), emptyMap()) )); final ApplicationPrivilegeDescriptor getPrivilege = future.get(1, TimeUnit.SECONDS); assertThat(getPrivilege, equalTo(sourcePrivilege)); @@ -146,7 +147,7 @@ public class NativePrivilegeStoreTests extends ESTestCase { assertThat(request.id(), equalTo("application-privilege_myapp:admin")); listener.get().onResponse(new GetResponse( - new GetResult(request.index(), request.type(), request.id(), -1, false, null, emptyMap()) + new GetResult(request.index(), request.type(), request.id(), UNASSIGNED_SEQ_NO, 0, -1, false, null, emptyMap()) )); final ApplicationPrivilegeDescriptor getPrivilege = future.get(1, TimeUnit.SECONDS); assertThat(getPrivilege, Matchers.nullValue()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java index 13761948adc..287b3976dea 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ObjectPath; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -53,7 +54,6 @@ import org.elasticsearch.xpack.core.watcher.execution.Wid; import org.elasticsearch.xpack.core.watcher.history.WatchRecord; import org.elasticsearch.xpack.core.watcher.input.ExecutableInput; import org.elasticsearch.xpack.core.watcher.input.Input; -import org.elasticsearch.common.xcontent.ObjectPath; import org.elasticsearch.xpack.core.watcher.transform.ExecutableTransform; import org.elasticsearch.xpack.core.watcher.transform.Transform; import org.elasticsearch.xpack.core.watcher.trigger.TriggerEvent; @@ -88,6 +88,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import static java.util.Arrays.asList; import static java.util.Collections.singletonMap; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -1148,7 +1149,8 @@ public class ExecutionServiceTests extends ESTestCase { if (request.id().equals(id)) { listener.onResponse(response); } else { - GetResult notFoundResult = new 
GetResult(request.index(), request.type(), request.id(), -1, false, null, null); + GetResult notFoundResult = + new GetResult(request.index(), request.type(), request.id(), UNASSIGNED_SEQ_NO, 0, -1, false, null, null); listener.onResponse(new GetResponse(notFoundResult)); } return null; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java index 0b57c856744..57c189d328e 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java @@ -38,6 +38,7 @@ import java.time.Clock; import java.util.Collections; import java.util.concurrent.ExecutionException; +import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.anyObject; import static org.mockito.Matchers.eq; @@ -67,7 +68,7 @@ public class TransportAckWatchActionTests extends ESTestCase { String watchId = "my_watch_id"; doAnswer(invocation -> { ActionListener<GetResponse> listener = (ActionListener<GetResponse>) invocation.getArguments()[1]; - listener.onResponse(new GetResponse(new GetResult(Watch.INDEX, Watch.DOC_TYPE, watchId, -1, false, + listener.onResponse(new GetResponse(new GetResult(Watch.INDEX, Watch.DOC_TYPE, watchId, UNASSIGNED_SEQ_NO, 0, -1, false, BytesArray.EMPTY, Collections.emptyMap()))); return null; }).when(client).get(anyObject(), anyObject()); From eb59c1f7bdc664c590da392518633846cf00075d Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 17 Dec 2018 16:06:56 +0100 Subject: [PATCH 08/26] SNAPSHOTS: Disable BwC Tests Until #36659 Landed (#36709) --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index 7e067b89978..36412c047a7 100644 --- a/build.gradle +++ b/build.gradle @@ -163,8 +163,8 @@ task verifyVersions { * the enabled state of every bwc task. It should be set back to true * after the backport of the backcompat code is complete. 
*/ -final boolean bwc_tests_enabled = true -final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */ +final boolean bwc_tests_enabled = false +final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/36659" /* place a PR link here when committing bwc changes */ if (bwc_tests_enabled == false) { if (bwc_tests_disabled_issue.isEmpty()) { throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false") From 4103d3b9ec5186aa74c987f72d65c99ee27f4c2b Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 17 Dec 2018 08:18:11 -0800 Subject: [PATCH 09/26] [DOCS] Adds monitoring requirement for ingest node (#36665) --- .../monitoring/collecting-monitoring-data.asciidoc | 8 +++++--- docs/reference/monitoring/configuring-metricbeat.asciidoc | 4 ++++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/docs/reference/monitoring/collecting-monitoring-data.asciidoc b/docs/reference/monitoring/collecting-monitoring-data.asciidoc index 61b08801eeb..432b5f4d01f 100644 --- a/docs/reference/monitoring/collecting-monitoring-data.asciidoc +++ b/docs/reference/monitoring/collecting-monitoring-data.asciidoc @@ -101,11 +101,13 @@ the `xpack.monitoring.collection.interval` setting 10 seconds. See + -- By default, the data is stored on the same cluster by using a -<>. - -Alternatively, you can use an <> to send data to +<>. Alternatively, you can use an <> to send data to a separate _monitoring cluster_. +IMPORTANT: The {es} {monitor-features} use ingest pipelines, therefore the +cluster that stores the monitoring data must have at least one +<>. + For more information about typical monitoring architectures, see {stack-ov}/how-monitoring-works.html[How Monitoring Works]. -- diff --git a/docs/reference/monitoring/configuring-metricbeat.asciidoc b/docs/reference/monitoring/configuring-metricbeat.asciidoc index 6098336538b..dd7811b3421 100644 --- a/docs/reference/monitoring/configuring-metricbeat.asciidoc +++ b/docs/reference/monitoring/configuring-metricbeat.asciidoc @@ -164,6 +164,10 @@ output.elasticsearch: <1> In this example, the data is stored on a monitoring cluster with nodes `es-mon-1` and `es-mon-2`. +IMPORTANT: The {es} {monitor-features} use ingest pipelines, therefore the +cluster that stores the monitoring data must have at least one +<>. + For more information about these configuration options, see {metricbeat-ref}/elasticsearch-output.html[Configure the {es} output]. -- From 6d9d5e397b9f2b008a6023dd2d65aa071efbdba2 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Mon, 17 Dec 2018 18:58:06 +0200 Subject: [PATCH 10/26] SQL: Fix translation of LIKE/RLIKE keywords (#36672) * SQL: Fix translation of LIKE/RLIKE keywords Refactor Like/RLike functions to simplify internals and improve query translation when chained or within a script context. 
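(Editor's note: the heart of this change is visible in the RegexProcessor diff below: the regular expression is now compiled once, when the processor is constructed, rather than on every row. A minimal sketch of the idea, not the committed code:

    // compiled once and reused per row; a null pattern is treated as "matches everything"
    private final Pattern pattern = Pattern.compile("test.emp");

    Boolean matches(Object value) {
        // a null input yields null, keeping SQL-style three-valued logic
        return value == null ? null : pattern.matcher(value.toString()).matches();
    }

The scripting path, InternalSqlScriptUtils.regex, still compiles per call; the diff marks that with a TODO.)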
Fix #36039 Fix #36584 --- .../sql/qa/src/main/resources/agg.sql-spec | 2 + .../qa/src/main/resources/datetime.sql-spec | 5 +- .../sql/qa/src/main/resources/filter.sql-spec | 2 + .../whitelist/InternalSqlScriptUtils.java | 1 + .../sql/expression/predicate/regex/Like.java | 26 +++--- .../predicate/regex/LikePattern.java | 24 +----- .../sql/expression/predicate/regex/RLike.java | 23 ++--- .../predicate/regex/RegexMatch.java | 29 +++++-- .../expression/predicate/regex/RegexPipe.java | 34 -------- .../predicate/regex/RegexProcessor.java | 82 +++++++++--------- .../xpack/sql/parser/ExpressionBuilder.java | 4 +- .../xpack/sql/planner/QueryTranslator.java | 83 +++++++++++++++---- .../xpack/sql/optimizer/OptimizerTests.java | 6 +- .../xpack/sql/tree/NodeSubclassTests.java | 13 ++- 14 files changed, 169 insertions(+), 165 deletions(-) delete mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexPipe.java diff --git a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec index 9adbe79edc6..149e23f7713 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/agg.sql-spec @@ -280,6 +280,8 @@ aggMaxWithAlias SELECT gender g, MAX(emp_no) m FROM "test_emp" GROUP BY g ORDER BY gender; aggMaxOnDate SELECT gender, MAX(birth_date) m FROM "test_emp" GROUP BY gender ORDER BY gender; +aggAvgAndMaxWithLikeFilter +SELECT CAST(AVG(salary) AS LONG) AS avg, CAST(SUM(salary) AS LONG) AS s FROM "test_emp" WHERE first_name LIKE 'G%'; // Conditional MAX aggMaxWithHaving diff --git a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec index 0f8a16b9e7b..4b12d2de58f 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/datetime.sql-spec @@ -119,7 +119,10 @@ SELECT DAY_OF_WEEK(birth_date) day, COUNT(*) c FROM test_emp WHERE DAY_OF_WEEK(b currentTimestampYear SELECT YEAR(CURRENT_TIMESTAMP()) AS result; -currentTimestampMonth +// +// H2 uses the local timezone instead of the specified one +// +currentTimestampMonth-Ignore SELECT MONTH(CURRENT_TIMESTAMP()) AS result; currentTimestampHour-Ignore diff --git a/x-pack/plugin/sql/qa/src/main/resources/filter.sql-spec b/x-pack/plugin/sql/qa/src/main/resources/filter.sql-spec index cfbff2ada57..af81b060ebd 100644 --- a/x-pack/plugin/sql/qa/src/main/resources/filter.sql-spec +++ b/x-pack/plugin/sql/qa/src/main/resources/filter.sql-spec @@ -49,6 +49,8 @@ whereFieldWithNotEqualsOnString SELECT last_name l FROM "test_emp" WHERE emp_no < 10003 AND gender <> 'M'; whereFieldWithLikeMatch SELECT last_name l FROM "test_emp" WHERE emp_no < 10003 AND last_name LIKE 'K%'; +whereFieldWithNotLikeMatch +SELECT last_name l FROM "test_emp" WHERE emp_no < 10020 AND first_name NOT LIKE 'Ma%'; whereFieldWithOrderNot SELECT last_name l FROM "test_emp" WHERE NOT emp_no < 10003 ORDER BY emp_no LIMIT 5; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index cdc773a91af..a67da8d6efd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -165,6 +165,7 @@ public final class InternalSqlScriptUtils { // Regex // public static Boolean regex(String value, String pattern) { + // TODO: this needs to be improved to avoid creating the pattern on every call return RegexOperation.match(value, pattern); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java index a5c8028f670..9dc3c69fd29 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/Like.java @@ -11,26 +11,24 @@ import org.elasticsearch.xpack.sql.tree.NodeInfo; public class Like extends RegexMatch { - public Like(Location location, Expression left, LikePattern right) { - super(location, left, right); + private final LikePattern pattern; + + public Like(Location location, Expression left, LikePattern pattern) { + super(location, left, pattern.asJavaRegex()); + this.pattern = pattern; + } + + public LikePattern pattern() { + return pattern; } @Override protected NodeInfo info() { - return NodeInfo.create(this, Like::new, left(), pattern()); - } - - public LikePattern pattern() { - return (LikePattern) right(); + return NodeInfo.create(this, Like::new, field(), pattern); } @Override - protected Like replaceChildren(Expression newLeft, Expression newRight) { - return new Like(location(), newLeft, (LikePattern) newRight); - } - - @Override - protected String asString(Expression pattern) { - return ((LikePattern) pattern).asJavaRegex(); + protected Like replaceChild(Expression newLeft) { + return new Like(location(), newLeft, pattern); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java index bde8129f8e7..d07df617df9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/LikePattern.java @@ -5,10 +5,6 @@ */ package org.elasticsearch.xpack.sql.expression.predicate.regex; -import org.elasticsearch.xpack.sql.expression.LeafExpression; -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.tree.NodeInfo; -import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.util.StringUtils; import java.util.Objects; @@ -21,7 +17,7 @@ import java.util.Objects; * * To prevent conflicts with ES, the string and char must be validated to not contain '*'. 
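 * (Editor's note: a usage sketch for the simplified class below; the outputs shown are
 * assumptions based on the StringUtils conversions it delegates to, not verified output:
 *   LikePattern p = new LikePattern("test%", (char) 0);
 *   p.asJavaRegex();       // e.g. "^test.*$"
 *   p.asLuceneWildcard();  // e.g. "test*"
 * )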
*/ -public class LikePattern extends LeafExpression { +public class LikePattern { private final String pattern; private final char escape; @@ -30,8 +26,7 @@ public class LikePattern extends LeafExpression { private final String wildcard; private final String indexNameWildcard; - public LikePattern(Location location, String pattern, char escape) { - super(location); + public LikePattern(String pattern, char escape) { this.pattern = pattern; this.escape = escape; // early initialization to force string validation @@ -40,11 +35,6 @@ public class LikePattern extends LeafExpression { this.indexNameWildcard = StringUtils.likeToIndexWildcard(pattern, escape); } - @Override - protected NodeInfo info() { - return NodeInfo.create(this, LikePattern::new, pattern, escape); - } - public String pattern() { return pattern; } @@ -74,16 +64,6 @@ public class LikePattern extends LeafExpression { return indexNameWildcard; } - @Override - public boolean nullable() { - return false; - } - - @Override - public DataType dataType() { - return DataType.KEYWORD; - } - @Override public int hashCode() { return Objects.hash(pattern, escape); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java index 346c3062bfa..a09586fd35f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RLike.java @@ -6,28 +6,29 @@ package org.elasticsearch.xpack.sql.expression.predicate.regex; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.NodeInfo; public class RLike extends RegexMatch { - public RLike(Location location, Expression left, Literal right) { - super(location, left, right); + private final String pattern; + + public RLike(Location location, Expression left, String pattern) { + super(location, left, pattern); + this.pattern = pattern; + } + + public String pattern() { + return pattern; } @Override protected NodeInfo info() { - return NodeInfo.create(this, RLike::new, left(), (Literal) right()); + return NodeInfo.create(this, RLike::new, field(), pattern); } @Override - protected RLike replaceChildren(Expression newLeft, Expression newRight) { - return new RLike(location(), newLeft, (Literal) newRight); - } - - @Override - protected String asString(Expression pattern) { - return pattern.fold().toString(); + protected RLike replaceChild(Expression newChild) { + return new RLike(location(), newChild, pattern); } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java index e1e41006492..f9390fdfa45 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexMatch.java @@ -7,15 +7,19 @@ package org.elasticsearch.xpack.sql.expression.predicate.regex; import org.elasticsearch.xpack.sql.expression.Expression; -import org.elasticsearch.xpack.sql.expression.predicate.BinaryPredicate; +import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; 
+import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; import org.elasticsearch.xpack.sql.expression.predicate.regex.RegexProcessor.RegexOperation; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; -public abstract class RegexMatch extends BinaryPredicate { +public abstract class RegexMatch extends UnaryScalarFunction { - protected RegexMatch(Location location, Expression value, Expression pattern) { - super(location, value, pattern, RegexOperation.INSTANCE); + private final String pattern; + + protected RegexMatch(Location location, Expression value, String pattern) { + super(location, value); + this.pattern = pattern; } @Override @@ -23,18 +27,25 @@ public abstract class RegexMatch extends BinaryPredicate info() { - return NodeInfo.create(this, RegexPipe::new, expression(), left(), right()); - } - - @Override - protected BinaryPipe replaceChildren(Pipe left, Pipe right) { - return new RegexPipe(location(), expression(), left, right); - } - - @Override - public RegexProcessor asProcessor() { - return new RegexProcessor(left().asProcessor(), right().asProcessor()); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexProcessor.java index 16f6f0a6949..7f9a2ed7623 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexProcessor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/regex/RegexProcessor.java @@ -7,66 +7,47 @@ package org.elasticsearch.xpack.sql.expression.predicate.regex; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; -import org.elasticsearch.xpack.sql.expression.gen.processor.BinaryProcessor; import org.elasticsearch.xpack.sql.expression.gen.processor.Processor; -import org.elasticsearch.xpack.sql.expression.predicate.PredicateBiFunction; import java.io.IOException; import java.util.Objects; import java.util.regex.Pattern; -public class RegexProcessor extends BinaryProcessor { +public class RegexProcessor implements Processor { - public static class RegexOperation implements PredicateBiFunction { + public static class RegexOperation { - public static final RegexOperation INSTANCE = new RegexOperation(); + public static Boolean match(Object value, Pattern pattern) { + if (pattern == null) { + return Boolean.TRUE; + } - @Override - public String name() { - return symbol(); - } - - @Override - public String symbol() { - return "REGEX"; - } - - @Override - public Boolean doApply(String value, String pattern) { - return match(value, pattern); - } - - public static Boolean match(Object value, Object pattern) { - if (value == null || pattern == null) { + if (value == null) { return null; } - Pattern p = Pattern.compile(pattern.toString()); - return p.matcher(value.toString()).matches(); + return pattern.matcher(value.toString()).matches(); + } + + public static Boolean match(Object value, String pattern) { + if (pattern == null) { + return Boolean.TRUE; + } + + if (value == null) { + return null; + } + + return Pattern.compile(pattern).matcher(value.toString()).matches(); } } public static final String NAME = "rgx"; - public RegexProcessor(Processor value, Processor pattern) { - super(value, pattern); - } + private Pattern pattern; - 
public RegexProcessor(StreamInput in) throws IOException { - super(in); - } - - @Override - protected Boolean doProcess(Object value, Object pattern) { - return RegexOperation.match(value, pattern); - } - - @Override - protected void checkParameter(Object param) { - if (!(param instanceof String || param instanceof Character)) { - throw new SqlIllegalArgumentException("A string/char is required; received [{}]", param); - } + public RegexProcessor(String pattern) { + this.pattern = pattern != null ? Pattern.compile(pattern) : null; } @Override @@ -74,12 +55,23 @@ public class RegexProcessor extends BinaryProcessor { return NAME; } + public RegexProcessor(StreamInput in) throws IOException { + this(in.readOptionalString()); + } + @Override - protected void doWrite(StreamOutput out) throws IOException {} + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(pattern != null ? pattern.toString() : null); + } + + @Override + public Object process(Object input) { + return RegexOperation.match(input, pattern); + } @Override public int hashCode() { - return Objects.hash(left(), right()); + return Objects.hash(pattern); } @Override @@ -93,6 +85,6 @@ public class RegexProcessor extends BinaryProcessor { } RegexProcessor other = (RegexProcessor) obj; - return Objects.equals(left(), other.left()) && Objects.equals(right(), other.right()); + return Objects.equals(pattern, other.pattern); } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index cd1cb189b6a..f7d659a2933 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -232,7 +232,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder { e = new Like(loc, exp, visitPattern(pCtx.pattern())); break; case SqlBaseParser.RLIKE: - e = new RLike(loc, exp, new Literal(source(pCtx.regex), string(pCtx.regex), DataType.KEYWORD)); + e = new RLike(loc, exp, string(pCtx.regex)); break; case SqlBaseParser.NULL: // shortcut to avoid double negation later on (since there's no IsNull (missing in ES is a negated exists)) @@ -301,7 +301,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder { } } - return new LikePattern(source(ctx), pattern, escape); + return new LikePattern(pattern, escape); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index a757bde89e8..af180aae90b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.sql.expression.function.grouping.Histogram; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction; +import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate; import org.elasticsearch.xpack.sql.expression.literal.Intervals; import org.elasticsearch.xpack.sql.expression.predicate.Range; import 
org.elasticsearch.xpack.sql.expression.predicate.fulltext.MatchQueryPredicate; @@ -103,7 +104,6 @@ import java.util.function.Supplier; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.sql.expression.Foldables.doubleValuesOf; -import static org.elasticsearch.xpack.sql.expression.Foldables.stringValueOf; import static org.elasticsearch.xpack.sql.expression.Foldables.valueOf; final class QueryTranslator { @@ -121,7 +121,8 @@ final class QueryTranslator { new Likes(), new StringQueries(), new Matches(), - new MultiMatches() + new MultiMatches(), + new Scalars() ); private static final List<AggTranslator<?>> AGG_TRANSLATORS = Arrays.asList( @@ -447,13 +448,13 @@ final class QueryTranslator { boolean inexact = true; String target = null; - if (e.left() instanceof FieldAttribute) { - FieldAttribute fa = (FieldAttribute) e.left(); + if (e.field() instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) e.field(); inexact = fa.isInexact(); target = nameOf(inexact ? fa : fa.exactAttribute()); } else { throw new SqlIllegalArgumentException("Scalar function ({}) not allowed (yet) as arguments for LIKE", - Expressions.name(e.left())); + Expressions.name(e.field())); } if (e instanceof Like) { @@ -462,21 +463,21 @@ final class QueryTranslator { q = new QueryStringQuery(e.location(), p.asLuceneWildcard(), target); } else { - q = new WildcardQuery(e.location(), nameOf(e.left()), p.asLuceneWildcard()); + q = new WildcardQuery(e.location(), nameOf(e.field()), p.asLuceneWildcard()); } } if (e instanceof RLike) { - String pattern = stringValueOf(e.right()); + String pattern = ((RLike) e).pattern(); if (inexact) { q = new QueryStringQuery(e.location(), "/" + pattern + "/", target); } else { - q = new RegexQuery(e.location(), nameOf(e.left()), pattern); + q = new RegexQuery(e.location(), nameOf(e.field()), pattern); } } - return q != null ? new QueryTranslation(wrapIfNested(q, e.left())) : null; + return q != null ? new QueryTranslation(wrapIfNested(q, e.field())) : null; } } @@ -529,8 +530,16 @@ final class QueryTranslator { if (onAggs) { aggFilter = new AggFilter(not.id().toString(), not.asScript()); } else { - query = handleQuery(not, not.field(), - () -> new NotQuery(not.location(), toQuery(not.field(), false).query)); + Expression e = not.field(); + Query wrappedQuery = toQuery(not.field(), false).query; + Query q = wrappedQuery instanceof ScriptQuery ? 
new ScriptQuery(not.location(), + not.asScript()) : new NotQuery(not.location(), wrappedQuery); + + if (e instanceof FieldAttribute) { + query = wrapIfNested(q, e); + } + + query = q; } return new QueryTranslation(query, aggFilter); @@ -547,8 +556,14 @@ final class QueryTranslator { if (onAggs) { aggFilter = new AggFilter(isNotNull.id().toString(), isNotNull.asScript()); } else { - query = handleQuery(isNotNull, isNotNull.field(), - () -> new ExistsQuery(isNotNull.location(), nameOf(isNotNull.field()))); + Query q = null; + if (isNotNull.field() instanceof FieldAttribute) { + q = new ExistsQuery(isNotNull.location(), nameOf(isNotNull.field())); + } else { + q = new ScriptQuery(isNotNull.location(), isNotNull.asScript()); + } + final Query qu = q; + query = handleQuery(isNotNull, isNotNull.field(), () -> qu); } return new QueryTranslation(query, aggFilter); @@ -565,8 +580,15 @@ final class QueryTranslator { if (onAggs) { aggFilter = new AggFilter(isNull.id().toString(), isNull.asScript()); } else { - query = handleQuery(isNull, isNull.field(), - () -> new NotQuery(isNull.location(), new ExistsQuery(isNull.location(), nameOf(isNull.field())))); + Query q = null; + if (isNull.field() instanceof FieldAttribute) { + q = new NotQuery(isNull.location(), new ExistsQuery(isNull.location(), nameOf(isNull.field()))); + } else { + q = new ScriptQuery(isNull.location(), isNull.asScript()); + } + final Query qu = q; + + query = handleQuery(isNull, isNull.field(), () -> qu); } return new QueryTranslation(query, aggFilter); @@ -678,7 +700,14 @@ final class QueryTranslator { aggFilter = new AggFilter(at.id().toString(), in.asScript()); } else { - query = handleQuery(in, ne, () -> new TermsQuery(in.location(), ne.name(), in.list())); + Query q = null; + if (in.value() instanceof FieldAttribute) { + q = new TermsQuery(in.location(), ne.name(), in.list()); + } else { + q = new ScriptQuery(in.location(), in.asScript()); + } + Query qu = q; + query = handleQuery(in, ne, () -> qu); } return new QueryTranslation(query, aggFilter); } @@ -719,6 +748,25 @@ final class QueryTranslator { } } } + + static class Scalars extends ExpressionTranslator { + + @Override + protected QueryTranslation asQuery(ScalarFunction f, boolean onAggs) { + ScriptTemplate script = f.asScript(); + + Query query = null; + AggFilter aggFilter = null; + + if (onAggs) { + aggFilter = new AggFilter(f.id().toString(), script); + } else { + query = handleQuery(f, f, () -> new ScriptQuery(f.location(), script)); + } + + return new QueryTranslation(query, aggFilter); + } + } // @@ -862,8 +910,9 @@ final class QueryTranslator { protected static Query handleQuery(ScalarFunction sf, Expression field, Supplier query) { + Query q = query.get(); if (field instanceof FieldAttribute) { - return wrapIfNested(query.get(), field); + return wrapIfNested(q, field); } return new ScriptQuery(sf.location(), sf.asScript()); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java index 8c8a64c79f2..2412342c69c 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/optimizer/OptimizerTests.java @@ -322,10 +322,10 @@ public class OptimizerTests extends ESTestCase { public void testConstantFoldingLikes() { assertEquals(Literal.TRUE, - new ConstantFolding().rule(new Like(EMPTY, Literal.of(EMPTY, "test_emp"), new 
LikePattern(EMPTY, "test%", (char) 0))) + new ConstantFolding().rule(new Like(EMPTY, Literal.of(EMPTY, "test_emp"), new LikePattern("test%", (char) 0))) .canonical()); assertEquals(Literal.TRUE, - new ConstantFolding().rule(new RLike(EMPTY, Literal.of(EMPTY, "test_emp"), Literal.of(EMPTY, "test.emp"))).canonical()); + new ConstantFolding().rule(new RLike(EMPTY, Literal.of(EMPTY, "test_emp"), "test.emp")).canonical()); } public void testConstantFoldingDatetime() { @@ -419,7 +419,7 @@ public class OptimizerTests extends ESTestCase { // comparison assertNullLiteral(rule.rule(new GreaterThan(EMPTY, getFieldAttribute(), Literal.NULL))); // regex - assertNullLiteral(rule.rule(new RLike(EMPTY, getFieldAttribute(), Literal.NULL))); + assertNullLiteral(rule.rule(new RLike(EMPTY, Literal.NULL, "123"))); } public void testSimplifyCoalesceNulls() { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java index 963498bb9b6..cc91cdf6eab 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/tree/NodeSubclassTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.sql.expression.predicate.conditional.IfNull; import org.elasticsearch.xpack.sql.expression.predicate.fulltext.FullTextPredicate; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.sql.expression.predicate.operator.comparison.InPipe; +import org.elasticsearch.xpack.sql.expression.predicate.regex.Like; import org.elasticsearch.xpack.sql.expression.predicate.regex.LikePattern; import org.elasticsearch.xpack.sql.tree.NodeTests.ChildrenAreAProperty; import org.elasticsearch.xpack.sql.tree.NodeTests.Dummy; @@ -449,14 +450,12 @@ public class NodeSubclassTests> extends ESTestCas } return b.toString(); } - } else if (toBuildClass == LikePattern.class) { - /* - * The pattern and escape character have to be valid together - * so we pick an escape character that isn't used - */ - if (argClass == char.class) { - return randomFrom('\\', '|', '/', '`'); + } else if (toBuildClass == Like.class) { + + if (argClass == LikePattern.class) { + return new LikePattern(randomAlphaOfLength(16), randomFrom('\\', '|', '/', '`')); } + } else if (toBuildClass == Histogram.class) { if (argClass == Expression.class) { return LiteralTests.randomLiteral(); From 27ad733dce885914f46c24b40f35d51e447fa78e Mon Sep 17 00:00:00 2001 From: Evgenia Badyanova Date: Mon, 17 Dec 2018 11:59:45 -0500 Subject: [PATCH 11/26] Fixing line length for EnvironmentTests and RecoveryTests (#36657) Relates #34884 --- .../resources/checkstyle_suppressions.xml | 6 -- .../elasticsearch/env/EnvironmentTests.java | 3 +- .../env/NodeEnvironmentTests.java | 7 +- .../recovery/FullRollingRestartIT.java | 25 +++++--- .../recovery/RecoveryWhileUnderLoadIT.java | 64 ++++++++++++++----- .../elasticsearch/recovery/RelocationIT.java | 55 +++++++++++----- .../recovery/TruncatedRecoveryIT.java | 6 +- 7 files changed, 113 insertions(+), 53 deletions(-) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 6e628eab0cb..55fdcecb084 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -64,17 +64,11 @@ - - - - - - diff --git 
a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java index 5ada31b6129..c87a896d318 100644 --- a/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/EnvironmentTests.java @@ -55,7 +55,8 @@ public class EnvironmentTests extends ESTestCase { Environment environment = newEnvironment(); assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue()); assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue()); - environment = newEnvironment(Settings.builder().putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build()); + environment = newEnvironment(Settings.builder() + .putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build()); assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue()); assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue()); assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue()); diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 7a24ebaf048..63635f5cbe7 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -352,7 +352,8 @@ public class NodeEnvironmentTests extends ESTestCase { for (int i = 0; i < iters; i++) { int shard = randomIntBetween(0, counts.length - 1); try { - try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "fooUUID", shard), scaledRandomIntBetween(0, 10))) { + try (ShardLock autoCloses = env.shardLock(new ShardId("foo", "fooUUID", shard), + scaledRandomIntBetween(0, 10))) { counts[shard].value++; countsAtomic[shard].incrementAndGet(); assertEquals(flipFlop[shard].incrementAndGet(), 1); @@ -386,7 +387,9 @@ public class NodeEnvironmentTests extends ESTestCase { final Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "myindexUUID").build(); IndexSettings s1 = IndexSettingsModule.newIndexSettings("myindex", indexSettings); - IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder().put(indexSettings).put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build()); + IndexSettings s2 = IndexSettingsModule.newIndexSettings("myindex", Settings.builder() + .put(indexSettings) + .put(IndexMetaData.SETTING_DATA_PATH, "/tmp/foo").build()); Index index = new Index("myindex", "myindexUUID"); ShardId sid = new ShardId(index, 0); diff --git a/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java index 6624d4eb8de..0fb5f7ac114 100644 --- a/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java @@ -76,14 +76,16 @@ public class FullRollingRestartIT extends ESIntegTestCase { internalCluster().startNode(settings); // make sure the cluster state is green, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3")); + 
assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3")); logger.info("--> add two more nodes"); internalCluster().startNode(settings); internalCluster().startNode(settings); // make sure the cluster state is green, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("5")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("5")); logger.info("--> refreshing and checking data"); refresh(); @@ -94,11 +96,13 @@ public class FullRollingRestartIT extends ESIntegTestCase { // now start shutting nodes down internalCluster().stopRandomDataNode(); // make sure the cluster state is green, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("4")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("4")); internalCluster().stopRandomDataNode(); // make sure the cluster state is green, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("3")); logger.info("--> stopped two nodes, verifying data"); refresh(); @@ -109,12 +113,14 @@ public class FullRollingRestartIT extends ESIntegTestCase { // closing the 3rd node internalCluster().stopRandomDataNode(); // make sure the cluster state is green, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("2")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForGreenStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("2")); internalCluster().stopRandomDataNode(); // make sure the cluster state is yellow, and all has been recovered - assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForYellowStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("1")); + assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout) + .setWaitForYellowStatus().setWaitForNoRelocatingShards(true).setWaitForNodes("1")); logger.info("--> one node left, verifying data"); refresh(); @@ -133,7 +139,9 @@ public class FullRollingRestartIT extends ESIntegTestCase { * to relocating to the restarting node since all had 2 shards and now one node has nothing allocated. * We have a fix for this to wait until we have allocated unallocated shards now so this shouldn't happen. 
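 * (Editor's note: the guard these tests lean on is the cluster-health builder chain
 * visible throughout this diff; a condensed sketch of the idiom, normally wrapped in
 * assertTimeout()/assertNoTimeout() by the tests themselves:
 *   client().admin().cluster().prepareHealth()
 *       .setWaitForEvents(Priority.LANGUID)
 *       .setWaitForGreenStatus()
 *       .setWaitForNoRelocatingShards(true)
 *       .setWaitForNodes("3").get();
 * every setter shown appears verbatim in the surrounding hunks.)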
*/ - prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6").put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0").put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get(); + prepareCreate("test").setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "6") + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") + .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueMinutes(1))).get(); for (int i = 0; i < 100; i++) { client().prepareIndex("test", "type1", Long.toString(i)) @@ -152,7 +160,8 @@ public class FullRollingRestartIT extends ESIntegTestCase { recoveryResponse = client().admin().indices().prepareRecoveries("test").get(); for (RecoveryState recoveryState : recoveryResponse.shardRecoveryStates().get("test")) { - assertTrue("relocated from: " + recoveryState.getSourceNode() + " to: " + recoveryState.getTargetNode()+ "-- \nbefore: \n" + state, + assertTrue("relocated from: " + recoveryState.getSourceNode() + " to: " + + recoveryState.getTargetNode()+ "-- \nbefore: \n" + state, recoveryState.getRecoverySource().getType() != RecoverySource.Type.PEER || recoveryState.getPrimary() == false); } } diff --git a/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 0d2235c30a4..c0345be6fae 100644 --- a/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -53,14 +53,18 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllS import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; -@TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.cluster.service:TRACE,org.elasticsearch.index.seqno:TRACE,org.elasticsearch.indices.recovery:TRACE") +@TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.cluster.service:TRACE," + + "org.elasticsearch.index.seqno:TRACE,org.elasticsearch.indices.recovery:TRACE") public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { private final Logger logger = LogManager.getLogger(RecoveryWhileUnderLoadIT.class); public void testRecoverWhileUnderLoadAllocateReplicasTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 1, Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, numberOfShards) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -92,7 +96,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { logger.info("--> waiting for GREEN health status ..."); // make sure the cluster state is green, and all has been recovered - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus()); + 
assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus()); logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs); waitForDocs(totalNumDocs, indexer); @@ -113,7 +118,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadAllocateReplicasRelocatePrimariesTest() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 1, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 1, Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, numberOfShards) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -142,7 +150,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { allowNodes("test", 4); logger.info("--> waiting for GREEN health status ..."); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus()); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus()); logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs); @@ -164,7 +173,9 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { public void testRecoverWhileUnderLoadWithReducedAllowedNodes() throws Exception { logger.info("--> creating test index ..."); int numberOfShards = numberOfShards(); - assertAcked(prepareCreate("test", 2, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 2, Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, numberOfShards).put(SETTING_NUMBER_OF_REPLICAS, 1) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int totalNumDocs = scaledRandomIntBetween(200, 10000); int waitFor = totalNumDocs / 10; @@ -194,7 +205,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { allowNodes("test", 4); logger.info("--> waiting for GREEN health status ..."); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus().setWaitForNoRelocatingShards(true)); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m") + .setWaitForGreenStatus() + .setWaitForNoRelocatingShards(true)); logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs); waitForDocs(totalNumDocs, indexer); @@ -205,23 +219,31 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { logger.info("--> allow 3 nodes for index [test] ..."); allowNodes("test", 3); logger.info("--> waiting for relocations ..."); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true)); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m") + .setWaitForNoRelocatingShards(true)); 
logger.info("--> allow 2 nodes for index [test] ..."); allowNodes("test", 2); logger.info("--> waiting for relocations ..."); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true)); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m") + .setWaitForNoRelocatingShards(true)); logger.info("--> allow 1 nodes for index [test] ..."); allowNodes("test", 1); logger.info("--> waiting for relocations ..."); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true)); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m") + .setWaitForNoRelocatingShards(true)); logger.info("--> marking and waiting for indexing threads to stop ..."); indexer.stop(); logger.info("--> indexing threads stopped"); - assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForNoRelocatingShards(true)); + assertNoTimeout(client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID).setTimeout("5m") + .setWaitForNoRelocatingShards(true)); logger.info("--> refreshing the index"); refreshAndAssert(); @@ -235,7 +257,10 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { final int numReplicas = 0; logger.info("--> creating test index ..."); int allowNodes = 2; - assertAcked(prepareCreate("test", 3, Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numShards).put(SETTING_NUMBER_OF_REPLICAS, numReplicas).put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); + assertAcked(prepareCreate("test", 3, Settings.builder() + .put(SETTING_NUMBER_OF_SHARDS, numShards) + .put(SETTING_NUMBER_OF_REPLICAS, numReplicas) + .put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), Translog.Durability.ASYNC))); final int numDocs = scaledRandomIntBetween(200, 9999); @@ -258,7 +283,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { logger.info("--> indexing threads stopped"); logger.info("--> bump up number of replicas to 1 and allow all nodes to hold the index"); allowNodes("test", 3); - assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("number_of_replicas", 1)).get()); + assertAcked(client().admin().indices().prepareUpdateSettings("test") + .setSettings(Settings.builder().put("number_of_replicas", 1)).get()); ensureGreen(TimeValue.timeValueMinutes(5)); logger.info("--> refreshing the index"); @@ -273,7 +299,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { SearchResponse[] iterationResults = new SearchResponse[iterations]; boolean error = false; for (int i = 0; i < iterations; i++) { - SearchResponse searchResponse = client().prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).addSort("id", SortOrder.ASC).get(); + SearchResponse searchResponse = client().prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()) + .addSort("id", SortOrder.ASC).get(); logSearchResponse(numberOfShards, numberOfDocs, i, searchResponse); iterationResults[i] = searchResponse; if (searchResponse.getHits().getTotalHits().value != numberOfDocs) { @@ -286,7 +313,8 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats().get(); 
for (ShardStats shardStats : indicesStatsResponse.getShards()) { DocsStats docsStats = shardStats.getStats().docs; - logger.info("shard [{}] - count {}, primary {}", shardStats.getShardRouting().id(), docsStats.getCount(), shardStats.getShardRouting().primary()); + logger.info("shard [{}] - count {}, primary {}", shardStats.getShardRouting().id(), docsStats.getCount(), + shardStats.getShardRouting().primary()); } ClusterService clusterService = clusterService(); @@ -332,12 +360,14 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { } private void logSearchResponse(int numberOfShards, long numberOfDocs, int iteration, SearchResponse searchResponse) { - logger.info("iteration [{}] - successful shards: {} (expected {})", iteration, searchResponse.getSuccessfulShards(), numberOfShards); + logger.info("iteration [{}] - successful shards: {} (expected {})", iteration, + searchResponse.getSuccessfulShards(), numberOfShards); logger.info("iteration [{}] - failed shards: {} (expected 0)", iteration, searchResponse.getFailedShards()); if (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) { logger.info("iteration [{}] - shard failures: {}", iteration, Arrays.toString(searchResponse.getShardFailures())); } - logger.info("iteration [{}] - returned documents: {} (expected {})", iteration, searchResponse.getHits().getTotalHits().value, numberOfDocs); + logger.info("iteration [{}] - returned documents: {} (expected {})", iteration, + searchResponse.getHits().getTotalHits().value, numberOfDocs); } private void refreshAndAssert() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java index b27e4fd229a..62208a40488 100644 --- a/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -133,7 +133,8 @@ public class RelocationIT extends ESIntegTestCase { logger.info("--> start another node"); final String node_2 = internalCluster().startNode(); - ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNodes("2").execute().actionGet(); + ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID) + .setWaitForNodes("2").execute().actionGet(); assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); logger.info("--> relocate the shard from node1 to node2"); @@ -141,7 +142,8 @@ public class RelocationIT extends ESIntegTestCase { .add(new MoveAllocationCommand("test", 0, node_1, node_2)) .execute().actionGet(); - clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet(); + clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID) + .setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet(); assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); logger.info("--> verifying count again..."); @@ -155,7 +157,8 @@ public class RelocationIT extends ESIntegTestCase { int numberOfReplicas = randomBoolean() ? 0 : 1; int numberOfNodes = numberOfReplicas == 0 ? 
2 : 3; - logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", numberOfRelocations, numberOfReplicas, numberOfNodes); + logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", + numberOfRelocations, numberOfReplicas, numberOfNodes); String[] nodes = new String[numberOfNodes]; logger.info("--> starting [node1] ..."); @@ -172,8 +175,10 @@ public class RelocationIT extends ESIntegTestCase { logger.info("--> starting [node{}] ...", i); nodes[i - 1] = internalCluster().startNode(); if (i != numberOfNodes) { - ClusterHealthResponse healthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID) - .setWaitForNodes(Integer.toString(i)).setWaitForGreenStatus().execute().actionGet(); + ClusterHealthResponse healthResponse = client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID) + .setWaitForNodes(Integer.toString(i)) + .setWaitForGreenStatus().execute().actionGet(); assertThat(healthResponse.isTimedOut(), equalTo(false)); } } @@ -202,7 +207,10 @@ public class RelocationIT extends ESIntegTestCase { logger.debug("--> flushing"); client().admin().indices().prepareFlush().get(); } - ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForNoRelocatingShards(true).setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet(); + ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID) + .setWaitForNoRelocatingShards(true) + .setTimeout(ACCEPTABLE_RELOCATION_TIME).execute().actionGet(); assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); indexer.pauseIndexing(); logger.info("--> DONE relocate the shard from {} to {}", fromNode, toNode); @@ -218,7 +226,8 @@ public class RelocationIT extends ESIntegTestCase { boolean ranOnce = false; for (int i = 0; i < 10; i++) { logger.info("--> START search test round {}", i + 1); - SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields().execute().actionGet().getHits(); + SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()) + .setSize((int) indexer.totalIndexedDocs()).storedFields().execute().actionGet().getHits(); ranOnce = true; if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; @@ -252,7 +261,8 @@ public class RelocationIT extends ESIntegTestCase { int numberOfReplicas = randomBoolean() ? 0 : 1; int numberOfNodes = numberOfReplicas == 0 ? 
2 : 3; - logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", numberOfRelocations, numberOfReplicas, numberOfNodes); + logger.info("testRelocationWhileIndexingRandom(numRelocations={}, numberOfReplicas={}, numberOfNodes={})", + numberOfRelocations, numberOfReplicas, numberOfNodes); String[] nodes = new String[numberOfNodes]; logger.info("--> starting [node_0] ..."); @@ -281,13 +291,15 @@ public class RelocationIT extends ESIntegTestCase { final Semaphore postRecoveryShards = new Semaphore(0); final IndexEventListener listener = new IndexEventListener() { @Override - public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState, IndexShardState currentState, @Nullable String reason) { + public void indexShardStateChanged(IndexShard indexShard, @Nullable IndexShardState previousState, + IndexShardState currentState, @Nullable String reason) { if (currentState == IndexShardState.POST_RECOVERY) { postRecoveryShards.release(); } } }; - for (MockIndexEventListener.TestEventListener eventListener : internalCluster().getInstances(MockIndexEventListener.TestEventListener.class)) { + for (MockIndexEventListener.TestEventListener eventListener : internalCluster() + .getInstances(MockIndexEventListener.TestEventListener.class)) { eventListener.setNewDelegate(listener); } @@ -327,7 +339,10 @@ public class RelocationIT extends ESIntegTestCase { indexRandom(true, true, builders2); // verify cluster was finished. - assertFalse(client().admin().cluster().prepareHealth().setWaitForNoRelocatingShards(true).setWaitForEvents(Priority.LANGUID).setTimeout("30s").get().isTimedOut()); + assertFalse(client().admin().cluster().prepareHealth() + .setWaitForNoRelocatingShards(true) + .setWaitForEvents(Priority.LANGUID) + .setTimeout("30s").get().isTimedOut()); logger.info("--> DONE relocate the shard from {} to {}", fromNode, toNode); logger.debug("--> verifying all searches return the same number of docs"); @@ -374,17 +389,20 @@ public class RelocationIT extends ESIntegTestCase { MockTransportService mockTransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, p_node); for (DiscoveryNode node : clusterService.state().nodes()) { if (!node.equals(clusterService.localNode())) { - mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, node.getName()), new RecoveryCorruption(corruptionCount)); + mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, node.getName()), + new RecoveryCorruption(corruptionCount)); } } - client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get(); + client().admin().indices().prepareUpdateSettings(indexName).setSettings(Settings.builder() + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)).get(); corruptionCount.await(); logger.info("--> stopping replica assignment"); assertAcked(client().admin().cluster().prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); + .setTransientSettings(Settings.builder() + .put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"))); logger.info("--> wait for all replica shards to be removed, on all nodes"); assertBusy(() -> { @@ -408,7 +426,8 @@ public class RelocationIT extends ESIntegTestCase { Files.walkFileTree(shardLoc, new SimpleFileVisitor() 
{ @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - assertThat("found a temporary recovery file: " + file, file.getFileName().toString(), not(startsWith("recovery."))); + assertThat("found a temporary recovery file: " + file, file.getFileName().toString(), + not(startsWith("recovery."))); return FileVisitResult.CONTINUE; } }); @@ -496,13 +515,15 @@ public class RelocationIT extends ESIntegTestCase { } @Override - public void sendRequest(Transport.Connection connection, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException { + public void sendRequest(Transport.Connection connection, long requestId, String action, TransportRequest request, + TransportRequestOptions options) throws IOException { if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) { RecoveryFileChunkRequest chunkRequest = (RecoveryFileChunkRequest) request; if (chunkRequest.name().startsWith(IndexFileNames.SEGMENTS)) { // corrupting the segments_N files in order to make sure future recovery re-send files logger.debug("corrupting [{}] to {}. file name: [{}]", action, connection.getNode(), chunkRequest.name()); - assert chunkRequest.content().toBytesRef().bytes == chunkRequest.content().toBytesRef().bytes : "no internal reference!!"; + assert chunkRequest.content().toBytesRef().bytes == + chunkRequest.content().toBytesRef().bytes : "no internal reference!!"; byte[] array = chunkRequest.content().toBytesRef().bytes; array[0] = (byte) ~array[0]; // flip one byte in the content corruptionCount.countDown(); diff --git a/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java b/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java index ac8688c9847..973c687ebe8 100644 --- a/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java +++ b/server/src/test/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java @@ -89,7 +89,8 @@ public class TruncatedRecoveryIT extends ESIntegTestCase { // we have no replicas so far and make sure that we allocate the primary on the lucky node assertAcked(prepareCreate("test") .addMapping("type1", "field1", "type=text", "the_id", "type=text") - .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards()) + .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numberOfShards()) .put("index.routing.allocation.include._name", primariesNode.getNode().getName()))); // only allocate on the lucky node // index some docs and check if they are coming back @@ -112,7 +113,8 @@ public class TruncatedRecoveryIT extends ESIntegTestCase { final CountDownLatch latch = new CountDownLatch(1); final AtomicBoolean truncate = new AtomicBoolean(true); for (NodeStats dataNode : dataNodeStats) { - MockTransportService mockTransportService = ((MockTransportService) internalCluster().getInstance(TransportService.class, dataNode.getNode().getName())); + MockTransportService mockTransportService = ((MockTransportService) internalCluster() + .getInstance(TransportService.class, dataNode.getNode().getName())); mockTransportService.addSendBehavior(internalCluster().getInstance(TransportService.class, unluckyNode.getNode().getName()), (connection, requestId, action, request, options) -> { if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) { From 1f80c80b3ae1f99f77ea2d3a5fd519c722485fcf Mon Sep 17 
00:00:00 2001 From: Andrei Stefan Date: Mon, 17 Dec 2018 19:06:23 +0200 Subject: [PATCH 12/26] Add back one line removed by mistake regarding java version check and COMPAT jvm parameter existence --- .../function/scalar/datetime/NamedDateTimeProcessorTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java index ae152bba5d8..3531152c69b 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java @@ -41,6 +41,7 @@ public class NamedDateTimeProcessorTests extends AbstractWireSerializingTestCase<NamedDateTimeProcessor> { } public void testValidDayNamesInUTC() { + assumeJava9PlusAndCompatLocaleProviderSetting(); NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.DAY_NAME, UTC); assertEquals("Thursday", proc.process(dateTime(0L))); assertEquals("Saturday", proc.process(dateTime(-64164233612338L))); From cd632de11636d5995d2e1c8c8cfe4abd001a9642 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 17 Dec 2018 13:24:31 -0500 Subject: [PATCH 13/26] Do not resolve addresses in remote connection info (#36671) The remote connection info API leads to resolving addresses of seed nodes when invoked. This is problematic because if a hostname fails to resolve, we would not display any remote connection info. Yet, a hostname not resolving can happen across remote clusters, especially in the modern world of cloud services with dynamically changing IPs. Instead, the remote connection info API should be providing the configured seed nodes. This commit changes the remote connection info to display the configured seed nodes, avoiding a hostname resolution. Note that care was taken to preserve backwards compatibility with previous versions that expect the remote connection info to serialize a transport address instead of a string representing the hostname. --- .../transport/RemoteClusterAware.java | 29 ++-- .../transport/RemoteClusterConnection.java | 25 ++- .../transport/RemoteClusterService.java | 16 +- .../transport/RemoteConnectionInfo.java | 52 +++++- .../RemoteClusterConnectionTests.java | 163 +++++++++--------- .../transport/RemoteClusterServiceTests.java | 27 +-- 6 files changed, 188 insertions(+), 124 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java index 2c36af8638f..237e73e572a 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java @@ -183,10 +183,11 @@ public abstract class RemoteClusterAware { * (ProxyAddresss, [SeedNodeSuppliers]). If a cluster is configured with a proxy address all seed nodes will point to * {@link TransportAddress#META_ADDRESS} and their configured address will be used as the hostname for the generated discovery node.
*/ - protected static Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> buildRemoteClustersDynamicConfig(Settings settings) { - final Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> remoteSeeds = + protected static Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> buildRemoteClustersDynamicConfig( + final Settings settings) { + final Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> remoteSeeds = buildRemoteClustersDynamicConfig(settings, REMOTE_CLUSTERS_SEEDS); - final Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> searchRemoteSeeds = + final Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> searchRemoteSeeds = buildRemoteClustersDynamicConfig(settings, SEARCH_REMOTE_CLUSTERS_SEEDS); // sort the intersection for predictable output order final NavigableSet<String> intersection = @@ -205,7 +206,7 @@ public abstract class RemoteClusterAware { .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); } - private static Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> buildRemoteClustersDynamicConfig( + private static Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> buildRemoteClustersDynamicConfig( final Settings settings, final Setting.AffixSetting<List<String>> seedsSetting) { final Stream<Setting<List<String>>> allConcreteSettings = seedsSetting.getAllConcreteSettings(settings); return allConcreteSettings.collect( @@ -214,9 +215,9 @@ public abstract class RemoteClusterAware { List<String> addresses = concreteSetting.get(settings); final boolean proxyMode = REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).existsOrFallbackExists(settings); - List<Supplier<DiscoveryNode>> nodes = new ArrayList<>(addresses.size()); + List<Tuple<String, Supplier<DiscoveryNode>>> nodes = new ArrayList<>(addresses.size()); for (String address : addresses) { - nodes.add(() -> buildSeedNode(clusterName, address, proxyMode)); + nodes.add(Tuple.tuple(address, () -> buildSeedNode(clusterName, address, proxyMode))); } return new Tuple<>(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).get(settings), nodes); })); @@ -304,16 +305,24 @@ public abstract class RemoteClusterAware { (namespace, value) -> {}); } - - protected static InetSocketAddress parseSeedAddress(String remoteHost) { - String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost)); + static InetSocketAddress parseSeedAddress(String remoteHost) { + final Tuple<String, Integer> hostPort = parseHostPort(remoteHost); + final String host = hostPort.v1(); + assert hostPort.v2() != null : remoteHost; + final int port = hostPort.v2(); InetAddress hostAddress; try { hostAddress = InetAddress.getByName(host); } catch (UnknownHostException e) { throw new IllegalArgumentException("unknown host [" + host + "]", e); } - return new InetSocketAddress(hostAddress, parsePort(remoteHost)); + return new InetSocketAddress(hostAddress, port); + } + + public static Tuple<String, Integer> parseHostPort(final String remoteHost) { + final String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost)); + final int port = parsePort(remoteHost); + return Tuple.tuple(host, port); } private static int parsePort(String remoteHost) { diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 87dd99e6590..7ea55925262 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -35,6 +35,7 @@ import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -95,7 +96,7 @@ final class
RemoteClusterConnection implements TransportConnectionListener, Clos private final Predicate<DiscoveryNode> nodePredicate; private final ThreadPool threadPool; private volatile String proxyAddress; - private volatile List<Supplier<DiscoveryNode>> seedNodes; + private volatile List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes; private volatile boolean skipUnavailable; private final ConnectHandler connectHandler; private final TimeValue initialConnectionTimeout; @@ -111,7 +112,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos * @param nodePredicate a predicate to filter eligible remote nodes to connect to * @param proxyAddress the proxy address */ - RemoteClusterConnection(Settings settings, String clusterAlias, List<Supplier<DiscoveryNode>> seedNodes, + RemoteClusterConnection(Settings settings, String clusterAlias, List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes, TransportService transportService, int maxNumRemoteConnections, Predicate<DiscoveryNode> nodePredicate, String proxyAddress) { this(settings, clusterAlias, seedNodes, transportService, maxNumRemoteConnections, nodePredicate, proxyAddress, @@ -119,7 +120,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos } // Public for tests to pass a StubbableConnectionManager - RemoteClusterConnection(Settings settings, String clusterAlias, List<Supplier<DiscoveryNode>> seedNodes, + RemoteClusterConnection(Settings settings, String clusterAlias, List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes, TransportService transportService, int maxNumRemoteConnections, Predicate<DiscoveryNode> nodePredicate, String proxyAddress, ConnectionManager connectionManager) { this.transportService = transportService; @@ -155,7 +156,10 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos /** * Updates the list of seed nodes for this cluster connection */ - synchronized void updateSeedNodes(String proxyAddress, List<Supplier<DiscoveryNode>> seedNodes, ActionListener<Void> connectListener) { + synchronized void updateSeedNodes( + final String proxyAddress, + final List<Tuple<String, Supplier<DiscoveryNode>>> seedNodes, + final ActionListener<Void> connectListener) { this.seedNodes = Collections.unmodifiableList(new ArrayList<>(seedNodes)); this.proxyAddress = proxyAddress; connectHandler.connect(connectListener); @@ -465,7 +469,7 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos maybeConnect(); } }); - collectRemoteNodes(seedNodes.iterator(), transportService, connectionManager, listener); + collectRemoteNodes(seedNodes.stream().map(Tuple::v2).iterator(), transportService, connectionManager, listener); } }); } @@ -672,10 +676,13 @@ final class RemoteClusterConnection implements TransportConnectionListener, Clos * Get the information about remote nodes to be rendered on {@code _remote/info} requests.
*/ public RemoteConnectionInfo getConnectionInfo() { - List<TransportAddress> seedNodeAddresses = seedNodes.stream().map(node -> node.get().getAddress()).collect (Collectors.toList()); - return new RemoteConnectionInfo(clusterAlias, seedNodeAddresses, maxNumRemoteConnections, connectedNodes.size(), - initialConnectionTimeout, skipUnavailable); + return new RemoteConnectionInfo( + clusterAlias, + seedNodes.stream().map(Tuple::v1).collect(Collectors.toList()), + maxNumRemoteConnections, + connectedNodes.size(), + initialConnectionTimeout, + skipUnavailable); } int getNumNodesConnected() { diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index fda0b90f19e..cb802f13fdb 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -201,7 +201,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl * @param seeds a cluster alias to discovery node mapping representing the remote clusters seeds nodes * @param connectionListener a listener invoked once every configured cluster has been connected to */ - private synchronized void updateRemoteClusters(Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> seeds, + private synchronized void updateRemoteClusters(Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> seeds, ActionListener<Void> connectionListener) { if (seeds.containsKey(LOCAL_CLUSTER_GROUP_KEY)) { throw new IllegalArgumentException("remote clusters must not have the empty string as its key"); } @@ -212,8 +212,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl } else { CountDown countDown = new CountDown(seeds.size()); remoteClusters.putAll(this.remoteClusters); - for (Map.Entry<String, Tuple<String, List<Supplier<DiscoveryNode>>>> entry : seeds.entrySet()) { - List<Supplier<DiscoveryNode>> seedList = entry.getValue().v2(); + for (Map.Entry<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> entry : seeds.entrySet()) { + List<Tuple<String, Supplier<DiscoveryNode>>> seedList = entry.getValue().v2(); String proxyAddress = entry.getValue().v1(); RemoteClusterConnection remote = this.remoteClusters.get(entry.getKey()); @@ -408,9 +408,10 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl final List<String> addresses, final String proxyAddress, final ActionListener<Void> connectionListener) { - final List<Supplier<DiscoveryNode>> nodes = addresses.stream().<Supplier<DiscoveryNode>>map(address -> () -> - buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress)) - ).collect(Collectors.toList()); + final List<Tuple<String, Supplier<DiscoveryNode>>> nodes = + addresses.stream().<Tuple<String, Supplier<DiscoveryNode>>>map(address -> Tuple.tuple(address, () -> + buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress))) + ).collect(Collectors.toList()); updateRemoteClusters(Collections.singletonMap(clusterAlias, new Tuple<>(proxyAddress, nodes)), connectionListener); } @@ -421,7 +422,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl void initializeRemoteClusters() { final TimeValue timeValue = REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings); final PlainActionFuture<Void> future = new PlainActionFuture<>(); - Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> seeds = RemoteClusterAware.buildRemoteClustersDynamicConfig(settings); + Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> seeds = + RemoteClusterAware.buildRemoteClustersDynamicConfig(settings); updateRemoteClusters(seeds, future); try { future.get(timeValue.millis(), TimeUnit.MILLISECONDS); diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java index c2024e39228..7c51ca7b9c8 100% ---
a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionInfo.java @@ -16,9 +16,11 @@ * specific language governing permissions and limitations * under the License. */ + package org.elasticsearch.transport; import org.elasticsearch.Version; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -27,25 +29,29 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; -import static java.util.Collections.emptyList; - import java.io.IOException; +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Arrays; import java.util.List; import java.util.Objects; +import java.util.stream.Collectors; + +import static java.util.Collections.emptyList; /** * This class encapsulates all remote cluster information to be rendered on * {@code _remote/info} requests. */ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable { - final List<TransportAddress> seedNodes; + final List<String> seedNodes; final int connectionsPerCluster; final TimeValue initialConnectionTimeout; final int numNodesConnected; final String clusterAlias; final boolean skipUnavailable; - RemoteConnectionInfo(String clusterAlias, List<TransportAddress> seedNodes, + RemoteConnectionInfo(String clusterAlias, List<String> seedNodes, int connectionsPerCluster, int numNodesConnected, TimeValue initialConnectionTimeout, boolean skipUnavailable) { this.clusterAlias = clusterAlias; @@ -57,7 +63,17 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable } public RemoteConnectionInfo(StreamInput input) throws IOException { - seedNodes = input.readList(TransportAddress::new); + if (input.getVersion().onOrAfter(Version.V_7_0_0)) { + seedNodes = Arrays.asList(input.readStringArray()); + } else { + // versions prior to 7.0.0 sent the resolved transport address of the seed nodes + final List<TransportAddress> transportAddresses = input.readList(TransportAddress::new); + seedNodes = + transportAddresses + .stream() + .map(a -> a.address().getHostString() + ":" + a.address().getPort()) + .collect(Collectors.toList()); + } if (input.getVersion().before(Version.V_7_0_0)) { /* * Versions before 7.0 sent the HTTP addresses of all nodes in the @@ -78,7 +94,26 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable @Override public void writeTo(StreamOutput out) throws IOException { - out.writeList(seedNodes); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeStringArray(seedNodes.toArray(new String[0])); + } else { + // versions prior to 7.0.0 received the resolved transport address of the seed nodes + out.writeList(seedNodes + .stream() + .map( + s -> { + final Tuple<String, Integer> hostPort = RemoteClusterAware.parseHostPort(s); + assert hostPort.v2() != null : s; + try { + return new TransportAddress( + InetAddress.getByAddress(hostPort.v1(), TransportAddress.META_ADDRESS.getAddress()), + hostPort.v2()); + } catch (final UnknownHostException e) { + throw new AssertionError(e); + } + }) + .collect(Collectors.toList())); + } if (out.getVersion().before(Version.V_7_0_0)) { /* * Versions before 7.0 sent the HTTP addresses of all nodes in the @@ -104,8 +139,8 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable
builder.startObject(clusterAlias); { builder.startArray("seeds"); - for (TransportAddress addr : seedNodes) { - builder.value(addr.toString()); + for (String addr : seedNodes) { + builder.value(addr); } builder.endArray(); builder.field("connected", numNodesConnected > 0); @@ -136,4 +171,5 @@ public final class RemoteConnectionInfo implements ToXContentFragment, Writeable return Objects.hash(seedNodes, connectionsPerCluster, initialConnectionTimeout, numNodesConnected, clusterAlias, skipUnavailable); } + } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 50391162367..02e701ed4bc 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; @@ -80,6 +81,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.function.Supplier; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -164,9 +166,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); assertTrue(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -206,9 +208,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); assertTrue(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -259,9 +261,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, 
null)) { + Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); assertTrue(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); @@ -282,7 +284,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(discoverableTransport.getLocalDiscoNode()); knownNodes.add(incompatibleTransport.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List> seedNodes = Arrays.asList(() -> incompatibleSeedNode, () -> seedNode); + List>> seedNodes = Arrays.asList( + Tuple.tuple(incompatibleSeedNode.toString(), () -> incompatibleSeedNode), + Tuple.tuple(seedNode.toString(), () -> seedNode)); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -317,9 +321,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); assertTrue(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); assertFalse(connectionManager.nodeConnected(spareNode)); @@ -367,9 +371,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> n.equals(rejectedNode) == false, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); if (rejectedNode.equals(seedNode)) { assertFalse(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); @@ -382,11 +386,15 @@ public class RemoteClusterConnectionTests extends ESTestCase { } } } - private void updateSeedNodes(RemoteClusterConnection connection, List> seedNodes) throws Exception { + private void updateSeedNodes( + final RemoteClusterConnection connection, final List>> seedNodes) throws Exception { updateSeedNodes(connection, seedNodes, null); } - private void updateSeedNodes(RemoteClusterConnection connection, List> seedNodes, String proxyAddress) + private void updateSeedNodes( + final RemoteClusterConnection connection, + final List>> seedNodes, + final String proxyAddress) throws Exception { CountDownLatch latch = new CountDownLatch(1); AtomicReference exceptionAtomicReference = new AtomicReference<>(); @@ -428,9 +436,11 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try 
(RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - expectThrows(Exception.class, () -> updateSeedNodes(connection, Arrays.asList(() -> seedNode))); + expectThrows( + Exception.class, + () -> updateSeedNodes(connection, Arrays.asList(Tuple.tuple(seedNode.toString(), () -> seedNode)))); assertFalse(connectionManager.nodeConnected(seedNode)); assertTrue(connection.assertNoRunningConnections()); } @@ -481,7 +491,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) { connection.addConnectedNode(seedNode); for (DiscoveryNode node : knownNodes) { final Transport.Connection transportConnection = connection.getConnection(node); @@ -524,7 +534,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { CountDownLatch listenerCalled = new CountDownLatch(1); AtomicReference exceptionReference = new AtomicReference<>(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { ActionListener listener = ActionListener.wrap(x -> { listenerCalled.countDown(); fail("expected exception"); @@ -532,7 +542,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { exceptionReference.set(x); listenerCalled.countDown(); }); - connection.updateSeedNodes(null, Arrays.asList(() -> seedNode), listener); + connection.updateSeedNodes(null, seedNodes(seedNode), listener); acceptedLatch.await(); connection.close(); // now close it, this should trigger an interrupt on the socket and we can move on assertTrue(connection.assertNoRunningConnections()); @@ -548,6 +558,18 @@ public class RemoteClusterConnectionTests extends ESTestCase { } } + private List>> seedNodes(final DiscoveryNode... 
seedNodes) { + if (seedNodes.length == 0) { + return Collections.emptyList(); + } else if (seedNodes.length == 1) { + return Collections.singletonList(Tuple.tuple(seedNodes[0].toString(), () -> seedNodes[0])); + } else { + return Arrays.stream(seedNodes) + .map(s -> Tuple.tuple(s.toString(), (Supplier)() -> s)) + .collect(Collectors.toList()); + } + } + public void testFetchShards() throws Exception { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); @@ -559,11 +581,11 @@ public class RemoteClusterConnectionTests extends ESTestCase { try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { service.start(); service.acceptIncomingRequests(); - List> nodes = Collections.singletonList(() -> seedNode); + final List>> seedNodes = seedNodes(seedNode); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - nodes, service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes, service, Integer.MAX_VALUE, n -> true, null)) { if (randomBoolean()) { - updateSeedNodes(connection, nodes); + updateSeedNodes(connection, seedNodes); } if (randomBoolean()) { connection.updateSkipUnavailable(randomBoolean()); @@ -599,9 +621,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { service.start(); service.acceptIncomingRequests(); - List> nodes = Collections.singletonList(() -> seedNode); + final List>> seedNodes = seedNodes(seedNode); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - nodes, service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes, service, Integer.MAX_VALUE, n -> true, null)) { SearchRequest request = new SearchRequest("test-index"); Thread[] threads = new Thread[10]; for (int i = 0; i < threads.length; i++) { @@ -655,7 +677,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Collections.singletonList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); SearchRequest request = new SearchRequest("test-index"); @@ -759,7 +781,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(discoverableTransport.getLocalDiscoNode()); knownNodes.add(seedTransport1.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List> seedNodes = Arrays.asList(() -> seedNode1, () -> seedNode); + List>> seedNodes = seedNodes(seedNode1, seedNode); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -839,7 +861,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(discoverableTransport.getLocalDiscoNode()); knownNodes.add(seedTransport1.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List> seedNodes = Arrays.asList(() -> seedNode1, () -> seedNode); + List>> seedNodes = seedNodes(seedNode1, seedNode); Collections.shuffle(seedNodes, random()); try (MockTransportService service = 
MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -926,7 +948,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { knownNodes.add(transport3.getLocalDiscoNode()); knownNodes.add(transport2.getLocalDiscoNode()); Collections.shuffle(knownNodes, random()); - List> seedNodes = Arrays.asList(() -> node3, () -> node1, () -> node2); + List>> seedNodes = seedNodes(node3, node1, node2); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -958,44 +980,32 @@ public class RemoteClusterConnectionTests extends ESTestCase { } public void testRemoteConnectionInfo() throws IOException { - RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 3, TimeValue.timeValueMinutes(30), false); + RemoteConnectionInfo stats = + new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), false); assertSerialization(stats); - RemoteConnectionInfo stats1 = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 4, TimeValue.timeValueMinutes(30), true); + RemoteConnectionInfo stats1 = + new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 4, TimeValue.timeValueMinutes(30), true); assertSerialization(stats1); assertNotEquals(stats, stats1); - stats1 = new RemoteConnectionInfo("test_cluster_1", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 3, TimeValue.timeValueMinutes(30), false); + stats1 = new RemoteConnectionInfo("test_cluster_1", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), false); assertSerialization(stats1); assertNotEquals(stats, stats1); - stats1 = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 15)), - 4, 3, TimeValue.timeValueMinutes(30), false); + stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:15"), 4, 3, TimeValue.timeValueMinutes(30), false); assertSerialization(stats1); assertNotEquals(stats, stats1); - stats1 = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 3, TimeValue.timeValueMinutes(30), true); + stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), true); assertSerialization(stats1); assertNotEquals(stats, stats1); - stats1 = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 3, TimeValue.timeValueMinutes(325), true); + stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(325), true); assertSerialization(stats1); assertNotEquals(stats, stats1); - stats1 = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 5, 3, TimeValue.timeValueMinutes(30), false); + stats1 = new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 5, 3, TimeValue.timeValueMinutes(30), false); assertSerialization(stats1); assertNotEquals(stats, stats1); } @@ -1016,9 +1026,8 @@ public class RemoteClusterConnectionTests extends ESTestCase { public void testRemoteConnectionInfoBwComp() throws IOException { final Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_1_0, 
VersionUtils.getPreviousVersion(Version.V_7_0_0)); - RemoteConnectionInfo expected = new RemoteConnectionInfo("test_cluster", - Collections.singletonList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 4, new TimeValue(30, TimeUnit.MINUTES), false); + RemoteConnectionInfo expected = + new RemoteConnectionInfo("test_cluster", Arrays.asList("0.0.0.0:1"), 4, 4, new TimeValue(30, TimeUnit.MINUTES), false); // This version was created using the serialization code in use from 6.1 but before 7.0 String encoded = "AQQAAAAABzAuMC4wLjAAAAABAQQAAAAABzAuMC4wLjAAAABQBDwEBAx0ZXN0X2NsdXN0ZXIA"; @@ -1042,27 +1051,25 @@ public class RemoteClusterConnectionTests extends ESTestCase { } public void testRenderConnectionInfoXContent() throws IOException { - RemoteConnectionInfo stats = new RemoteConnectionInfo("test_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1)), - 4, 3, TimeValue.timeValueMinutes(30), true); + RemoteConnectionInfo stats = + new RemoteConnectionInfo("test_cluster", Arrays.asList("seed:1"), 4, 3, TimeValue.timeValueMinutes(30), true); stats = assertSerialization(stats); XContentBuilder builder = XContentFactory.jsonBuilder(); builder.startObject(); stats.toXContent(builder, null); builder.endObject(); - assertEquals("{\"test_cluster\":{\"seeds\":[\"0.0.0.0:1\"],\"connected\":true," + + assertEquals("{\"test_cluster\":{\"seeds\":[\"seed:1\"],\"connected\":true," + "\"num_nodes_connected\":3,\"max_connections_per_cluster\":4,\"initial_connect_timeout\":\"30m\"," + "\"skip_unavailable\":true}}", Strings.toString(builder)); - stats = new RemoteConnectionInfo("some_other_cluster", - Arrays.asList(new TransportAddress(TransportAddress.META_ADDRESS, 1), new TransportAddress(TransportAddress.META_ADDRESS, 2)), - 2, 0, TimeValue.timeValueSeconds(30), false); + stats = new RemoteConnectionInfo( + "some_other_cluster", Arrays.asList("seed:1", "seed:2"), 2, 0, TimeValue.timeValueSeconds(30), false); stats = assertSerialization(stats); builder = XContentFactory.jsonBuilder(); builder.startObject(); stats.toXContent(builder, null); builder.endObject(); - assertEquals("{\"some_other_cluster\":{\"seeds\":[\"0.0.0.0:1\",\"0.0.0.0:2\"]," + assertEquals("{\"some_other_cluster\":{\"seeds\":[\"seed:1\",\"seed:2\"]," + "\"connected\":false,\"num_nodes_connected\":0,\"max_connections_per_cluster\":2,\"initial_connect_timeout\":\"30s\"," + "\"skip_unavailable\":false}}", Strings.toString(builder)); } @@ -1081,7 +1088,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); assertFalse(connectionManager.nodeConnected(seedNode)); assertFalse(connectionManager.nodeConnected(discoverableNode)); @@ -1131,9 +1138,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList(() -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { if (randomBoolean()) { - updateSeedNodes(connection, Arrays.asList(() -> seedNode)); + updateSeedNodes(connection, 
seedNodes(seedNode)); } CountDownLatch responseLatch = new CountDownLatch(1); AtomicReference> reference = new AtomicReference<>(); @@ -1165,14 +1172,14 @@ public class RemoteClusterConnectionTests extends ESTestCase { List discoverableTransports = new CopyOnWriteArrayList<>(); try { final int numDiscoverableNodes = randomIntBetween(5, 20); - List> discoverableNodes = new ArrayList<>(numDiscoverableNodes); + List>> discoverableNodes = new ArrayList<>(numDiscoverableNodes); for (int i = 0; i < numDiscoverableNodes; i++ ) { MockTransportService transportService = startTransport("discoverable_node" + i, knownNodes, Version.CURRENT); - discoverableNodes.add(transportService::getLocalDiscoNode); + discoverableNodes.add(Tuple.tuple("discoverable_node" + i, transportService::getLocalDiscoNode)); discoverableTransports.add(transportService); } - List> seedNodes = randomSubsetOf(discoverableNodes); + List>> seedNodes = randomSubsetOf(discoverableNodes); Collections.shuffle(seedNodes, random()); try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { @@ -1221,7 +1228,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { discoverableTransports.add(transportService); connection.addConnectedNode(transportService.getLocalDiscoNode()); } else { - DiscoveryNode node = randomFrom(discoverableNodes).get(); + DiscoveryNode node = randomFrom(discoverableNodes).v2().get(); connection.onNodeDisconnected(node); } } @@ -1269,14 +1276,16 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Arrays.asList( () -> seedNode), service, Integer.MAX_VALUE, n -> true, null)) { + seedNodes(seedNode), service, Integer.MAX_VALUE, n -> true, null)) { ConnectionManager connectionManager = connection.getConnectionManager(); - updateSeedNodes(connection, Collections.singletonList(() -> seedNode)); + updateSeedNodes(connection, seedNodes(seedNode)); assertTrue(connectionManager.nodeConnected(seedNode)); assertTrue(connectionManager.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); - List> discoveryNodes = - Arrays.asList(otherClusterTransport::getLocalDiscoNode, () -> seedNode); + List>> discoveryNodes = + Arrays.asList( + Tuple.tuple("other", otherClusterTransport::getLocalDiscoNode), + Tuple.tuple(seedNode.toString(), () -> seedNode)); Collections.shuffle(discoveryNodes, random()); updateSeedNodes(connection, discoveryNodes); assertTrue(connectionManager.nodeConnected(seedNode)); @@ -1287,7 +1296,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { assertTrue(connectionManager.nodeConnected(discoverableNode)); assertTrue(connection.assertNoRunningConnections()); IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, () -> - updateSeedNodes(connection, Arrays.asList(() -> otherClusterTransport.getLocalDiscoNode()))); + updateSeedNodes(connection, Arrays.asList(Tuple.tuple("other", otherClusterTransport::getLocalDiscoNode)))); assertThat(illegalStateException.getMessage(), startsWith("handshake failed, mismatched cluster name [Cluster [otherCluster]]" + " - {other_cluster_discoverable_node}")); @@ -1339,7 +1348,7 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); try (RemoteClusterConnection connection = new 
RemoteClusterConnection(Settings.EMPTY, "test-cluster", - Collections.singletonList(() -> connectedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) { + seedNodes(connectedNode), service, Integer.MAX_VALUE, n -> true, null, connectionManager)) { connection.addConnectedNode(connectedNode); for (int i = 0; i < 10; i++) { //always a direct connection as the remote node is already connected @@ -1376,10 +1385,10 @@ public class RemoteClusterConnectionTests extends ESTestCase { service.start(); service.acceptIncomingRequests(); CountDownLatch multipleResolveLatch = new CountDownLatch(2); - Supplier<DiscoveryNode> seedSupplier = () -> { + Tuple<String, Supplier<DiscoveryNode>> seedSupplier = Tuple.tuple(seedNode.toString(), () -> { multipleResolveLatch.countDown(); return seedNode; - }; + }); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", Arrays.asList(seedSupplier), service, Integer.MAX_VALUE, n -> true, null)) { updateSeedNodes(connection, Arrays.asList(seedSupplier)); @@ -1409,9 +1418,9 @@ public class RemoteClusterConnectionTests extends ESTestCase { threadPool, null, Collections.emptySet())) { service.start(); service.acceptIncomingRequests(); - Supplier<DiscoveryNode> seedSupplier = () -> - RemoteClusterAware.buildSeedNode("some-remote-cluster", "node_0:" + randomIntBetween(1, 10000), true); - assertEquals("node_0", seedSupplier.get().getAttributes().get("server_name")); + Tuple<String, Supplier<DiscoveryNode>> seedSupplier = Tuple.tuple("node_0", () -> + RemoteClusterAware.buildSeedNode("some-remote-cluster", "node_0:" + randomIntBetween(1, 10000), true)); + assertEquals("node_0", seedSupplier.v2().get().getAttributes().get("server_name")); try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", Arrays.asList(seedSupplier), service, Integer.MAX_VALUE, n -> true, proxyAddress)) { updateSeedNodes(connection, Arrays.asList(seedSupplier), proxyAddress); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index dfc5d4367b4..34dfc420133 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -125,41 +125,42 @@ public class RemoteClusterServiceTests extends ESTestCase { } public void testBuildRemoteClustersDynamicConfig() throws Exception { - Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> map = RemoteClusterService.buildRemoteClustersDynamicConfig( - Settings.builder() - .put("cluster.remote.foo.seeds", "192.168.0.1:8080") - .put("cluster.remote.bar.seeds", "[::1]:9090") - .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") - .put("cluster.remote.boom.proxy", "foo.bar.com:1234") - .put("search.remote.quux.seeds", "quux:9300") - .put("search.remote.quux.proxy", "quux-proxy:19300") - .build()); + Map<String, Tuple<String, List<Tuple<String, Supplier<DiscoveryNode>>>>> map = + RemoteClusterService.buildRemoteClustersDynamicConfig( + Settings.builder() + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("cluster.remote.bar.seeds", "[::1]:9090") + .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") + .put("cluster.remote.boom.proxy", "foo.bar.com:1234") + .put("search.remote.quux.seeds", "quux:9300") + .put("search.remote.quux.proxy", "quux-proxy:19300") + .build()); assertThat(map.keySet(), containsInAnyOrder(equalTo("foo"), equalTo("bar"), equalTo("boom"), equalTo("quux"))); assertThat(map.get("foo").v2(), hasSize(1)); assertThat(map.get("bar").v2(), hasSize(1)); assertThat(map.get("boom").v2(),
hasSize(1)); assertThat(map.get("quux").v2(), hasSize(1)); - DiscoveryNode foo = map.get("foo").v2().get(0).get(); + DiscoveryNode foo = map.get("foo").v2().get(0).v2().get(); assertEquals("", map.get("foo").v1()); assertEquals(foo.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("192.168.0.1"), 8080))); assertEquals(foo.getId(), "foo#192.168.0.1:8080"); assertEquals(foo.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); - DiscoveryNode bar = map.get("bar").v2().get(0).get(); + DiscoveryNode bar = map.get("bar").v2().get(0).v2().get(); assertEquals(bar.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("[::1]"), 9090))); assertEquals(bar.getId(), "bar#[::1]:9090"); assertEquals("", map.get("bar").v1()); assertEquals(bar.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); - DiscoveryNode boom = map.get("boom").v2().get(0).get(); + DiscoveryNode boom = map.get("boom").v2().get(0).v2().get(); assertEquals(boom.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0)); assertEquals("boom-node1.internal", boom.getHostName()); assertEquals(boom.getId(), "boom#boom-node1.internal:1000"); assertEquals("foo.bar.com:1234", map.get("boom").v1()); assertEquals(boom.getVersion(), Version.CURRENT.minimumCompatibilityVersion()); - DiscoveryNode quux = map.get("quux").v2().get(0).get(); + DiscoveryNode quux = map.get("quux").v2().get(0).v2().get(); assertEquals(quux.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0)); assertEquals("quux", quux.getHostName()); assertEquals(quux.getId(), "quux#quux:9300");

From a0e7e571e448d1c4f91e641dc411d51023f42ebc Mon Sep 17 00:00:00 2001
From: Jack Conradson
Date: Mon, 17 Dec 2018 10:50:19 -0800
Subject: [PATCH 14/26] [Painless] Add boxed type to boxed type casts for method/return (#36571)

This adds implicit boxed-type to boxed-type casts for non-def types, to create asymmetric casting relative to the def type when calling methods or returning values. This means that a user calling a method that takes an Integer can legally call it with a Byte, Short, etc., which matches the way def works. This creates consistency in the casting model that did not previously exist.
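For illustration, a minimal sketch of what becomes legal, written in the ScriptTestCase idiom of the BoxedCastTests added below (the scripts are taken verbatim from that test):

    // Integer.compareTo(Integer) invoked with a byte/Byte argument: the argument
    // is unboxed if necessary, widened from byte to int, and then boxed to Integer.
    assertEquals(0, exec("byte u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
    assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);"));
    // Narrowing across boxed types remains illegal for non-def types, e.g. long -> Integer:
    expectScriptThrows(ClassCastException.class,
        () -> exec("long u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);"));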
--- .../painless/AnalyzerCaster.java | 182 ++----- .../elasticsearch/painless/MethodWriter.java | 4 + .../painless/lookup/PainlessCast.java | 9 + .../painless/BoxedCastTests.java | 511 ++++++++++++++++++ 4 files changed, 573 insertions(+), 133 deletions(-) create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/BoxedCastTests.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index f00a30a62c4..ac21be1f5c0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -73,38 +73,6 @@ public final class AnalyzerCaster { } else if (expected == Double.class) { return PainlessCast.originalTypetoTargetType(def.class, Double.class, explicit); } - } else if (actual == Object.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Byte.class, true, byte.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Short.class, true, short.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Character.class, true, char.class); - } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Integer.class, true, int.class); - } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Long.class, true, long.class); - } else if (expected == float.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Float.class, true, float.class); - } else if (expected == double.class && explicit && internal) { - return PainlessCast.unboxTargetType(Object.class, Double.class, true, double.class); - } - } else if (actual == Number.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Byte.class, true, byte.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Short.class, true, short.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Character.class, true, char.class); - } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Integer.class, true, int.class); - } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Long.class, true, long.class); - } else if (expected == float.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Float.class, true, float.class); - } else if (expected == double.class && explicit && internal) { - return PainlessCast.unboxTargetType(Number.class, Double.class, true, double.class); - } } else if (actual == String.class) { if (expected == char.class && explicit) { return PainlessCast.originalTypetoTargetType(String.class, char.class, true); @@ -140,8 +108,6 @@ public final class AnalyzerCaster { return PainlessCast.boxTargetType(byte.class, byte.class, explicit, byte.class); } else if (expected == Short.class && internal) { return PainlessCast.boxTargetType(byte.class, short.class, explicit, short.class); - } else if (expected == Character.class 
&& explicit && internal) { - return PainlessCast.boxTargetType(byte.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { return PainlessCast.boxTargetType(byte.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { @@ -170,12 +136,8 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(short.class, float.class, explicit); } else if (expected == double.class) { return PainlessCast.originalTypetoTargetType(short.class, double.class, explicit); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(short.class, byte.class, true, byte.class); } else if (expected == Short.class && internal) { return PainlessCast.boxTargetType(short.class, short.class, explicit, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(short.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { return PainlessCast.boxTargetType(short.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { @@ -206,10 +168,6 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(char.class, float.class, explicit); } else if (expected == double.class) { return PainlessCast.originalTypetoTargetType(char.class, double.class, explicit); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(char.class, byte.class, true, byte.class); - } else if (expected == Short.class && internal) { - return PainlessCast.boxTargetType(char.class, short.class, explicit, short.class); } else if (expected == Character.class && internal) { return PainlessCast.boxTargetType(char.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { @@ -240,12 +198,6 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(int.class, float.class, explicit); } else if (expected == double.class) { return PainlessCast.originalTypetoTargetType(int.class, double.class, explicit); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(int.class, byte.class, true, byte.class); - } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTargetType(int.class, short.class, true, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(int.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { return PainlessCast.boxTargetType(int.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { @@ -274,14 +226,6 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(long.class, float.class, explicit); } else if (expected == double.class) { return PainlessCast.originalTypetoTargetType(long.class, double.class, explicit); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(long.class, byte.class, true, byte.class); - } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTargetType(long.class, short.class, true, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(long.class, char.class, true, char.class); - } else if (expected == Integer.class && explicit && internal) { - return PainlessCast.boxTargetType(long.class, 
int.class, true, int.class); } else if (expected == Long.class && internal) { return PainlessCast.boxTargetType(long.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { @@ -308,16 +252,6 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(float.class, long.class, true); } else if (expected == double.class) { return PainlessCast.originalTypetoTargetType(float.class, double.class, explicit); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(float.class, byte.class, true, byte.class); - } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTargetType(float.class, short.class, true, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(float.class, char.class, true, char.class); - } else if (expected == Integer.class && explicit && internal) { - return PainlessCast.boxTargetType(float.class, int.class, true, int.class); - } else if (expected == Long.class && explicit && internal) { - return PainlessCast.boxTargetType(float.class, long.class, true, long.class); } else if (expected == Float.class && internal) { return PainlessCast.boxTargetType(float.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { @@ -342,18 +276,6 @@ public final class AnalyzerCaster { return PainlessCast.originalTypetoTargetType(double.class, long.class, true); } else if (expected == float.class && explicit) { return PainlessCast.originalTypetoTargetType(double.class, float.class, true); - } else if (expected == Byte.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, byte.class, true, byte.class); - } else if (expected == Short.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, short.class, true, short.class); - } else if (expected == Character.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, char.class, true, char.class); - } else if (expected == Integer.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, int.class, true, int.class); - } else if (expected == Long.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, long.class, true, long.class); - } else if (expected == Float.class && explicit && internal) { - return PainlessCast.boxTargetType(double.class, float.class, true, float.class); } else if (expected == Double.class && internal) { return PainlessCast.boxTargetType(double.class, double.class, explicit, double.class); } @@ -366,8 +288,6 @@ public final class AnalyzerCaster { return PainlessCast.unboxOriginalType(byte.class, byte.class, explicit, byte.class); } else if (expected == short.class && internal) { return PainlessCast.unboxOriginalType(byte.class, short.class, explicit, byte.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(byte.class, char.class, true, byte.class); } else if (expected == int.class && internal) { return PainlessCast.unboxOriginalType(byte.class, int.class, explicit, byte.class); } else if (expected == long.class && internal) { @@ -376,14 +296,20 @@ public final class AnalyzerCaster { return PainlessCast.unboxOriginalType(byte.class, float.class, explicit, byte.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(byte.class, double.class, explicit, byte.class); + } else if 
(expected == Short.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, short.class); + } else if (expected == Integer.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, int.class); + } else if (expected == Long.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, long.class); + } else if (expected == Float.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, byte.class, double.class); } } else if (actual == Short.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(short.class, byte.class, true, short.class); - } else if (expected == short.class && internal) { + if (expected == short.class && internal) { return PainlessCast.unboxOriginalType(short.class, short.class, explicit, short.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(short.class, char.class, true, short.class); } else if (expected == int.class && internal) { return PainlessCast.unboxOriginalType(short.class, int.class, explicit, short.class); } else if (expected == long.class && internal) { @@ -392,13 +318,17 @@ public final class AnalyzerCaster { return PainlessCast.unboxOriginalType(short.class, float.class, explicit, short.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(short.class, double.class, explicit, short.class); + } else if (expected == Integer.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, int.class); + } else if (expected == Long.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, long.class); + } else if (expected == Float.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, short.class, double.class); } } else if (actual == Character.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(char.class, byte.class, true, char.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxOriginalType(char.class, short.class, true, char.class); - } else if (expected == char.class && internal) { + if (expected == char.class && internal) { return PainlessCast.unboxOriginalType(char.class, char.class, explicit, char.class); } else if (expected == int.class && internal) { return PainlessCast.unboxOriginalType(char.class, int.class, explicit, char.class); @@ -408,15 +338,17 @@ public final class AnalyzerCaster { return PainlessCast.unboxOriginalType(char.class, float.class, explicit, char.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(char.class, double.class, explicit, char.class); + } else if (expected == Integer.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, int.class); + } else if (expected == Long.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, long.class); + } else if (expected == Float.class && internal) { + return 
PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, char.class, double.class); } } else if (actual == Integer.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(int.class, byte.class, true, int.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxOriginalType(int.class, short.class, true, int.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(int.class, char.class, true, int.class); - } else if (expected == int.class && internal) { + if (expected == int.class && internal) { return PainlessCast.unboxOriginalType(int.class, int.class, explicit, int.class); } else if (expected == long.class && internal) { return PainlessCast.unboxOriginalType(int.class, long.class, explicit, int.class); @@ -424,61 +356,45 @@ public final class AnalyzerCaster { return PainlessCast.unboxOriginalType(int.class, float.class, explicit, int.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(int.class, double.class, explicit, int.class); + } else if (expected == Long.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, long.class); + } else if (expected == Float.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, int.class, double.class); } } else if (actual == Long.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(long.class, byte.class, true, long.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxOriginalType(long.class, short.class, true, long.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(long.class, char.class, true, long.class); - } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxOriginalType(long.class, int.class, true, long.class); - } else if (expected == long.class && internal) { + if (expected == long.class && internal) { return PainlessCast.unboxOriginalType(long.class, long.class, explicit, long.class); } else if (expected == float.class && internal) { return PainlessCast.unboxOriginalType(long.class, float.class, explicit, long.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(long.class, double.class, explicit, long.class); + } else if (expected == Float.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, long.class, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, long.class, double.class); } } else if (actual == Float.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(float.class, byte.class, true, float.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxOriginalType(float.class, short.class, true, float.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(float.class, char.class, true, float.class); - } else if 
(expected == int.class && explicit && internal) { - return PainlessCast.unboxOriginalType(float.class, int.class, true, float.class); - } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxOriginalType(float.class, long.class, true, float.class); - } else if (expected == float.class && internal) { + if (expected == float.class && internal) { return PainlessCast.unboxOriginalType(float.class, float.class, explicit, float.class); } else if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(float.class, double.class, explicit, float.class); + } else if (expected == Double.class && internal) { + return PainlessCast.unboxOriginalTypeToBoxTargetType(explicit, float.class, double.class); } } else if (actual == Double.class) { - if (expected == byte.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, byte.class, true, double.class); - } else if (expected == short.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, short.class, true, double.class); - } else if (expected == char.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, char.class, true, double.class); - } else if (expected == int.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, int.class, true, double.class); - } else if (expected == long.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, long.class, true, double.class); - } else if (expected == float.class && explicit && internal) { - return PainlessCast.unboxOriginalType(double.class, float.class, true, double.class); - } else if (expected == double.class && internal) { + if (expected == double.class && internal) { return PainlessCast.unboxOriginalType(double.class, double.class, explicit, double.class); } } - if ( actual == def.class || + if ( + actual == def.class || (actual != void.class && expected == def.class) || - expected.isAssignableFrom(actual) || - (actual.isAssignableFrom(expected) && explicit)) { + expected.isAssignableFrom(actual) || + (actual.isAssignableFrom(expected) && explicit) + ) { return PainlessCast.originalTypetoTargetType(actual, expected, explicit); } else { throw location.createError(new ClassCastException("Cannot cast from " + diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java index a2433689db3..ea58e7df7b4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java @@ -154,6 +154,10 @@ public final class MethodWriter extends GeneratorAdapter { invokeStatic(UTILITY_TYPE, CHAR_TO_STRING); } else if (cast.originalType == String.class && cast.targetType == char.class) { invokeStatic(UTILITY_TYPE, STRING_TO_CHAR); + } else if (cast.unboxOriginalType != null && cast.boxTargetType != null) { + unbox(getType(cast.unboxOriginalType)); + writeCast(cast.unboxOriginalType, cast.boxTargetType); + box(getType(cast.boxTargetType)); } else if (cast.unboxOriginalType != null) { unbox(getType(cast.unboxOriginalType)); writeCast(cast.originalType, cast.targetType); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java index 98968465d34..5a3fb848a61 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java @@ -75,6 +75,15 @@ public class PainlessCast { return new PainlessCast(originalType, targetType, explicitCast, null, null, null, boxTargetType); } + /** Create a cast where the original type is unboxed, cast to a target type, and the target type is boxed. */ + public static PainlessCast unboxOriginalTypeToBoxTargetType(boolean explicitCast, Class<?> unboxOriginalType, Class<?> boxTargetType) { + + Objects.requireNonNull(unboxOriginalType); + Objects.requireNonNull(boxTargetType); + + return new PainlessCast(null, null, explicitCast, unboxOriginalType, null, null, boxTargetType); + } + public final Class<?> originalType; public final Class<?> targetType; public final boolean explicitCast; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BoxedCastTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BoxedCastTests.java new file mode 100644 index 00000000000..67a2b683ab6 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BoxedCastTests.java @@ -0,0 +1,511 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.painless; + +public class BoxedCastTests extends ScriptTestCase { + + public void testMethodCallByteToBoxedCasts() { + assertEquals(0, exec("byte u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("byte u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Byte u = Byte.valueOf((byte)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + assertEquals(0, exec("byte u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("byte u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("byte u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Byte u = Byte.valueOf((byte)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Byte u = Byte.valueOf((byte)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + assertEquals(0, exec("def u = (byte)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (byte)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + 
assertEquals(0, exec("def u = (byte)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + assertEquals(0, exec("def u = (byte)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (byte)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (byte)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallShortToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("short u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("short u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("short u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("short u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("short u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = 
Short.valueOf((short)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Short u = Short.valueOf((short)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Short u = Short.valueOf((short)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (short)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (short)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (short)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (short)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (short)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallCharacterToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("char u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("char u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, 
exec("Character u = Character.valueOf((char)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("char u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("char u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("char u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Character u = Character.valueOf((char)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Character u = Character.valueOf((char)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (char)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (char)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (char)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (char)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (char)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallIntegerToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () 
-> exec("int u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("int u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("int u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Integer u = Integer.valueOf((int)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Integer u = Integer.valueOf((int)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + 
expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (int)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (int)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallLongToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("long u = 1; def b = Integer.valueOf((int)1); 
b.compareTo(u);")); + assertEquals(0, exec("long u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("long u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Long u = Long.valueOf((long)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Long u = Long.valueOf((long)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (long)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (long)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallFloatToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + 
assertEquals(0, exec("float u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("float u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Float u = Float.valueOf((float)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Float u = Float.valueOf((float)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("float u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("float u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("float u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Float u = Float.valueOf((float)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("Float u = Float.valueOf((float)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Float u = Float.valueOf((float)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + 
expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (float)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (float)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (float)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (float)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (float)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } + + public void testMethodCallDoubleToBoxedCasts() { + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("double u = 1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Double u = Double.valueOf((double)1); Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Character.valueOf((char)1); b.compareTo(u);")); + 
expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("double u = 1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("double u = 1; def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("Double u = Double.valueOf((double)1); def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("Double u = Double.valueOf((double)1); def b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Byte b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Short b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Character b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Integer b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Long b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; Float b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (double)1; Double b = Double.valueOf((double)1); b.compareTo(u);")); + + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Byte.valueOf((byte)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Short.valueOf((short)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Character.valueOf((char)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Integer.valueOf((int)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Long.valueOf((long)1); b.compareTo(u);")); + expectScriptThrows(ClassCastException.class, + () -> exec("def u = (double)1; def b = Float.valueOf((float)1); b.compareTo(u);")); + assertEquals(0, exec("def u = (double)1; def b = Double.valueOf((double)1); b.compareTo(u);")); + } +} From 4d0bb9dd0a5cf1dfe1104b4aa8de9e56f5aa80f1 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 17 Dec 2018 21:02:59 +0100 Subject: [PATCH 15/26] SNAPSHOTS: Adjust BwC Versions in Restore Logic (#36718) * Re-enables bwc tests 
with adjusted version conditions now that #36397 enables concurrent snapshots in 6.6+ --- build.gradle | 4 ++-- .../java/org/elasticsearch/cluster/RestoreInProgress.java | 6 +++--- .../org/elasticsearch/cluster/routing/RecoverySource.java | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/build.gradle b/build.gradle index 36412c047a7..7e067b89978 100644 --- a/build.gradle +++ b/build.gradle @@ -163,8 +163,8 @@ task verifyVersions { * the enabled state of every bwc task. It should be set back to true * after the backport of the backcompat code is complete. */ -final boolean bwc_tests_enabled = false -final String bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/pull/36659" /* place a PR link here when committing bwc changes */ +final boolean bwc_tests_enabled = true +final String bwc_tests_disabled_issue = "" /* place a PR link here when committing bwc changes */ if (bwc_tests_enabled == false) { if (bwc_tests_disabled_issue.isEmpty()) { throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false") diff --git a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java index c229a826ee8..d71a3f94d40 100644 --- a/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/RestoreInProgress.java @@ -46,7 +46,7 @@ import java.util.UUID; public class RestoreInProgress extends AbstractNamedDiffable implements Custom, Iterable { /** - * Fallback UUID used for restore operations that were started before v7.0 and don't have a uuid in the cluster state. + * Fallback UUID used for restore operations that were started before v6.6 and don't have a uuid in the cluster state. 
*/ public static final String BWC_UUID = new UUID(0, 0).toString(); @@ -436,7 +436,7 @@ public class RestoreInProgress extends AbstractNamedDiffable implements final ImmutableOpenMap.Builder entriesBuilder = ImmutableOpenMap.builder(count); for (int i = 0; i < count; i++) { final String uuid; - if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + if (in.getVersion().onOrAfter(Version.V_6_6_0)) { uuid = in.readString(); } else { uuid = BWC_UUID; @@ -468,7 +468,7 @@ public class RestoreInProgress extends AbstractNamedDiffable implements out.writeVInt(entries.size()); for (ObjectCursor v : entries.values()) { Entry entry = v.value; - if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + if (out.getVersion().onOrAfter(Version.V_6_6_0)) { out.writeString(entry.uuid); } entry.snapshot().writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java index 3654d66ad58..25a605088ef 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java @@ -222,7 +222,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject { } SnapshotRecoverySource(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + if (in.getVersion().onOrAfter(Version.V_6_6_0)) { restoreUUID = in.readString(); } else { restoreUUID = RestoreInProgress.BWC_UUID; @@ -250,7 +250,7 @@ public abstract class RecoverySource implements Writeable, ToXContentObject { @Override protected void writeAdditionalFields(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + if (out.getVersion().onOrAfter(Version.V_6_6_0)) { out.writeString(restoreUUID); } snapshot.writeTo(out); From 7bf822bbbb7a624d80825b50e0506036bcd68691 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Mon, 17 Dec 2018 14:10:13 -0600 Subject: [PATCH 16/26] ingest: fix on_failure with Drop processor (#36686) This commit allows a document to be dropped when a Drop processor is used in the on_failure fork of the processor chain. 
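The shape of this fix is easiest to see in miniature. The sketch below is a simplified, self-contained stand-in for the real `org.elasticsearch.ingest` classes (the `SimpleProcessor` and `SimpleCompoundProcessor` names are invented for illustration): a processor signals a drop by returning null, and the on_failure chain has to propagate that null instead of discarding it. The new test in the diff below exercises exactly this path with a simulated drop processor in the on_failure list.

[source,java]
--------------------------------------------------
import java.util.Arrays;
import java.util.List;

interface SimpleProcessor {
    Object execute(Object doc) throws Exception; // by convention: null means "drop this document"
}

class SimpleCompoundProcessor implements SimpleProcessor {
    private final List<SimpleProcessor> processors;
    private final List<SimpleProcessor> onFailureProcessors;

    SimpleCompoundProcessor(List<SimpleProcessor> processors, List<SimpleProcessor> onFailureProcessors) {
        this.processors = processors;
        this.onFailureProcessors = onFailureProcessors;
    }

    @Override
    public Object execute(Object doc) throws Exception {
        for (SimpleProcessor processor : processors) {
            try {
                doc = processor.execute(doc);
                if (doc == null) {
                    return null; // dropped by the main chain
                }
            } catch (Exception e) {
                // the bug: the on_failure result used to be ignored, so a drop
                // processor inside on_failure could not actually drop the document
                for (SimpleProcessor onFailure : onFailureProcessors) {
                    if (onFailure.execute(doc) == null) {
                        return null; // dropped inside the on_failure fork
                    }
                }
                break;
            }
        }
        return doc;
    }
}

class DropSketch {
    public static void main(String[] args) throws Exception {
        SimpleProcessor fail = doc -> { throw new RuntimeException("failed"); };
        SimpleProcessor drop = doc -> null; // simulates the drop processor
        SimpleCompoundProcessor pipeline =
            new SimpleCompoundProcessor(Arrays.asList(fail), Arrays.asList(drop));
        System.out.println(pipeline.execute("doc")); // prints null -> document dropped
    }
}
--------------------------------------------------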
Fixes #36151 --- .../test/ingest/220_drop_processor.yml | 41 +++++++++++++++++++ .../ingest/CompoundProcessor.java | 15 +++++-- .../ingest/CompoundProcessorTests.java | 29 +++++++++++++ 3 files changed, 81 insertions(+), 4 deletions(-) diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml index 3be038aca24..accc30faa21 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/220_drop_processor.yml @@ -57,3 +57,44 @@ teardown: type: test id: 2 - match: { _source.foo: "blub" } + +--- +"Test Drop Processor On Failure": +- do: + ingest.put_pipeline: + id: "my_pipeline_with_failure" + body: > + { + "description" : "pipeline with on failure drop", + "processors": [ + { + "fail": { + "message": "failed", + "on_failure": [ + { + "drop": {} + } + ] + } + } + ] + } +- match: { acknowledged: true } + +- do: + index: + index: test + type: test + id: 3 + pipeline: "my_pipeline_with_failure" + body: { + foo: "bar" + } + +- do: + catch: missing + get: + index: test + type: test + id: 3 +- match: { found: false } diff --git a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java index 3b8281bd471..a095d7647d9 100644 --- a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java @@ -134,7 +134,9 @@ public class CompoundProcessor implements Processor { if (onFailureProcessors.isEmpty()) { throw compoundProcessorException; } else { - executeOnFailure(ingestDocument, compoundProcessorException); + if (executeOnFailure(ingestDocument, compoundProcessorException) == false) { + return null; + } break; } } finally { @@ -145,13 +147,17 @@ public class CompoundProcessor implements Processor { return ingestDocument; } - - void executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception { + /** + * @return true if execution should continue, false if document is dropped. 
+ */ + boolean executeOnFailure(IngestDocument ingestDocument, ElasticsearchException exception) throws Exception { try { putFailureMetadata(ingestDocument, exception); for (Processor processor : onFailureProcessors) { try { - processor.execute(ingestDocument); + if (processor.execute(ingestDocument) == null) { + return false; + } } catch (Exception e) { throw newCompoundProcessorException(e, processor.getType(), processor.getTag()); } @@ -159,6 +165,7 @@ public class CompoundProcessor implements Processor { } finally { removeFailureMetadata(ingestDocument); } + return true; } private void putFailureMetadata(IngestDocument ingestDocument, ElasticsearchException cause) { diff --git a/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java b/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java index dabcae533a0..24e3dcd7677 100644 --- a/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/CompoundProcessorTests.java @@ -129,6 +129,35 @@ public class CompoundProcessorTests extends ESTestCase { assertThat(processor2.getInvokedCounter(), equalTo(1)); } + public void testSingleProcessorWithOnFailureDropProcessor() throws Exception { + TestProcessor processor1 = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");}); + Processor processor2 = new Processor() { + @Override + public IngestDocument execute(IngestDocument ingestDocument) throws Exception { + //Simulates the drop processor + return null; + } + + @Override + public String getType() { + return "drop"; + } + + @Override + public String getTag() { + return null; + } + }; + + LongSupplier relativeTimeProvider = mock(LongSupplier.class); + when(relativeTimeProvider.getAsLong()).thenReturn(0L); + CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.singletonList(processor1), + Collections.singletonList(processor2), relativeTimeProvider); + assertNull(compoundProcessor.execute(ingestDocument)); + assertThat(processor1.getInvokedCounter(), equalTo(1)); + assertStats(compoundProcessor, 1, 1, 0); + } + public void testSingleProcessorWithNestedFailures() throws Exception { TestProcessor processor = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");}); TestProcessor processorToFail = new TestProcessor("id2", "second", ingestDocument -> { From 3dd5a5a3c5ff73e7798e54d84a36ff735e72be55 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Mon, 17 Dec 2018 13:19:32 -0700 Subject: [PATCH 17/26] Initialize startup `CcrRepositories` (#36730) Currently, the CcrRepositoryManager only listens for settings updates and installs new repositories. It does not install the repositories that are in the initial settings. This commit modifies the manager to install the initial repositories. Additionally, it modifies the ccr integration test to configure the remote leader node at startup, instead of using a settings update.
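The core of the change is a one-time pass over the initial settings when the component starts. The sketch below is a deliberately simplified, self-contained illustration of that behavior, not the real implementation: a plain Map stands in for Elasticsearch's `Settings`, the repository "install" is just a print, and the `_ccr_` prefix value is an assumption standing in for `CcrRepository.NAME_PREFIX`. Any remote cluster alias already configured at startup gets its repository installed up front, with later changes still handled by the settings-update listener.

[source,java]
--------------------------------------------------
import java.util.LinkedHashMap;
import java.util.Map;

public class InitialRepositoriesSketch {
    static final String NAME_PREFIX = "_ccr_"; // assumed stand-in for CcrRepository.NAME_PREFIX

    public static void main(String[] args) {
        // remote cluster seeds present in the *initial* node settings,
        // e.g. what the integration test now passes at node construction time
        Map<String, String> initialSettings = new LinkedHashMap<>();
        initialSettings.put("cluster.remote.leader_cluster.seeds", "127.0.0.1:9300");

        // doStart(): walk the initial settings once and install a repository
        // for every remote cluster alias that is already configured
        for (String key : initialSettings.keySet()) {
            if (key.startsWith("cluster.remote.") && key.endsWith(".seeds")) {
                String alias = key.substring("cluster.remote.".length(),
                        key.length() - ".seeds".length());
                putRepository(NAME_PREFIX + alias);
            }
        }
    }

    static void putRepository(String repositoryName) {
        // the real code executes PutInternalCcrRepositoryAction synchronously
        System.out.println("installing repository " + repositoryName);
    }
}
--------------------------------------------------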
--- .../java/org/elasticsearch/xpack/ccr/Ccr.java | 5 +- .../xpack/ccr/CcrRepositoryManager.java | 68 +++++++++++++++---- .../elasticsearch/xpack/CcrIntegTestCase.java | 26 ++++--- 3 files changed, 67 insertions(+), 32 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index 70d4905d943..58ba11e4d04 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ccr; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.Client; @@ -111,7 +110,6 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E private final boolean enabled; private final Settings settings; private final CcrLicenseChecker ccrLicenseChecker; - private final SetOnce repositoryManager = new SetOnce<>(); private Client client; /** @@ -152,10 +150,9 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E return emptyList(); } - this.repositoryManager.set(new CcrRepositoryManager(settings, clusterService, client)); - return Arrays.asList( ccrLicenseChecker, + new CcrRepositoryManager(settings, clusterService, client), new AutoFollowCoordinator(client, clusterService, ccrLicenseChecker, threadPool::relativeTimeInMillis) ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java index a1504ff2f8a..54403df3678 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrRepositoryManager.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.ccr.action.repositories.DeleteInternalCcrRepositoryAction; @@ -18,31 +19,70 @@ import org.elasticsearch.xpack.ccr.action.repositories.PutInternalCcrRepositoryA import org.elasticsearch.xpack.ccr.action.repositories.PutInternalCcrRepositoryRequest; import org.elasticsearch.xpack.ccr.repository.CcrRepository; +import java.io.IOException; import java.util.List; +import java.util.Set; -class CcrRepositoryManager extends RemoteClusterAware { +class CcrRepositoryManager extends AbstractLifecycleComponent { private final Client client; + private final RemoteSettingsUpdateListener updateListener; CcrRepositoryManager(Settings settings, ClusterService clusterService, Client client) { super(settings); this.client = client; - listenForUpdates(clusterService.getClusterSettings()); + updateListener = new RemoteSettingsUpdateListener(settings); + updateListener.listenForUpdates(clusterService.getClusterSettings()); } @Override - protected void updateRemoteCluster(String clusterAlias, List addresses, String proxyAddress) { - String repositoryName = CcrRepository.NAME_PREFIX + clusterAlias; - if (addresses.isEmpty()) { - DeleteInternalCcrRepositoryRequest request = new 
DeleteInternalCcrRepositoryRequest(repositoryName); - PlainActionFuture f = PlainActionFuture.newFuture(); - client.execute(DeleteInternalCcrRepositoryAction.INSTANCE, request, f); - assert f.isDone() : "Should be completed as it is executed synchronously"; - } else { - ActionRequest request = new PutInternalCcrRepositoryRequest(repositoryName, CcrRepository.TYPE); - PlainActionFuture f = PlainActionFuture.newFuture(); - client.execute(PutInternalCcrRepositoryAction.INSTANCE, request, f); - assert f.isDone() : "Should be completed as it is executed synchronously"; + protected void doStart() { + updateListener.init(); + } + + @Override + protected void doStop() { + } + + @Override + protected void doClose() throws IOException { + } + + private void putRepository(String repositoryName) { + ActionRequest request = new PutInternalCcrRepositoryRequest(repositoryName, CcrRepository.TYPE); + PlainActionFuture f = PlainActionFuture.newFuture(); + client.execute(PutInternalCcrRepositoryAction.INSTANCE, request, f); + assert f.isDone() : "Should be completed as it is executed synchronously"; + } + + private void deleteRepository(String repositoryName) { + DeleteInternalCcrRepositoryRequest request = new DeleteInternalCcrRepositoryRequest(repositoryName); + PlainActionFuture f = PlainActionFuture.newFuture(); + client.execute(DeleteInternalCcrRepositoryAction.INSTANCE, request, f); + assert f.isDone() : "Should be completed as it is executed synchronously"; + } + + private class RemoteSettingsUpdateListener extends RemoteClusterAware { + + private RemoteSettingsUpdateListener(Settings settings) { + super(settings); + } + + void init() { + Set clusterAliases = buildRemoteClustersDynamicConfig(settings).keySet(); + for (String clusterAlias : clusterAliases) { + putRepository(CcrRepository.NAME_PREFIX + clusterAlias); + } + } + + @Override + protected void updateRemoteCluster(String clusterAlias, List addresses, String proxy) { + String repositoryName = CcrRepository.NAME_PREFIX + clusterAlias; + if (addresses.isEmpty()) { + deleteRepository(repositoryName); + } else { + putRepository(repositoryName); + } } } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 5abe852ca5f..8865c536917 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; @@ -117,27 +116,23 @@ public abstract class CcrIntegTestCase extends ESTestCase { } stopClusters(); - NodeConfigurationSource nodeConfigurationSource = createNodeConfigurationSource(); Collection> mockPlugins = Arrays.asList(ESIntegTestCase.TestSeedPlugin.class, TestZenDiscovery.TestPlugin.class, MockHttpTransport.TestPlugin.class, getTestTransportPlugin()); InternalTestCluster leaderCluster = new 
InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(), - numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), nodeConfigurationSource, 0, "leader", mockPlugins, + numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), createNodeConfigurationSource(null), 0, "leader", mockPlugins, Function.identity()); - InternalTestCluster followerCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(), - numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), nodeConfigurationSource, 0, "follower", mockPlugins, - Function.identity()); - clusterGroup = new ClusterGroup(leaderCluster, followerCluster); - leaderCluster.beforeTest(random(), 0.0D); leaderCluster.ensureAtLeastNumDataNodes(numberOfNodesPerCluster()); + + String address = leaderCluster.getDataNodeInstance(TransportService.class).boundAddress().publishAddress().toString(); + InternalTestCluster followerCluster = new InternalTestCluster(randomLong(), createTempDir(), true, true, numberOfNodesPerCluster(), + numberOfNodesPerCluster(), UUIDs.randomBase64UUID(random()), createNodeConfigurationSource(address), 0, "follower", + mockPlugins, Function.identity()); + clusterGroup = new ClusterGroup(leaderCluster, followerCluster); + followerCluster.beforeTest(random(), 0.0D); followerCluster.ensureAtLeastNumDataNodes(numberOfNodesPerCluster()); - - ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); - String address = leaderCluster.getDataNodeInstance(TransportService.class).boundAddress().publishAddress().toString(); - updateSettingsRequest.persistentSettings(Settings.builder().put("cluster.remote.leader_cluster.seeds", address)); - assertAcked(followerClient().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); } /** @@ -175,7 +170,7 @@ public abstract class CcrIntegTestCase extends ESTestCase { } } - private NodeConfigurationSource createNodeConfigurationSource() { + private NodeConfigurationSource createNodeConfigurationSource(String leaderSeedAddress) { Settings.Builder builder = Settings.builder(); builder.put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), Integer.MAX_VALUE); // Default the watermarks to absurdly low to prevent the tests @@ -195,6 +190,9 @@ public abstract class CcrIntegTestCase extends ESTestCase { builder.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false); builder.put(XPackSettings.LOGSTASH_ENABLED.getKey(), false); builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial"); + if (leaderSeedAddress != null) { + builder.put("cluster.remote.leader_cluster.seeds", leaderSeedAddress); + } return new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { From f1e1f93943ad17c1ed48b1aec7fdb9f074ea6560 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Mon, 17 Dec 2018 21:19:39 +0100 Subject: [PATCH 18/26] [TEST] fix float comparison in RandomObjects#getExpectedParsedValue This commit fixes a test bug introduced with #36597. This caused some test failures as stored field value comparisons would not work when CBOR xcontent type was used.
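The root cause is worth pinning down, because it is easy to gloss over: widening a `float` to `double` preserves its exact binary value, while parsing its decimal string yields the nearest `double` to that decimal, and those two doubles differ. CBOR hands the `float` back unchanged, so neither conversion applies on that path. The snippet below is a standalone illustration of the mismatch, not code from this change.

[source,java]
--------------------------------------------------
public class FloatPrecisionDemo {
    public static void main(String[] args) {
        float f = 0.1f;
        // exact widening of the binary float value
        double widened = (double) f;                            // 0.10000000149011612
        // decimal round-trip, as in the JSON/YAML parsing path
        double parsed = Double.parseDouble(Float.toString(f));  // 0.1
        System.out.println(widened == parsed);                  // false
    }
}
--------------------------------------------------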
Closes #29080 --- .../main/java/org/elasticsearch/test/RandomObjects.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java index c81d0810f08..4669284685c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -135,13 +135,16 @@ public final class RandomObjects { } } if (value instanceof Float) { + if (xContentType == XContentType.CBOR) { + //with CBOR we get back a float + return value; + } if (xContentType == XContentType.SMILE) { //with SMILE we get back a double (this will change in Jackson 2.9 where it will return a Float) return ((Float)value).doubleValue(); - } else { - //with JSON AND YAML we get back a double, but with float precision. - return Double.parseDouble(value.toString()); } + //with JSON AND YAML we get back a double, but with float precision. + return Double.parseDouble(value.toString()); } if (value instanceof Byte) { return ((Byte)value).intValue(); From 5bc7822562a6eefa4a64743233160cdc9f431adf Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Mon, 17 Dec 2018 14:38:14 -0600 Subject: [PATCH 19/26] [Geo] Integrate Lucene's LatLonShape (BKD Backed GeoShapes) as default `geo_shape` indexing approach (#35320) This commit exposes lucene's LatLonShape field as the default type in GeoShapeFieldMapper. To use the new indexing approach, simply set "type" : "geo_shape" in the mappings without setting any of the strategy, precision, tree_levels, or distance_error_pct parameters. Note the following when using the new indexing approach: * geo_shape query does not support querying by MULTIPOINT. * LINESTRING and MULTILINESTRING queries do not yet support WITHIN relation. * CONTAINS relation is not yet supported. The tree, precision, tree_levels, distance_error_pct, and points_only parameters are deprecated. 
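For readers who have not used the underlying Lucene API, a minimal sketch of what `LatLonShape` indexing and querying look like follows. It is based on the `createIndexableFields`/`newBoxQuery` entry points and the `QueryRelation` enum this change imports; the signatures reflect the Lucene 7.6-era API and should be treated as approximate. Each polygon is tessellated into triangles, and every triangle is added to the document as its own field.

[source,java]
--------------------------------------------------
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LatLonShape;
import org.apache.lucene.geo.Polygon;
import org.apache.lucene.search.Query;

public class LatLonShapeSketch {
    public static Document indexPolygon() {
        // closed ring: lat/lon arrays, first point repeated as the last point
        double[] lats = {0.0, 0.0, 1.0, 1.0, 0.0};
        double[] lons = {0.0, 1.0, 1.0, 0.0, 0.0};
        Document doc = new Document();
        for (Field f : LatLonShape.createIndexableFields("location", new Polygon(lats, lons))) {
            doc.add(f); // each triangle of the tessellation becomes one field
        }
        return doc;
    }

    public static Query intersectsBox() {
        // query shapes carry an explicit relation (INTERSECTS/DISJOINT/WITHIN)
        return LatLonShape.newBoxQuery("location", LatLonShape.QueryRelation.INTERSECTS,
                0.5, 2.0, 0.5, 2.0); // minLat, maxLat, minLon, maxLon
    }
}
--------------------------------------------------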
--- .../mapping/types/geo-shape.asciidoc | 184 +++-- .../migration/migrate_7_0/mappings.asciidoc | 16 + .../query-dsl/geo-shape-query.asciidoc | 5 +- .../common/geo/ShapeRelation.java | 12 + .../builders/GeometryCollectionBuilder.java | 3 - .../common/geo/parsers/GeoJsonParser.java | 24 +- .../common/geo/parsers/GeoWKTParser.java | 13 +- .../common/geo/parsers/ShapeParser.java | 4 +- .../index/mapper/BaseGeoShapeFieldMapper.java | 336 +++++++++ .../index/mapper/GeoShapeFieldMapper.java | 600 ++------------- .../mapper/LegacyGeoShapeFieldMapper.java | 596 +++++++++++++++ .../index/query/GeoShapeQueryBuilder.java | 117 ++- .../elasticsearch/indices/IndicesModule.java | 8 +- .../common/geo/GeoJsonShapeParserTests.java | 8 +- .../common/geo/GeoWKTShapeParserTests.java | 19 +- .../index/mapper/ExternalMapper.java | 21 +- .../ExternalValuesMapperIntegrationIT.java | 6 +- .../mapper/GeoShapeFieldMapperTests.java | 452 ++--------- .../index/mapper/GeoShapeFieldTypeTests.java | 52 +- .../LegacyGeoShapeFieldMapperTests.java | 714 ++++++++++++++++++ .../mapper/LegacyGeoShapeFieldTypeTests.java | 86 +++ .../query/GeoShapeQueryBuilderTests.java | 75 +- .../query/LegacyGeoShapeFieldQueryTests.java | 94 +++ .../index/query/MatchQueryBuilderTests.java | 1 + .../query/QueryStringQueryBuilderTests.java | 6 + .../elasticsearch/search/geo/GeoFilterIT.java | 1 + .../search/geo/GeoShapeIntegrationIT.java | 25 +- .../search/geo/GeoShapeQueryTests.java | 186 ++++- .../geo/LegacyGeoShapeIntegrationIT.java | 170 +++++ .../test/geo/RandomShapeGenerator.java | 2 + .../test/AbstractBuilderTestCase.java | 20 +- 31 files changed, 2629 insertions(+), 1227 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/BaseGeoShapeFieldMapper.java create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldTypeTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/query/LegacyGeoShapeFieldQueryTests.java create mode 100644 server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index 2f51465d110..8efb184afa6 100644 --- a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -21,48 +21,59 @@ type. |======================================================================= |Option |Description| Default -|`tree` |Name of the PrefixTree implementation to be used: `geohash` for -GeohashPrefixTree and `quadtree` for QuadPrefixTree. -| `geohash` +|`tree` |deprecated[6.6, PrefixTrees no longer used] Name of the PrefixTree +implementation to be used: `geohash` for GeohashPrefixTree and `quadtree` +for QuadPrefixTree. Note: This parameter is only relevant for `term` and +`recursive` strategies. +| `quadtree` -|`precision` |This parameter may be used instead of `tree_levels` to set -an appropriate value for the `tree_levels` parameter. The value -specifies the desired precision and Elasticsearch will calculate the -best tree_levels value to honor this precision. The value should be a -number followed by an optional distance unit. Valid distance units -include: `in`, `inch`, `yd`, `yard`, `mi`, `miles`, `km`, `kilometers`, -`m`,`meters`, `cm`,`centimeters`, `mm`, `millimeters`.
+|`precision` |deprecated[6.6, PrefixTrees no longer used] This parameter may +be used instead of `tree_levels` to set an appropriate value for the +`tree_levels` parameter. The value specifies the desired precision and +Elasticsearch will calculate the best tree_levels value to honor this +precision. The value should be a number followed by an optional distance +unit. Valid distance units include: `in`, `inch`, `yd`, `yard`, `mi`, +`miles`, `km`, `kilometers`, `m`,`meters`, `cm`,`centimeters`, `mm`, +`millimeters`. Note: This parameter is only relevant for `term` and +`recursive` strategies. | `50m` -|`tree_levels` |Maximum number of layers to be used by the PrefixTree. -This can be used to control the precision of shape representations and -therefore how many terms are indexed. Defaults to the default value of -the chosen PrefixTree implementation. Since this parameter requires a -certain level of understanding of the underlying implementation, users -may use the `precision` parameter instead. However, Elasticsearch only -uses the tree_levels parameter internally and this is what is returned -via the mapping API even if you use the precision parameter. +|`tree_levels` |deprecated[6.6, PrefixTrees no longer used] Maximum number +of layers to be used by the PrefixTree. This can be used to control the +precision of shape representations and therefore how many terms are +indexed. Defaults to the default value of the chosen PrefixTree +implementation. Since this parameter requires a certain level of +understanding of the underlying implementation, users may use the +`precision` parameter instead. However, Elasticsearch only uses the +tree_levels parameter internally and this is what is returned via the +mapping API even if you use the precision parameter. Note: This parameter +is only relevant for `term` and `recursive` strategies. | various -|`strategy` |The strategy parameter defines the approach for how to -represent shapes at indexing and search time. It also influences the -capabilities available so it is recommended to let Elasticsearch set -this parameter automatically. There are two strategies available: -`recursive` and `term`. Term strategy supports point types only (the -`points_only` parameter will be automatically set to true) while -Recursive strategy supports all shape types. (IMPORTANT: see -<> for more detailed information) +|`strategy` |deprecated[6.6, PrefixTrees no longer used] The strategy +parameter defines the approach for how to represent shapes at indexing +and search time. It also influences the capabilities available so it +is recommended to let Elasticsearch set this parameter automatically. +There are two strategies available: `recursive` and `term`. +Recursive and Term strategies are deprecated and will be removed in a +future version. While they are still available, the Term strategy +supports point types only (the `points_only` parameter will be +automatically set to true) while Recursive strategy supports all +shape types. (IMPORTANT: see <> for more +detailed information about these strategies) | `recursive` -|`distance_error_pct` |Used as a hint to the PrefixTree about how -precise it should be. Defaults to 0.025 (2.5%) with 0.5 as the maximum -supported value. PERFORMANCE NOTE: This value will default to 0 if a `precision` or -`tree_level` definition is explicitly defined. This guarantees spatial precision -at the level defined in the mapping.
This can lead to significant memory usage -for high resolution shapes with low error (e.g., large shapes at 1m with < 0.001 error). -To improve indexing performance (at the cost of query accuracy) explicitly define -`tree_level` or `precision` along with a reasonable `distance_error_pct`, noting -that large shapes will have greater false positives. +|`distance_error_pct` |deprecated[6.6, PrefixTrees no longer used] Used as a +hint to the PrefixTree about how precise it should be. Defaults to 0.025 (2.5%) +with 0.5 as the maximum supported value. PERFORMANCE NOTE: This value will +default to 0 if a `precision` or `tree_level` definition is explicitly defined. +This guarantees spatial precision at the level defined in the mapping. This can +lead to significant memory usage for high resolution shapes with low error +(e.g., large shapes at 1m with < 0.001 error). To improve indexing performance +(at the cost of query accuracy) explicitly define `tree_level` or `precision` +along with a reasonable `distance_error_pct`, noting that large shapes will have +greater false positives. Note: This parameter is only relevant for `term` and +`recursive` strategies. | `0.025` |`orientation` |Optionally define how to interpret vertex order for @@ -77,13 +88,13 @@ sets vertex order for the coordinate list of a geo_shape field but can be overridden in each individual GeoJSON or WKT document. | `ccw` -|`points_only` |Setting this option to `true` (defaults to `false`) configures -the `geo_shape` field type for point shapes only (NOTE: Multi-Points are not -yet supported). This optimizes index and search performance for the `geohash` and -`quadtree` when it is known that only points will be indexed. At present geo_shape -queries can not be executed on `geo_point` field types. This option bridges the gap -by improving point performance on a `geo_shape` field so that `geo_shape` queries are -optimal on a point only field. +|`points_only` |deprecated[6.6, PrefixTrees no longer used] Setting this option to +`true` (defaults to `false`) configures the `geo_shape` field type for point +shapes only (NOTE: Multi-Points are not yet supported). This optimizes index and +search performance for the `geohash` and `quadtree` when it is known that only points +will be indexed. At present geo_shape queries can not be executed on `geo_point` +field types. This option bridges the gap by improving point performance on a +`geo_shape` field so that `geo_shape` queries are optimal on a point only field. | `false` |`ignore_malformed` |If true, malformed GeoJSON or WKT shapes are ignored. If @@ -100,16 +111,35 @@ and reject the whole document. |======================================================================= + +[[geoshape-indexing-approach]] +[float] +==== Indexing approach +GeoShape types are indexed by decomposing the shape into a triangular mesh and +indexing each triangle as a 7 dimension point in a BKD tree. This provides +near perfect spatial resolution (down to 1e-7 decimal degree precision) since all +spatial relations are computed using an encoded vector representation of the +original shape instead of a raster-grid representation as used by the +<> indexing approach. Performance of the tessellator primarily +depends on the number of vertices that define the polygon/multi-polyogn. While +this is the default indexing technique prefix trees can still be used by setting +the `tree` or `strategy` parameters according to the appropriate +<>. 
Note that these parameters are now deprecated +and will be removed in a future version. + [[prefix-trees]] [float] ==== Prefix trees -To efficiently represent shapes in the index, Shapes are converted into -a series of hashes representing grid squares (commonly referred to as "rasters") -using implementations of a PrefixTree. The tree notion comes from the fact that -the PrefixTree uses multiple grid layers, each with an increasing level of -precision to represent the Earth. This can be thought of as increasing the level -of detail of a map or image at higher zoom levels. +deprecated[6.6, PrefixTrees no longer used] To efficiently represent shapes in +an inverted index, shapes are converted into a series of hashes representing +grid squares (commonly referred to as "rasters") using implementations of a +PrefixTree. The tree notion comes from the fact that the PrefixTree uses multiple +grid layers, each with an increasing level of precision to represent the Earth. +This can be thought of as increasing the level of detail of a map or image at higher +zoom levels. Since this approach causes precision issues with indexed shapes, it has +been deprecated in favor of a vector indexing approach that indexes the shapes as a +triangular mesh (see <>). Multiple PrefixTree implementations are provided: @@ -131,9 +161,10 @@ number of levels for the quad trees in Elasticsearch is 29; the default is 21. [[spatial-strategy]] [float] ===== Spatial strategies -The PrefixTree implementations rely on a SpatialStrategy for decomposing -the provided Shape(s) into approximated grid squares. Each strategy answers -the following: +deprecated[6.6, PrefixTrees no longer used] The indexing implementation +selected relies on a SpatialStrategy for choosing how to decompose the shapes +(either as grid squares or a tessellated triangular mesh). Each strategy +answers the following: * What type of Shapes can be indexed? * What types of Query Operations and Shapes can be used? @@ -146,7 +177,7 @@ are provided: |======================================================================= |Strategy |Supported Shapes |Supported Queries |Multiple Shapes -|`recursive` |<> |`INTERSECTS`, `DISJOINT`, `WITHIN`, `CONTAINS` |Yes +|`recursive` |<> |`INTERSECTS`, `DISJOINT`, `WITHIN`, `CONTAINS` |Yes |`term` |<> |`INTERSECTS` |Yes |======================================================================= [float] ===== Accuracy -Geo_shape does not provide 100% accuracy and depending on how it is configured -it may return some false positives for `INTERSECTS`, `WITHIN` and `CONTAINS` -queries, and some false negatives for `DISJOINT` queries. To mitigate this, it -is important to select an appropriate value for the tree_levels parameter and -to adjust expectations accordingly. +`Recursive` and `Term` strategies do not provide 100% accuracy and depending on +how they are configured they may return some false positives for `INTERSECTS`, +`WITHIN` and `CONTAINS` queries, and some false negatives for `DISJOINT` queries. +To mitigate this, it is important to select an appropriate value for the tree_levels +parameter and to adjust expectations accordingly.
For example, a point may be near +the border of a particular grid cell and may thus not match a query that only matches +the cell right next to it -- even though the shape is very close to the point. [float] ===== Example @@ -173,9 +204,7 @@ PUT /example "doc": { "properties": { "location": { - "type": "geo_shape", - "tree": "quadtree", - "precision": "100m" + "type": "geo_shape" } } } @@ -185,22 +214,23 @@ PUT /example // CONSOLE // TESTSETUP -This mapping maps the location field to the geo_shape type using the -quad_tree implementation and a precision of 100m. Elasticsearch translates -this into a tree_levels setting of 20. +This mapping definition maps the location field to the geo_shape +type using the default vector implementation. It provides +approximately 1e-7 decimal degree precision. [float] -===== Performance considerations +===== Performance considerations with Prefix Trees -Elasticsearch uses the paths in the prefix tree as terms in the index -and in queries. The higher the level is (and thus the precision), the -more terms are generated. Of course, calculating the terms, keeping them in +deprecated[6.6, PrefixTrees no longer used] With prefix trees, +Elasticsearch uses the paths in the tree as terms in the inverted index +and in queries. The higher the level (and thus the precision), the more +terms are generated. Of course, calculating the terms, keeping them in memory, and storing them on disk all have a price. Especially with higher -tree levels, indices can become extremely large even with a modest -amount of data. Additionally, the size of the features also matters. -Big, complex polygons can take up a lot of space at higher tree levels. -Which setting is right depends on the use case. Generally one trades off -accuracy against index size and query performance. +tree levels, indices can become extremely large even with a modest amount +of data. Additionally, the size of the features also matters. Big, complex +polygons can take up a lot of space at higher tree levels. Which setting +is right depends on the use case. Generally one trades off accuracy against +index size and query performance. The defaults in Elasticsearch for both implementations are a compromise between index size and a reasonable level of precision of 50m at the @@ -598,7 +628,10 @@ POST /example/doc ===== Circle Elasticsearch supports a `circle` type, which consists of a center -point with a radius: +point with a radius. Note that this circle representation can only +be indexed when using the `recursive` Prefix Tree strategy. For +the default <> circles should be approximated using +a `POLYGON`. [source,js] -------------------------------------------------- @@ -612,6 +645,7 @@ POST /example/doc } -------------------------------------------------- // CONSOLE +// TEST[skip:not supported in default] Note: The inner `radius` field is required. If not specified, then the units of the `radius` will default to `METERS`. diff --git a/docs/reference/migration/migrate_7_0/mappings.asciidoc b/docs/reference/migration/migrate_7_0/mappings.asciidoc index 5ee1615796c..f08ea3ab89c 100644 --- a/docs/reference/migration/migrate_7_0/mappings.asciidoc +++ b/docs/reference/migration/migrate_7_0/mappings.asciidoc @@ -52,3 +52,19 @@ as a better alternative. An error will now be thrown when unknown configuration options are provided to similarities. Such unknown parameters were ignored before. 
+ +[float] +==== deprecated `geo_shape` Prefix Tree indexing + +`geo_shape` types now default to using a vector indexing approach based on Lucene's new +`LatLonShape` field type. This indexes shapes as a triangular mesh instead of decomposing +them into individual grid cells. To index using legacy prefix trees `recursive` or `term` +strategy must be explicitly defined. Note that these strategies are now deprecated and will +be removed in a future version. + +[float] +==== deprecated `geo_shape` parameters + +The following type parameters are deprecated for the `geo_shape` field type: `tree`, +`precision`, `tree_levels`, `distance_error_pct`, `points_only`, and `strategy`. They +will be removed in a future version. \ No newline at end of file diff --git a/docs/reference/query-dsl/geo-shape-query.asciidoc b/docs/reference/query-dsl/geo-shape-query.asciidoc index 4e00a2f49b4..f796881d520 100644 --- a/docs/reference/query-dsl/geo-shape-query.asciidoc +++ b/docs/reference/query-dsl/geo-shape-query.asciidoc @@ -7,7 +7,7 @@ Requires the <>. The `geo_shape` query uses the same grid square representation as the `geo_shape` mapping to find documents that have a shape that intersects -with the query shape. It will also use the same PrefixTree configuration +with the query shape. It will also use the same Prefix Tree configuration as defined for the field mapping. The query supports two ways of defining the query shape, either by @@ -157,7 +157,8 @@ has nothing in common with the query geometry. * `WITHIN` - Return all documents whose `geo_shape` field is within the query geometry. * `CONTAINS` - Return all documents whose `geo_shape` field -contains the query geometry. +contains the query geometry. Note: this is only supported using the +`recursive` Prefix Tree Strategy deprecated[6.6] [float] ==== Ignore Unmapped diff --git a/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java b/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java index e83e18ce432..e2e177c8f0f 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java +++ b/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.geo; +import org.apache.lucene.document.LatLonShape.QueryRelation; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -62,6 +63,17 @@ public enum ShapeRelation implements Writeable { return null; } + /** Maps ShapeRelation to Lucene's LatLonShapeRelation */ + public QueryRelation getLuceneRelation() { + switch (this) { + case INTERSECTS: return QueryRelation.INTERSECTS; + case DISJOINT: return QueryRelation.DISJOINT; + case WITHIN: return QueryRelation.WITHIN; + default: + throw new IllegalArgumentException("ShapeRelation [" + this + "] not supported"); + } + } + public String getRelationName() { return relationName; } diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index b6e94c012c6..fdf7073bd74 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -197,9 +197,6 @@ public class GeometryCollectionBuilder extends ShapeBuilder coerce = (shapeMapper == null) ? 
GeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce(); - Explicit ignoreZValue = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE : shapeMapper.ignoreZValue(); + Orientation orientation = (shapeMapper == null) + ? BaseGeoShapeFieldMapper.Defaults.ORIENTATION.value() + : shapeMapper.orientation(); + Explicit coerce = (shapeMapper == null) + ? BaseGeoShapeFieldMapper.Defaults.COERCE + : shapeMapper.coerce(); + Explicit ignoreZValue = (shapeMapper == null) + ? BaseGeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE + : shapeMapper.ignoreZValue(); String malformedException = null; @@ -102,7 +108,7 @@ abstract class GeoJsonParser { malformedException = "cannot have [" + ShapeParser.FIELD_ORIENTATION + "] with type set to [" + shapeType + "]"; } subParser.nextToken(); - requestedOrientation = ShapeBuilder.Orientation.fromString(subParser.text()); + orientation = ShapeBuilder.Orientation.fromString(subParser.text()); } else { subParser.nextToken(); subParser.skipChildren(); @@ -128,7 +134,7 @@ abstract class GeoJsonParser { return geometryCollections; } - return shapeType.getBuilder(coordinateNode, radius, requestedOrientation, coerce.value()); + return shapeType.getBuilder(coordinateNode, radius, orientation, coerce.value()); } /** @@ -202,7 +208,7 @@ abstract class GeoJsonParser { * @return Geometry[] geometries of the GeometryCollection * @throws IOException Thrown if an error occurs while reading from the XContentParser */ - static GeometryCollectionBuilder parseGeometries(XContentParser parser, GeoShapeFieldMapper mapper) throws + static GeometryCollectionBuilder parseGeometries(XContentParser parser, BaseGeoShapeFieldMapper mapper) throws IOException { if (parser.currentToken() != XContentParser.Token.START_ARRAY) { throw new ElasticsearchParseException("geometries must be an array of geojson objects"); diff --git a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java index e1d990f0cff..bf26980c926 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java @@ -34,7 +34,7 @@ import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.GeoShapeFieldMapper; +import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper; import org.locationtech.jts.geom.Coordinate; import java.io.IOException; @@ -63,7 +63,7 @@ public class GeoWKTParser { // no instance private GeoWKTParser() {} - public static ShapeBuilder parse(XContentParser parser, final GeoShapeFieldMapper shapeMapper) + public static ShapeBuilder parse(XContentParser parser, final BaseGeoShapeFieldMapper shapeMapper) throws IOException, ElasticsearchParseException { return parseExpectedType(parser, null, shapeMapper); } @@ -75,12 +75,12 @@ public class GeoWKTParser { /** throws an exception if the parsed geometry type does not match the expected shape type */ public static ShapeBuilder parseExpectedType(XContentParser parser, final GeoShapeType shapeType, - final GeoShapeFieldMapper shapeMapper) + final BaseGeoShapeFieldMapper shapeMapper) throws IOException, ElasticsearchParseException { try (StringReader reader = new StringReader(parser.text())) { - Explicit ignoreZValue = (shapeMapper == null) ? 
GeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE : + Explicit ignoreZValue = (shapeMapper == null) ? BaseGeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE : shapeMapper.ignoreZValue(); - Explicit coerce = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce(); + Explicit coerce = (shapeMapper == null) ? BaseGeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce(); // setup the tokenizer; configured to read words w/o numbers StreamTokenizer tokenizer = new StreamTokenizer(reader); tokenizer.resetSyntax(); @@ -257,7 +257,8 @@ public class GeoWKTParser { if (nextEmptyOrOpen(stream).equals(EMPTY)) { return null; } - PolygonBuilder builder = new PolygonBuilder(parseLinearRing(stream, ignoreZValue, coerce), ShapeBuilder.Orientation.RIGHT); + PolygonBuilder builder = new PolygonBuilder(parseLinearRing(stream, ignoreZValue, coerce), + BaseGeoShapeFieldMapper.Defaults.ORIENTATION.value()); while (nextCloserOrComma(stream).equals(COMMA)) { builder.hole(parseLinearRing(stream, ignoreZValue, coerce)); } diff --git a/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java b/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java index 79582c3365b..21d1bd9f255 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.GeoShapeFieldMapper; +import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper; import java.io.IOException; @@ -46,7 +46,7 @@ public interface ShapeParser { * if the parsers current token has been null * @throws IOException if the input could not be read */ - static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper shapeMapper) throws IOException { + static ShapeBuilder parse(XContentParser parser, BaseGeoShapeFieldMapper shapeMapper) throws IOException { if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { return null; } if (parser.currentToken() == XContentParser.Token.START_OBJECT) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BaseGeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BaseGeoShapeFieldMapper.java new file mode 100644 index 00000000000..3f1e49e525e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/BaseGeoShapeFieldMapper.java @@ -0,0 +1,336 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.mapper; + +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.elasticsearch.Version; +import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper.DeprecatedParameters; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_MALFORMED; + +/** + * Base class for {@link GeoShapeFieldMapper} and {@link LegacyGeoShapeFieldMapper} + */ +public abstract class BaseGeoShapeFieldMapper extends FieldMapper { + public static final String CONTENT_TYPE = "geo_shape"; + + public static class Names { + public static final ParseField ORIENTATION = new ParseField("orientation"); + public static final ParseField COERCE = new ParseField("coerce"); + } + + public static class Defaults { + public static final Explicit ORIENTATION = new Explicit<>(Orientation.RIGHT, false); + public static final Explicit COERCE = new Explicit<>(false, false); + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + public static final Explicit IGNORE_Z_VALUE = new Explicit<>(true, false); + } + + public abstract static class Builder + extends FieldMapper.Builder { + protected Boolean coerce; + protected Boolean ignoreMalformed; + protected Boolean ignoreZValue; + protected Orientation orientation; + + /** default builder - used for external mapper*/ + public Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType) { + super(name, fieldType, defaultFieldType); + } + + public Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType, + boolean coerce, boolean ignoreMalformed, Orientation orientation, boolean ignoreZ) { + super(name, fieldType, defaultFieldType); + this.coerce = coerce; + this.ignoreMalformed = ignoreMalformed; + this.orientation = orientation; + this.ignoreZValue = ignoreZ; + } + + public Builder coerce(boolean coerce) { + this.coerce = coerce; + return this; + } + + protected Explicit coerce(BuilderContext context) { + if (coerce != null) { + return new Explicit<>(coerce, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); + } + return Defaults.COERCE; + } + + public Builder orientation(Orientation orientation) { + this.orientation = orientation; + return this; + } + + protected Explicit orientation() { + if (orientation != null) { + return new Explicit<>(orientation, true); + } + return Defaults.ORIENTATION; + } + + @Override + protected boolean defaultDocValues(Version indexCreated) { + return false; + } + + public Builder ignoreMalformed(boolean ignoreMalformed) { + this.ignoreMalformed = ignoreMalformed; + return this; 
+ } + + protected Explicit ignoreMalformed(BuilderContext context) { + if (ignoreMalformed != null) { + return new Explicit<>(ignoreMalformed, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); + } + return Defaults.IGNORE_MALFORMED; + } + + protected Explicit ignoreZValue() { + if (ignoreZValue != null) { + return new Explicit<>(ignoreZValue, true); + } + return Defaults.IGNORE_Z_VALUE; + } + + public Builder ignoreZValue(final boolean ignoreZValue) { + this.ignoreZValue = ignoreZValue; + return this; + } + + @Override + protected void setupFieldType(BuilderContext context) { + super.setupFieldType(context); + + // field mapper handles this at build time + // but prefix tree strategies require a name, so throw a similar exception + if (name().isEmpty()) { + throw new IllegalArgumentException("name cannot be empty string"); + } + + BaseGeoShapeFieldType ft = (BaseGeoShapeFieldType)fieldType(); + ft.setOrientation(orientation().value()); + } + } + + public static class TypeParser implements Mapper.TypeParser { + + @Override + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + boolean coerce = Defaults.COERCE.value(); + boolean ignoreZ = Defaults.IGNORE_Z_VALUE.value(); + boolean ignoreMalformed = Defaults.IGNORE_MALFORMED.value(); + Orientation orientation = Defaults.ORIENTATION.value(); + DeprecatedParameters deprecatedParameters = new DeprecatedParameters(); + boolean parsedDeprecatedParams = false; + for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + String fieldName = entry.getKey(); + Object fieldNode = entry.getValue(); + if (DeprecatedParameters.parse(name, fieldName, fieldNode, deprecatedParameters)) { + parsedDeprecatedParams = true; + iterator.remove(); + } else if (Names.ORIENTATION.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { + orientation = ShapeBuilder.Orientation.fromString(fieldNode.toString()); + iterator.remove(); + } else if (IGNORE_MALFORMED.equals(fieldName)) { + ignoreMalformed = XContentMapValues.nodeBooleanValue(fieldNode, name + ".ignore_malformed"); + iterator.remove(); + } else if (Names.COERCE.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { + coerce = XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.COERCE.getPreferredName()); + iterator.remove(); + } else if (GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName().equals(fieldName)) { + ignoreZ = XContentMapValues.nodeBooleanValue(fieldNode, + name + "." + GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName()); + iterator.remove(); + } + } + return getBuilder(name, coerce, ignoreMalformed, orientation, ignoreZ, parsedDeprecatedParams ? 
deprecatedParameters : null); + } + + private Builder getBuilder(String name, boolean coerce, boolean ignoreMalformed, Orientation orientation, + boolean ignoreZ, DeprecatedParameters deprecatedParameters) { + if (deprecatedParameters != null) { + return getLegacyBuilder(name, coerce, ignoreMalformed, orientation, ignoreZ, deprecatedParameters); + } + return new GeoShapeFieldMapper.Builder(name, coerce, ignoreMalformed, orientation, ignoreZ); + } + + private Builder getLegacyBuilder(String name, boolean coerce, boolean ignoreMalformed, Orientation orientation, + boolean ignoreZ, DeprecatedParameters deprecatedParameters) { + return new LegacyGeoShapeFieldMapper.Builder(name, coerce, ignoreMalformed, orientation, ignoreZ, deprecatedParameters); + } + } + + public abstract static class BaseGeoShapeFieldType extends MappedFieldType { + protected Orientation orientation = Defaults.ORIENTATION.value(); + + protected BaseGeoShapeFieldType() { + setIndexOptions(IndexOptions.DOCS); + setTokenized(false); + setStored(false); + setStoreTermVectors(false); + setOmitNorms(true); + } + + protected BaseGeoShapeFieldType(BaseGeoShapeFieldType ref) { + super(ref); + this.orientation = ref.orientation; + } + + @Override + public boolean equals(Object o) { + if (!super.equals(o)) return false; + BaseGeoShapeFieldType that = (BaseGeoShapeFieldType) o; + return orientation == that.orientation; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), orientation); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + @Override + public void checkCompatibility(MappedFieldType fieldType, List conflicts) { + super.checkCompatibility(fieldType, conflicts); + } + + public Orientation orientation() { return this.orientation; } + + public void setOrientation(Orientation orientation) { + checkIfFrozen(); + this.orientation = orientation; + } + + @Override + public Query existsQuery(QueryShardContext context) { + return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name())); + } + + @Override + public Query termQuery(Object value, QueryShardContext context) { + throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead"); + } + } + + protected Explicit coerce; + protected Explicit ignoreMalformed; + protected Explicit ignoreZValue; + + protected BaseGeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + Explicit ignoreMalformed, Explicit coerce, + Explicit ignoreZValue, Settings indexSettings, + MultiFields multiFields, CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); + this.coerce = coerce; + this.ignoreMalformed = ignoreMalformed; + this.ignoreZValue = ignoreZValue; + } + + @Override + protected void doMerge(Mapper mergeWith) { + super.doMerge(mergeWith); + BaseGeoShapeFieldMapper gsfm = (BaseGeoShapeFieldMapper)mergeWith; + if (gsfm.coerce.explicit()) { + this.coerce = gsfm.coerce; + } + if (gsfm.ignoreMalformed.explicit()) { + this.ignoreMalformed = gsfm.ignoreMalformed; + } + if (gsfm.ignoreZValue.explicit()) { + this.ignoreZValue = gsfm.ignoreZValue; + } + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws IOException { + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + builder.field("type", contentType()); + BaseGeoShapeFieldType ft = (BaseGeoShapeFieldType)fieldType(); + if 
(includeDefaults || ft.orientation() != Defaults.ORIENTATION.value()) { + builder.field(Names.ORIENTATION.getPreferredName(), ft.orientation()); + } + if (includeDefaults || coerce.explicit()) { + builder.field(Names.COERCE.getPreferredName(), coerce.value()); + } + if (includeDefaults || ignoreMalformed.explicit()) { + builder.field(IGNORE_MALFORMED, ignoreMalformed.value()); + } + if (includeDefaults || ignoreZValue.explicit()) { + builder.field(GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName(), ignoreZValue.value()); + } + } + + public Explicit coerce() { + return coerce; + } + + public Explicit ignoreMalformed() { + return ignoreMalformed; + } + + public Explicit ignoreZValue() { + return ignoreZValue; + } + + public Orientation orientation() { + return ((BaseGeoShapeFieldType)fieldType).orientation(); + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index 7de40fe337d..65ee2e428fa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -18,48 +18,24 @@ */ package org.elasticsearch.index.mapper; -import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.LatLonShape; +import org.apache.lucene.geo.Line; +import org.apache.lucene.geo.Polygon; +import org.apache.lucene.geo.Rectangle; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; -import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; -import org.apache.lucene.spatial.prefix.TermQueryPrefixTreeStrategy; -import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; -import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree; -import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; -import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; -import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.geo.SpatialStrategy; -import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.QueryShardException; -import org.locationtech.spatial4j.shape.Point; -import org.locationtech.spatial4j.shape.Shape; -import org.locationtech.spatial4j.shape.jts.JtsGeometry; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_MALFORMED; /** - * FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s. 
+ * FieldMapper for indexing {@link org.apache.lucene.document.LatLonShape}s.
 * <p>
 * Currently Shapes can only be indexed and can only be queried using
 * {@link org.elasticsearch.index.query.GeoShapeQueryBuilder}, consequently
@@ -73,554 +49,128 @@ import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_MA
 * [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ]
 * ]
 * }
+ * <p>
+ * or:
+ * <p>
+ * "field" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0)) */ -public class GeoShapeFieldMapper extends FieldMapper { - - public static final String CONTENT_TYPE = "geo_shape"; - - public static class Names { - public static final String TREE = "tree"; - public static final String TREE_GEOHASH = "geohash"; - public static final String TREE_QUADTREE = "quadtree"; - public static final String TREE_LEVELS = "tree_levels"; - public static final String TREE_PRESISION = "precision"; - public static final String DISTANCE_ERROR_PCT = "distance_error_pct"; - public static final String ORIENTATION = "orientation"; - public static final String STRATEGY = "strategy"; - public static final String STRATEGY_POINTS_ONLY = "points_only"; - public static final String COERCE = "coerce"; - } - - public static class Defaults { - public static final String TREE = Names.TREE_GEOHASH; - public static final String STRATEGY = SpatialStrategy.RECURSIVE.getStrategyName(); - public static final boolean POINTS_ONLY = false; - public static final int GEOHASH_LEVELS = GeoUtils.geoHashLevelsForPrecision("50m"); - public static final int QUADTREE_LEVELS = GeoUtils.quadTreeLevelsForPrecision("50m"); - public static final Orientation ORIENTATION = Orientation.RIGHT; - public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d; - public static final Explicit COERCE = new Explicit<>(false, false); - public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); - public static final Explicit IGNORE_Z_VALUE = new Explicit<>(true, false); - - public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType(); - - static { - // setting name here is a hack so freeze can be called...instead all these options should be - // moved to the default ctor for GeoShapeFieldType, and defaultFieldType() should be removed from mappers... 
- FIELD_TYPE.setName("DoesNotExist"); - FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - FIELD_TYPE.setTokenized(false); - FIELD_TYPE.setStored(false); - FIELD_TYPE.setStoreTermVectors(false); - FIELD_TYPE.setOmitNorms(true); - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends FieldMapper.Builder { - - private Boolean coerce; - private Boolean ignoreMalformed; - private Boolean ignoreZValue; +public class GeoShapeFieldMapper extends BaseGeoShapeFieldMapper { + public static class Builder extends BaseGeoShapeFieldMapper.Builder { public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); + super (name, new GeoShapeFieldType(), new GeoShapeFieldType()); } - @Override - public GeoShapeFieldType fieldType() { - return (GeoShapeFieldType)fieldType; - } - - public Builder coerce(boolean coerce) { - this.coerce = coerce; - return this; - } - - @Override - protected boolean defaultDocValues(Version indexCreated) { - return false; - } - - protected Explicit coerce(BuilderContext context) { - if (coerce != null) { - return new Explicit<>(coerce, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); - } - return Defaults.COERCE; - } - - public Builder ignoreMalformed(boolean ignoreMalformed) { - this.ignoreMalformed = ignoreMalformed; - return this; - } - - protected Explicit ignoreMalformed(BuilderContext context) { - if (ignoreMalformed != null) { - return new Explicit<>(ignoreMalformed, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); - } - return Defaults.IGNORE_MALFORMED; - } - - protected Explicit ignoreZValue(BuilderContext context) { - if (ignoreZValue != null) { - return new Explicit<>(ignoreZValue, true); - } - return Defaults.IGNORE_Z_VALUE; - } - - public Builder ignoreZValue(final boolean ignoreZValue) { - this.ignoreZValue = ignoreZValue; - return this; + public Builder(String name, boolean coerce, boolean ignoreMalformed, ShapeBuilder.Orientation orientation, + boolean ignoreZ) { + super(name, new GeoShapeFieldType(), new GeoShapeFieldType(), coerce, ignoreMalformed, orientation, ignoreZ); } @Override public GeoShapeFieldMapper build(BuilderContext context) { - GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType; - - if (geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0) { - geoShapeFieldType.setDefaultDistanceErrorPct(Defaults.LEGACY_DISTANCE_ERROR_PCT); - } setupFieldType(context); - - return new GeoShapeFieldMapper(name, fieldType, ignoreMalformed(context), coerce(context), ignoreZValue(context), - context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + return new GeoShapeFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), + ignoreZValue(), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); } } - public static class TypeParser implements Mapper.TypeParser { - - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - Builder builder = new Builder(name); - Boolean pointsOnly = null; - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String fieldName = entry.getKey(); - Object fieldNode = entry.getValue(); - if (Names.TREE.equals(fieldName)) { - builder.fieldType().setTree(fieldNode.toString()); - iterator.remove(); 
- } else if (Names.TREE_LEVELS.equals(fieldName)) { - builder.fieldType().setTreeLevels(Integer.parseInt(fieldNode.toString())); - iterator.remove(); - } else if (Names.TREE_PRESISION.equals(fieldName)) { - builder.fieldType().setPrecisionInMeters(DistanceUnit.parse(fieldNode.toString(), - DistanceUnit.DEFAULT, DistanceUnit.DEFAULT)); - iterator.remove(); - } else if (Names.DISTANCE_ERROR_PCT.equals(fieldName)) { - builder.fieldType().setDistanceErrorPct(Double.parseDouble(fieldNode.toString())); - iterator.remove(); - } else if (Names.ORIENTATION.equals(fieldName)) { - builder.fieldType().setOrientation(ShapeBuilder.Orientation.fromString(fieldNode.toString())); - iterator.remove(); - } else if (Names.STRATEGY.equals(fieldName)) { - builder.fieldType().setStrategyName(fieldNode.toString()); - iterator.remove(); - } else if (IGNORE_MALFORMED.equals(fieldName)) { - builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(fieldNode, name + ".ignore_malformed")); - iterator.remove(); - } else if (Names.COERCE.equals(fieldName)) { - builder.coerce(XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.COERCE)); - iterator.remove(); - } else if (GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName().equals(fieldName)) { - builder.ignoreZValue(XContentMapValues.nodeBooleanValue(fieldNode, - name + "." + GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName())); - iterator.remove(); - } else if (Names.STRATEGY_POINTS_ONLY.equals(fieldName)) { - pointsOnly = XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.STRATEGY_POINTS_ONLY); - iterator.remove(); - } - } - if (pointsOnly != null) { - if (builder.fieldType().strategyName.equals(SpatialStrategy.TERM.getStrategyName()) && pointsOnly == false) { - throw new IllegalArgumentException("points_only cannot be set to false for term strategy"); - } else { - builder.fieldType().setPointsOnly(pointsOnly); - } - } - return builder; + public static final class GeoShapeFieldType extends BaseGeoShapeFieldType { + public GeoShapeFieldType() { + super(); } - } - - public static final class GeoShapeFieldType extends MappedFieldType { - - private String tree = Defaults.TREE; - private String strategyName = Defaults.STRATEGY; - private boolean pointsOnly = Defaults.POINTS_ONLY; - private int treeLevels = 0; - private double precisionInMeters = -1; - private Double distanceErrorPct; - private double defaultDistanceErrorPct = 0.0; - private Orientation orientation = Defaults.ORIENTATION; - - // these are built when the field type is frozen - private PrefixTreeStrategy defaultStrategy; - private RecursivePrefixTreeStrategy recursiveStrategy; - private TermQueryPrefixTreeStrategy termStrategy; - - public GeoShapeFieldType() {} protected GeoShapeFieldType(GeoShapeFieldType ref) { super(ref); - this.tree = ref.tree; - this.strategyName = ref.strategyName; - this.pointsOnly = ref.pointsOnly; - this.treeLevels = ref.treeLevels; - this.precisionInMeters = ref.precisionInMeters; - this.distanceErrorPct = ref.distanceErrorPct; - this.defaultDistanceErrorPct = ref.defaultDistanceErrorPct; - this.orientation = ref.orientation; } @Override public GeoShapeFieldType clone() { return new GeoShapeFieldType(this); } - - @Override - public boolean equals(Object o) { - if (!super.equals(o)) return false; - GeoShapeFieldType that = (GeoShapeFieldType) o; - return treeLevels == that.treeLevels && - precisionInMeters == that.precisionInMeters && - defaultDistanceErrorPct == that.defaultDistanceErrorPct && - Objects.equals(tree, that.tree) && - 
Objects.equals(strategyName, that.strategyName) && - pointsOnly == that.pointsOnly && - Objects.equals(distanceErrorPct, that.distanceErrorPct) && - orientation == that.orientation; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), tree, strategyName, pointsOnly, treeLevels, precisionInMeters, distanceErrorPct, - defaultDistanceErrorPct, orientation); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public void freeze() { - super.freeze(); - // This is a bit hackish: we need to setup the spatial tree and strategies once the field name is set, which - // must be by the time freeze is called. - SpatialPrefixTree prefixTree; - if ("geohash".equals(tree)) { - prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, - getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true)); - } else if ("legacyquadtree".equals(tree)) { - prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, - getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false)); - } else if ("quadtree".equals(tree)) { - prefixTree = new PackedQuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, - getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false)); - } else { - throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]"); - } - - recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, name()); - recursiveStrategy.setDistErrPct(distanceErrorPct()); - recursiveStrategy.setPruneLeafyBranches(false); - termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, name()); - termStrategy.setDistErrPct(distanceErrorPct()); - defaultStrategy = resolveStrategy(strategyName); - defaultStrategy.setPointsOnly(pointsOnly); - } - - @Override - public void checkCompatibility(MappedFieldType fieldType, List conflicts) { - super.checkCompatibility(fieldType, conflicts); - GeoShapeFieldType other = (GeoShapeFieldType)fieldType; - // prevent user from changing strategies - if (strategyName().equals(other.strategyName()) == false) { - conflicts.add("mapper [" + name() + "] has different [strategy]"); - } - - // prevent user from changing trees (changes encoding) - if (tree().equals(other.tree()) == false) { - conflicts.add("mapper [" + name() + "] has different [tree]"); - } - - if ((pointsOnly() != other.pointsOnly())) { - conflicts.add("mapper [" + name() + "] has different points_only"); - } - - // TODO we should allow this, but at the moment levels is used to build bookkeeping variables - // in lucene's SpatialPrefixTree implementations, need a patch to correct that first - if (treeLevels() != other.treeLevels()) { - conflicts.add("mapper [" + name() + "] has different [tree_levels]"); - } - if (precisionInMeters() != other.precisionInMeters()) { - conflicts.add("mapper [" + name() + "] has different [precision]"); - } - } - - private static int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) { - if (treeLevels > 0 || precisionInMeters >= 0) { - return Math.max(treeLevels, precisionInMeters >= 0 ? (geoHash ? 
GeoUtils.geoHashLevelsForPrecision(precisionInMeters) - : GeoUtils.quadTreeLevelsForPrecision(precisionInMeters)) : 0); - } - return defaultLevels; - } - - public String tree() { - return tree; - } - - public void setTree(String tree) { - checkIfFrozen(); - this.tree = tree; - } - - public String strategyName() { - return strategyName; - } - - public void setStrategyName(String strategyName) { - checkIfFrozen(); - this.strategyName = strategyName; - if (this.strategyName.equals(SpatialStrategy.TERM.getStrategyName())) { - this.pointsOnly = true; - } - } - - public boolean pointsOnly() { - return pointsOnly; - } - - public void setPointsOnly(boolean pointsOnly) { - checkIfFrozen(); - this.pointsOnly = pointsOnly; - } - public int treeLevels() { - return treeLevels; - } - - public void setTreeLevels(int treeLevels) { - checkIfFrozen(); - this.treeLevels = treeLevels; - } - - public double precisionInMeters() { - return precisionInMeters; - } - - public void setPrecisionInMeters(double precisionInMeters) { - checkIfFrozen(); - this.precisionInMeters = precisionInMeters; - } - - public double distanceErrorPct() { - return distanceErrorPct == null ? defaultDistanceErrorPct : distanceErrorPct; - } - - public void setDistanceErrorPct(double distanceErrorPct) { - checkIfFrozen(); - this.distanceErrorPct = distanceErrorPct; - } - - public void setDefaultDistanceErrorPct(double defaultDistanceErrorPct) { - checkIfFrozen(); - this.defaultDistanceErrorPct = defaultDistanceErrorPct; - } - - public Orientation orientation() { return this.orientation; } - - public void setOrientation(Orientation orientation) { - checkIfFrozen(); - this.orientation = orientation; - } - - public PrefixTreeStrategy defaultStrategy() { - return this.defaultStrategy; - } - - public PrefixTreeStrategy resolveStrategy(SpatialStrategy strategy) { - return resolveStrategy(strategy.getStrategyName()); - } - - public PrefixTreeStrategy resolveStrategy(String strategyName) { - if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) { - return recursiveStrategy; - } - if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) { - return termStrategy; - } - throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]"); - } - - @Override - public Query existsQuery(QueryShardContext context) { - return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name())); - } - - @Override - public Query termQuery(Object value, QueryShardContext context) { - throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead"); - } } - protected Explicit coerce; - protected Explicit ignoreMalformed; - protected Explicit ignoreZValue; - - public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, Explicit ignoreMalformed, - Explicit coerce, Explicit ignoreZValue, Settings indexSettings, + public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + Explicit ignoreMalformed, Explicit coerce, + Explicit ignoreZValue, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo); - this.coerce = coerce; - this.ignoreMalformed = ignoreMalformed; - this.ignoreZValue = ignoreZValue; + super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, ignoreZValue, indexSettings, + multiFields, copyTo); } @Override public GeoShapeFieldType fieldType() { return (GeoShapeFieldType) super.fieldType(); } + 
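(For context on the parse/indexShape logic added below: shapes are no longer routed through spatial4j strategies; whatever ShapeParser produces is handed straight to Lucene's LatLonShape tessellator. The following standalone sketch shows that final call in isolation. It is not part of the patch: the class name, field name "geometry", and the coordinates are illustrative, and it assumes a Lucene version providing org.apache.lucene.document.LatLonShape on the classpath.)

import org.apache.lucene.document.Field;
import org.apache.lucene.document.LatLonShape;
import org.apache.lucene.geo.Polygon;

public class LatLonShapeSketch {
    public static void main(String[] args) {
        // A closed ring (first point equals last point) with latitudes and
        // longitudes in separate parallel arrays -- the same convention the
        // Rectangle branch of indexShape() uses when it rewrites a rectangle
        // as a five-point polygon.
        double[] lats = {0.0, 0.0, 1.0, 1.0, 0.0};
        double[] lons = {100.0, 101.0, 101.0, 100.0, 100.0};
        Polygon polygon = new Polygon(lats, lons);

        // LatLonShape tessellates the polygon into triangles; the mapper adds
        // each returned field to the document under the mapper's field name.
        Field[] fields = LatLonShape.createIndexableFields("geometry", polygon);
        System.out.println("tessellated into " + fields.length + " field(s)");
    }
}

(Every geometry type handled by indexShape() reduces to one of these createIndexableFields overloads -- point, line, or polygon -- and geometry collections simply recurse.)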
+ /** parsing logic for {@link LatLonShape} indexing */ @Override public void parse(ParseContext context) throws IOException { try { - Shape shape = context.parseExternalValue(Shape.class); + Object shape = context.parseExternalValue(Object.class); if (shape == null) { ShapeBuilder shapeBuilder = ShapeParser.parse(context.parser(), this); if (shapeBuilder == null) { return; } - shape = shapeBuilder.buildS4J(); - } - if (fieldType().pointsOnly() == true) { - // index configured for pointsOnly - if (shape instanceof XShapeCollection && XShapeCollection.class.cast(shape).pointsOnly()) { - // MULTIPOINT data: index each point separately - List shapes = ((XShapeCollection) shape).getShapes(); - for (Shape s : shapes) { - indexShape(context, s); - } - return; - } else if (shape instanceof Point == false) { - throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a " - + ((shape instanceof JtsGeometry) ? ((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass()) - + " was found"); - } + shape = shapeBuilder.buildLucene(); } indexShape(context, shape); } catch (Exception e) { if (ignoreMalformed.value() == false) { throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(), - fieldType().typeName()); + fieldType().typeName()); } - context.addIgnoredField(fieldType.name()); + context.addIgnoredField(fieldType().name()); } } - private void indexShape(ParseContext context, Shape shape) { - List fields = new ArrayList<>(Arrays.asList(fieldType().defaultStrategy().createIndexableFields(shape))); - createFieldNamesField(context, fields); - for (IndexableField field : fields) { - context.doc().add(field); - } - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - } - - @Override - protected void doMerge(Mapper mergeWith) { - super.doMerge(mergeWith); - - GeoShapeFieldMapper gsfm = (GeoShapeFieldMapper)mergeWith; - if (gsfm.coerce.explicit()) { - this.coerce = gsfm.coerce; - } - if (gsfm.ignoreMalformed.explicit()) { - this.ignoreMalformed = gsfm.ignoreMalformed; - } - if (gsfm.ignoreZValue.explicit()) { - this.ignoreZValue = gsfm.ignoreZValue; - } - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - builder.field("type", contentType()); - - if (includeDefaults || fieldType().tree().equals(Defaults.TREE) == false) { - builder.field(Names.TREE, fieldType().tree()); - } - - if (fieldType().treeLevels() != 0) { - builder.field(Names.TREE_LEVELS, fieldType().treeLevels()); - } else if(includeDefaults && fieldType().precisionInMeters() == -1) { // defaults only make sense if precision is not specified - if ("geohash".equals(fieldType().tree())) { - builder.field(Names.TREE_LEVELS, Defaults.GEOHASH_LEVELS); - } else if ("legacyquadtree".equals(fieldType().tree())) { - builder.field(Names.TREE_LEVELS, Defaults.QUADTREE_LEVELS); - } else if ("quadtree".equals(fieldType().tree())) { - builder.field(Names.TREE_LEVELS, Defaults.QUADTREE_LEVELS); - } else { - throw new IllegalArgumentException("Unknown prefix tree type [" + fieldType().tree() + "]"); + private void indexShape(ParseContext context, Object luceneShape) { + if (luceneShape instanceof GeoPoint) { + GeoPoint pt = (GeoPoint) luceneShape; + indexFields(context, LatLonShape.createIndexableFields(name(), pt.lat(), pt.lon())); + } else if (luceneShape instanceof double[]) { + double[] pt = (double[]) luceneShape; + 
indexFields(context, LatLonShape.createIndexableFields(name(), pt[1], pt[0])); + } else if (luceneShape instanceof Line) { + indexFields(context, LatLonShape.createIndexableFields(name(), (Line)luceneShape)); + } else if (luceneShape instanceof Polygon) { + indexFields(context, LatLonShape.createIndexableFields(name(), (Polygon) luceneShape)); + } else if (luceneShape instanceof double[][]) { + double[][] pts = (double[][])luceneShape; + for (int i = 0; i < pts.length; ++i) { + indexFields(context, LatLonShape.createIndexableFields(name(), pts[i][1], pts[i][0])); } - } - if (fieldType().precisionInMeters() != -1) { - builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(fieldType().precisionInMeters())); - } else if (includeDefaults && fieldType().treeLevels() == 0) { // defaults only make sense if tree levels are not specified - builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(50)); - } - if (includeDefaults || fieldType().strategyName().equals(Defaults.STRATEGY) == false) { - builder.field(Names.STRATEGY, fieldType().strategyName()); - } - if (includeDefaults || fieldType().distanceErrorPct() != fieldType().defaultDistanceErrorPct) { - builder.field(Names.DISTANCE_ERROR_PCT, fieldType().distanceErrorPct()); - } - if (includeDefaults || fieldType().orientation() != Defaults.ORIENTATION) { - builder.field(Names.ORIENTATION, fieldType().orientation()); - } - if (fieldType().strategyName().equals(SpatialStrategy.TERM.getStrategyName())) { - // For TERMs strategy the defaults for points only change to true - if (includeDefaults || fieldType().pointsOnly() != true) { - builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly()); + } else if (luceneShape instanceof Line[]) { + Line[] lines = (Line[]) luceneShape; + for (int i = 0; i < lines.length; ++i) { + indexFields(context, LatLonShape.createIndexableFields(name(), lines[i])); + } + } else if (luceneShape instanceof Polygon[]) { + Polygon[] polys = (Polygon[]) luceneShape; + for (int i = 0; i < polys.length; ++i) { + indexFields(context, LatLonShape.createIndexableFields(name(), polys[i])); + } + } else if (luceneShape instanceof Rectangle) { + // index rectangle as a polygon + Rectangle r = (Rectangle) luceneShape; + Polygon p = new Polygon(new double[]{r.minLat, r.minLat, r.maxLat, r.maxLat, r.minLat}, + new double[]{r.minLon, r.maxLon, r.maxLon, r.minLon, r.minLon}); + indexFields(context, LatLonShape.createIndexableFields(name(), p)); + } else if (luceneShape instanceof Object[]) { + // recurse to index geometry collection + for (Object o : (Object[])luceneShape) { + indexShape(context, o); } } else { - if (includeDefaults || fieldType().pointsOnly() != GeoShapeFieldMapper.Defaults.POINTS_ONLY) { - builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly()); - } - } - if (includeDefaults || coerce.explicit()) { - builder.field(Names.COERCE, coerce.value()); - } - if (includeDefaults || ignoreMalformed.explicit()) { - builder.field(IGNORE_MALFORMED, ignoreMalformed.value()); - } - if (includeDefaults || ignoreZValue.explicit()) { - builder.field(GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName(), ignoreZValue.value()); + throw new IllegalArgumentException("invalid shape type found [" + luceneShape.getClass() + "] while indexing shape"); } } - public Explicit coerce() { - return coerce; - } - - public Explicit ignoreMalformed() { - return ignoreMalformed; - } - - public Explicit ignoreZValue() { - return ignoreZValue; - } - - @Override - protected String contentType() { - return 
CONTENT_TYPE; + private void indexFields(ParseContext context, Field[] fields) { + ArrayList flist = new ArrayList<>(Arrays.asList(fields)); + createFieldNamesField(context, flist); + for (IndexableField f : flist) { + context.doc().add(f); + } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java new file mode 100644 index 00000000000..b68e48305b2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java @@ -0,0 +1,596 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.mapper; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; +import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; +import org.apache.lucene.spatial.prefix.TermQueryPrefixTreeStrategy; +import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; +import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree; +import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; +import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.geo.ShapesAvailability; +import org.elasticsearch.common.geo.SpatialStrategy; +import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; +import org.elasticsearch.common.geo.parsers.ShapeParser; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +/** + * FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s. + *

+ * Currently Shapes can only be indexed and can only be queried using + * {@link org.elasticsearch.index.query.GeoShapeQueryBuilder}, consequently + * a lot of behavior in this Mapper is disabled. + *

+ * Format supported: + *

+ * "field" : { + * "type" : "polygon", + * "coordinates" : [ + * [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] + * ] + * } + *

+ * or: + *

+ * "field" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0)) + * + * @deprecated use {@link GeoShapeFieldMapper} + */ +@Deprecated +public class LegacyGeoShapeFieldMapper extends BaseGeoShapeFieldMapper { + + public static final String CONTENT_TYPE = "geo_shape"; + + @Deprecated + public static class DeprecatedParameters { + public static class Names { + public static final ParseField STRATEGY = new ParseField("strategy"); + public static final ParseField TREE = new ParseField("tree"); + public static final ParseField TREE_LEVELS = new ParseField("tree_levels"); + public static final ParseField PRECISION = new ParseField("precision"); + public static final ParseField DISTANCE_ERROR_PCT = new ParseField("distance_error_pct"); + public static final ParseField POINTS_ONLY = new ParseField("points_only"); + } + + public static class PrefixTrees { + public static final String LEGACY_QUADTREE = "legacyquadtree"; + public static final String QUADTREE = "quadtree"; + public static final String GEOHASH = "geohash"; + } + + public static class Defaults { + public static final SpatialStrategy STRATEGY = SpatialStrategy.RECURSIVE; + public static final String TREE = "quadtree"; + public static final String PRECISION = "50m"; + public static final int QUADTREE_LEVELS = GeoUtils.quadTreeLevelsForPrecision(PRECISION); + public static final int GEOHASH_TREE_LEVELS = GeoUtils.geoHashLevelsForPrecision(PRECISION); + public static final boolean POINTS_ONLY = false; + public static final double DISTANCE_ERROR_PCT = 0.025d; + } + + public SpatialStrategy strategy = null; + public String tree = null; + public int treeLevels = Integer.MIN_VALUE; + public String precision = null; + public Boolean pointsOnly = null; + public double distanceErrorPct = Double.NaN; + + public void setSpatialStrategy(SpatialStrategy strategy) { + this.strategy = strategy; + } + + public void setTree(String prefixTree) { + this.tree = prefixTree; + } + + public void setTreeLevels(int treeLevels) { + this.treeLevels = treeLevels; + } + + public void setPrecision(String precision) { + this.precision = precision; + } + + public void setPointsOnly(boolean pointsOnly) { + if (this.strategy == SpatialStrategy.TERM && pointsOnly == false) { + throw new ElasticsearchParseException("points_only cannot be set to false for term strategy"); + } + this.pointsOnly = pointsOnly; + } + + public void setDistanceErrorPct(double distanceErrorPct) { + this.distanceErrorPct = distanceErrorPct; + } + + protected void setup() { + if (strategy == null) { + strategy = Defaults.STRATEGY; + } + if (tree == null) { + tree = Defaults.TREE; + } + if (Double.isNaN(distanceErrorPct)) { + if (precision != null || treeLevels != Integer.MIN_VALUE) { + distanceErrorPct = 0d; + } else { + distanceErrorPct = Defaults.DISTANCE_ERROR_PCT; + } + } + if (treeLevels == Integer.MIN_VALUE && precision == null) { + // set default precision if treeLevels is not explicitly set + precision = Defaults.PRECISION; + } + if (treeLevels == Integer.MIN_VALUE) { + if (precision.equals(Defaults.PRECISION)) { + treeLevels = tree.equals(Defaults.TREE) + ? Defaults.QUADTREE_LEVELS + : Defaults.GEOHASH_TREE_LEVELS; + } else { + treeLevels = tree == Defaults.TREE + ? 
GeoUtils.quadTreeLevelsForPrecision(precision) + : GeoUtils.geoHashLevelsForPrecision(precision); + } + } + if (pointsOnly == null) { + if (strategy == SpatialStrategy.TERM) { + pointsOnly = true; + } else { + pointsOnly = Defaults.POINTS_ONLY; + } + } + } + + public static boolean parse(String name, String fieldName, Object fieldNode, DeprecatedParameters deprecatedParameters) { + if (Names.STRATEGY.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { + checkPrefixTreeSupport(fieldName); + deprecatedParameters.setSpatialStrategy(SpatialStrategy.fromString(fieldNode.toString())); + } else if (Names.TREE.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { + checkPrefixTreeSupport(fieldName); + deprecatedParameters.setTree(fieldNode.toString()); + } else if (Names.TREE_LEVELS.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { + checkPrefixTreeSupport(fieldName); + deprecatedParameters.setTreeLevels(Integer.parseInt(fieldNode.toString())); + } else if (Names.PRECISION.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { + checkPrefixTreeSupport(fieldName); + deprecatedParameters.setPrecision(fieldNode.toString()); + } else if (Names.DISTANCE_ERROR_PCT.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { + checkPrefixTreeSupport(fieldName); + deprecatedParameters.setDistanceErrorPct(Double.parseDouble(fieldNode.toString())); + } else if (Names.POINTS_ONLY.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { + checkPrefixTreeSupport(fieldName); + deprecatedParameters.setPointsOnly( + XContentMapValues.nodeBooleanValue(fieldNode, name + "." + DeprecatedParameters.Names.POINTS_ONLY)); + } else { + return false; + } + return true; + } + + private static void checkPrefixTreeSupport(String fieldName) { + if (ShapesAvailability.JTS_AVAILABLE == false || ShapesAvailability.SPATIAL4J_AVAILABLE == false) { + throw new ElasticsearchParseException("Field parameter [{}] is not supported for [{}] field type", + fieldName, CONTENT_TYPE); + } + DEPRECATION_LOGGER.deprecated("Field parameter [{}] is deprecated and will be removed in a future version.", + fieldName); + } + } + + private static final Logger logger = LogManager.getLogger(LegacyGeoShapeFieldMapper.class); + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(logger); + + public static class Builder extends BaseGeoShapeFieldMapper.Builder { + + DeprecatedParameters deprecatedParameters; + + public Builder(String name) { + super(name, new GeoShapeFieldType(), new GeoShapeFieldType()); + this.deprecatedParameters = new DeprecatedParameters(); + this.deprecatedParameters.setup(); + } + + public Builder(String name, boolean coerce, boolean ignoreMalformed, Orientation orientation, + boolean ignoreZ, DeprecatedParameters deprecatedParameters) { + super(name, new GeoShapeFieldType(), new GeoShapeFieldType(), coerce, ignoreMalformed, orientation, ignoreZ); + this.deprecatedParameters = deprecatedParameters; + this.deprecatedParameters.setup(); + } + + @Override + public GeoShapeFieldType fieldType() { + return (GeoShapeFieldType)fieldType; + } + + private void setupFieldTypeDeprecatedParameters() { + GeoShapeFieldType ft = fieldType(); + ft.setStrategy(deprecatedParameters.strategy); + ft.setTree(deprecatedParameters.tree); + ft.setTreeLevels(deprecatedParameters.treeLevels); + if (deprecatedParameters.precision != null) { + // precision is only set iff: a. treeLevel is not explicitly set, b. 
its explicitly set + ft.setPrecisionInMeters(DistanceUnit.parse(deprecatedParameters.precision, + DistanceUnit.DEFAULT, DistanceUnit.DEFAULT)); + } + ft.setDistanceErrorPct(deprecatedParameters.distanceErrorPct); + ft.setPointsOnly(deprecatedParameters.pointsOnly); + } + + private void setupPrefixTrees() { + GeoShapeFieldType ft = fieldType(); + SpatialPrefixTree prefixTree; + if (ft.tree().equals(DeprecatedParameters.PrefixTrees.GEOHASH)) { + prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, + getLevels(ft.treeLevels(), ft.precisionInMeters(), DeprecatedParameters.Defaults.GEOHASH_TREE_LEVELS, true)); + } else if (ft.tree().equals(DeprecatedParameters.PrefixTrees.LEGACY_QUADTREE)) { + prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, + getLevels(ft.treeLevels(), ft.precisionInMeters(), DeprecatedParameters.Defaults.QUADTREE_LEVELS, false)); + } else if (ft.tree().equals(DeprecatedParameters.PrefixTrees.QUADTREE)) { + prefixTree = new PackedQuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, + getLevels(ft.treeLevels(), ft.precisionInMeters(), DeprecatedParameters.Defaults.QUADTREE_LEVELS, false)); + } else { + throw new IllegalArgumentException("Unknown prefix tree type [" + ft.tree() + "]"); + } + + // setup prefix trees regardless of strategy (this is used for the QueryBuilder) + // recursive: + RecursivePrefixTreeStrategy rpts = new RecursivePrefixTreeStrategy(prefixTree, ft.name()); + rpts.setDistErrPct(ft.distanceErrorPct()); + rpts.setPruneLeafyBranches(false); + ft.recursiveStrategy = rpts; + + // term: + TermQueryPrefixTreeStrategy termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, ft.name()); + termStrategy.setDistErrPct(ft.distanceErrorPct()); + ft.termStrategy = termStrategy; + + // set default (based on strategy): + ft.defaultPrefixTreeStrategy = ft.resolvePrefixTreeStrategy(ft.strategy()); + ft.defaultPrefixTreeStrategy.setPointsOnly(ft.pointsOnly()); + } + + @Override + protected void setupFieldType(BuilderContext context) { + super.setupFieldType(context); + + // field mapper handles this at build time + // but prefix tree strategies require a name, so throw a similar exception + if (fieldType().name().isEmpty()) { + throw new IllegalArgumentException("name cannot be empty string"); + } + + // setup the deprecated parameters and the prefix tree configuration + setupFieldTypeDeprecatedParameters(); + setupPrefixTrees(); + } + + private static int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) { + if (treeLevels > 0 || precisionInMeters >= 0) { + return Math.max(treeLevels, precisionInMeters >= 0 ? (geoHash ? 
GeoUtils.geoHashLevelsForPrecision(precisionInMeters) + : GeoUtils.quadTreeLevelsForPrecision(precisionInMeters)) : 0); + } + return defaultLevels; + } + + @Override + public LegacyGeoShapeFieldMapper build(BuilderContext context) { + setupFieldType(context); + + return new LegacyGeoShapeFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), + coerce(context), orientation(), ignoreZValue(), context.indexSettings(), + multiFieldsBuilder.build(this, context), copyTo); + } + } + + public static final class GeoShapeFieldType extends BaseGeoShapeFieldType { + + private String tree = DeprecatedParameters.Defaults.TREE; + private SpatialStrategy strategy = DeprecatedParameters.Defaults.STRATEGY; + private boolean pointsOnly = DeprecatedParameters.Defaults.POINTS_ONLY; + private int treeLevels = 0; + private double precisionInMeters = -1; + private Double distanceErrorPct; + private double defaultDistanceErrorPct = 0.0; + + // these are built when the field type is frozen + private PrefixTreeStrategy defaultPrefixTreeStrategy; + private RecursivePrefixTreeStrategy recursiveStrategy; + private TermQueryPrefixTreeStrategy termStrategy; + + public GeoShapeFieldType() { + setIndexOptions(IndexOptions.DOCS); + setTokenized(false); + setStored(false); + setStoreTermVectors(false); + setOmitNorms(true); + } + + protected GeoShapeFieldType(GeoShapeFieldType ref) { + super(ref); + this.tree = ref.tree; + this.strategy = ref.strategy; + this.pointsOnly = ref.pointsOnly; + this.treeLevels = ref.treeLevels; + this.precisionInMeters = ref.precisionInMeters; + this.distanceErrorPct = ref.distanceErrorPct; + this.defaultDistanceErrorPct = ref.defaultDistanceErrorPct; + } + + @Override + public GeoShapeFieldType clone() { + return new GeoShapeFieldType(this); + } + + @Override + public boolean equals(Object o) { + if (!super.equals(o)) return false; + GeoShapeFieldType that = (GeoShapeFieldType) o; + return treeLevels == that.treeLevels && + precisionInMeters == that.precisionInMeters && + defaultDistanceErrorPct == that.defaultDistanceErrorPct && + Objects.equals(tree, that.tree) && + Objects.equals(strategy, that.strategy) && + pointsOnly == that.pointsOnly && + Objects.equals(distanceErrorPct, that.distanceErrorPct); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), tree, strategy, pointsOnly, treeLevels, precisionInMeters, distanceErrorPct, + defaultDistanceErrorPct); + } + + @Override + public void checkCompatibility(MappedFieldType fieldType, List conflicts) { + super.checkCompatibility(fieldType, conflicts); + GeoShapeFieldType other = (GeoShapeFieldType)fieldType; + // prevent user from changing strategies + if (strategy() != other.strategy()) { + conflicts.add("mapper [" + name() + "] has different [strategy]"); + } + + // prevent user from changing trees (changes encoding) + if (tree().equals(other.tree()) == false) { + conflicts.add("mapper [" + name() + "] has different [tree]"); + } + + if ((pointsOnly() != other.pointsOnly())) { + conflicts.add("mapper [" + name() + "] has different points_only"); + } + + // TODO we should allow this, but at the moment levels is used to build bookkeeping variables + // in lucene's SpatialPrefixTree implementations, need a patch to correct that first + if (treeLevels() != other.treeLevels()) { + conflicts.add("mapper [" + name() + "] has different [tree_levels]"); + } + if (precisionInMeters() != other.precisionInMeters()) { + conflicts.add("mapper [" + name() + "] has different [precision]"); + } + } + + public 
String tree() { + return tree; + } + + public void setTree(String tree) { + checkIfFrozen(); + this.tree = tree; + } + + public SpatialStrategy strategy() { + return strategy; + } + + public void setStrategy(SpatialStrategy strategy) { + checkIfFrozen(); + this.strategy = strategy; + if (this.strategy.equals(SpatialStrategy.TERM)) { + this.pointsOnly = true; + } + } + + public boolean pointsOnly() { + return pointsOnly; + } + + public void setPointsOnly(boolean pointsOnly) { + checkIfFrozen(); + this.pointsOnly = pointsOnly; + } + public int treeLevels() { + return treeLevels; + } + + public void setTreeLevels(int treeLevels) { + checkIfFrozen(); + this.treeLevels = treeLevels; + } + + public double precisionInMeters() { + return precisionInMeters; + } + + public void setPrecisionInMeters(double precisionInMeters) { + checkIfFrozen(); + this.precisionInMeters = precisionInMeters; + } + + public double distanceErrorPct() { + return distanceErrorPct == null ? defaultDistanceErrorPct : distanceErrorPct; + } + + public void setDistanceErrorPct(double distanceErrorPct) { + checkIfFrozen(); + this.distanceErrorPct = distanceErrorPct; + } + + public void setDefaultDistanceErrorPct(double defaultDistanceErrorPct) { + checkIfFrozen(); + this.defaultDistanceErrorPct = defaultDistanceErrorPct; + } + + public PrefixTreeStrategy defaultPrefixTreeStrategy() { + return this.defaultPrefixTreeStrategy; + } + + public PrefixTreeStrategy resolvePrefixTreeStrategy(SpatialStrategy strategy) { + return resolvePrefixTreeStrategy(strategy.getStrategyName()); + } + + public PrefixTreeStrategy resolvePrefixTreeStrategy(String strategyName) { + if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) { + return recursiveStrategy; + } + if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) { + return termStrategy; + } + throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]"); + } + } + + public LegacyGeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, + Explicit ignoreMalformed, Explicit coerce, Explicit orientation, + Explicit ignoreZValue, Settings indexSettings, + MultiFields multiFields, CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, ignoreZValue, indexSettings, + multiFields, copyTo); + } + + @Override + public GeoShapeFieldType fieldType() { + return (GeoShapeFieldType) super.fieldType(); + } + + @Override + public void parse(ParseContext context) throws IOException { + try { + Shape shape = context.parseExternalValue(Shape.class); + if (shape == null) { + ShapeBuilder shapeBuilder = ShapeParser.parse(context.parser(), this); + if (shapeBuilder == null) { + return; + } + shape = shapeBuilder.buildS4J(); + } + if (fieldType().pointsOnly() == true) { + // index configured for pointsOnly + if (shape instanceof XShapeCollection && XShapeCollection.class.cast(shape).pointsOnly()) { + // MULTIPOINT data: index each point separately + List shapes = ((XShapeCollection) shape).getShapes(); + for (Shape s : shapes) { + indexShape(context, s); + } + return; + } else if (shape instanceof Point == false) { + throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a " + + ((shape instanceof JtsGeometry) ? 
((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass()) + + " was found"); + } + } + indexShape(context, shape); + } catch (Exception e) { + if (ignoreMalformed.value() == false) { + throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(), + fieldType().typeName()); + } + context.addIgnoredField(fieldType.name()); + } + } + + private void indexShape(ParseContext context, Shape shape) { + List fields = new ArrayList<>(Arrays.asList(fieldType().defaultPrefixTreeStrategy().createIndexableFields(shape))); + createFieldNamesField(context, fields); + for (IndexableField field : fields) { + context.doc().add(field); + } + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + super.doXContentBody(builder, includeDefaults, params); + + if (includeDefaults || fieldType().tree().equals(DeprecatedParameters.Defaults.TREE) == false) { + builder.field(DeprecatedParameters.Names.TREE.getPreferredName(), fieldType().tree()); + } + + if (fieldType().treeLevels() != 0) { + builder.field(DeprecatedParameters.Names.TREE_LEVELS.getPreferredName(), fieldType().treeLevels()); + } else if(includeDefaults && fieldType().precisionInMeters() == -1) { // defaults only make sense if precision is not specified + if (DeprecatedParameters.PrefixTrees.GEOHASH.equals(fieldType().tree())) { + builder.field(DeprecatedParameters.Names.TREE_LEVELS.getPreferredName(), + DeprecatedParameters.Defaults.GEOHASH_TREE_LEVELS); + } else if (DeprecatedParameters.PrefixTrees.LEGACY_QUADTREE.equals(fieldType().tree())) { + builder.field(DeprecatedParameters.Names.TREE_LEVELS.getPreferredName(), + DeprecatedParameters.Defaults.QUADTREE_LEVELS); + } else if (DeprecatedParameters.PrefixTrees.QUADTREE.equals(fieldType().tree())) { + builder.field(DeprecatedParameters.Names.TREE_LEVELS.getPreferredName(), + DeprecatedParameters.Defaults.QUADTREE_LEVELS); + } else { + throw new IllegalArgumentException("Unknown prefix tree type [" + fieldType().tree() + "]"); + } + } + if (fieldType().precisionInMeters() != -1) { + builder.field(DeprecatedParameters.Names.PRECISION.getPreferredName(), + DistanceUnit.METERS.toString(fieldType().precisionInMeters())); + } else if (includeDefaults && fieldType().treeLevels() == 0) { // defaults only make sense if tree levels are not specified + builder.field(DeprecatedParameters.Names.PRECISION.getPreferredName(), + DistanceUnit.METERS.toString(50)); + } + + builder.field(DeprecatedParameters.Names.STRATEGY.getPreferredName(), fieldType().strategy().getStrategyName()); + + if (includeDefaults || fieldType().distanceErrorPct() != fieldType().defaultDistanceErrorPct) { + builder.field(DeprecatedParameters.Names.DISTANCE_ERROR_PCT.getPreferredName(), fieldType().distanceErrorPct()); + } + if (fieldType().strategy() == SpatialStrategy.TERM) { + // For TERMs strategy the defaults for points only change to true + if (includeDefaults || fieldType().pointsOnly() != true) { + builder.field(DeprecatedParameters.Names.POINTS_ONLY.getPreferredName(), fieldType().pointsOnly()); + } + } else { + if (includeDefaults || fieldType().pointsOnly() != DeprecatedParameters.Defaults.POINTS_ONLY) { + builder.field(DeprecatedParameters.Names.POINTS_ONLY.getPreferredName(), fieldType().pointsOnly()); + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 
c5170508969..6ee0f3f10dd 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -19,6 +19,10 @@ package org.elasticsearch.index.query; +import org.apache.lucene.document.LatLonShape; +import org.apache.lucene.geo.Line; +import org.apache.lucene.geo.Polygon; +import org.apache.lucene.geo.Rectangle; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -36,8 +40,9 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.geo.parsers.ShapeParser; @@ -48,7 +53,8 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.GeoShapeFieldMapper; +import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper; +import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -329,9 +335,9 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder listener) { - if (ShapesAvailability.JTS_AVAILABLE == false) { - throw new IllegalStateException("JTS not available"); - } getRequest.preference("_local"); client.get(getRequest, new ActionListener(){ diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java index a1038853c06..24b5d7f427c 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -25,13 +25,13 @@ import org.elasticsearch.action.admin.indices.rollover.MaxDocsCondition; import org.elasticsearch.action.admin.indices.rollover.MaxSizeCondition; import org.elasticsearch.action.resync.TransportResyncReplicationAction; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineFactory; +import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper; import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.CompletionFieldMapper; @@ -39,7 +39,6 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.FieldAliasMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import 
org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IgnoredFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; @@ -132,10 +131,7 @@ public class IndicesModule extends AbstractModule { mappers.put(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser()); mappers.put(FieldAliasMapper.CONTENT_TYPE, new FieldAliasMapper.TypeParser()); mappers.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); - - if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { - mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser()); - } + mappers.put(BaseGeoShapeFieldMapper.CONTENT_TYPE, new BaseGeoShapeFieldMapper.TypeParser()); for (MapperPlugin mapperPlugin : mapperPlugins) { for (Map.Entry entry : mapperPlugin.getMappers().entrySet()) { diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java index a9a21054906..2acabee8797 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.GeoShapeFieldMapper; +import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions; @@ -296,7 +296,8 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase { LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath()); - final GeoShapeFieldMapper mapperBuilder = new GeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext); + final LegacyGeoShapeFieldMapper mapperBuilder = + (LegacyGeoShapeFieldMapper) (new LegacyGeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext)); try (XContentParser parser = createParser(polygonGeoJson)) { parser.nextToken(); ElasticsearchGeoAssertions.assertEquals(jtsGeom(expected), ShapeParser.parse(parser, mapperBuilder).buildS4J()); @@ -896,7 +897,6 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase { .startArray().value(101.0).value(1.0).endArray() .endArray() .endObject(); - ShapeCollection expected = shapeCollection( SPATIAL_CONTEXT.makePoint(100, 0), SPATIAL_CONTEXT.makePoint(101, 1.0)); @@ -968,7 +968,6 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase { shellCoordinates.add(new Coordinate(102, 2)); shellCoordinates.add(new Coordinate(102, 3)); - shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); Polygon withoutHoles = GEOMETRY_FACTORY.createPolygon(shell, null); @@ -1149,7 +1148,6 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase { .startObject("nested").startArray("coordinates").value(200.0).value(0.0).endArray().endObject() .startObject("lala").field("type", "NotAPoint").endObject() 
.endObject(); - Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0)); assertGeometryEquals(new JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson, true); diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java index 1b4c0b9dce0..94c96e00d92 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; +import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.test.geo.RandomShapeGenerator; import org.locationtech.jts.geom.Coordinate; @@ -146,7 +147,6 @@ public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase { @Override public void testParseLineString() throws IOException { List coordinates = randomLineStringCoords(); - LineString expected = GEOMETRY_FACTORY.createLineString(coordinates.toArray(new Coordinate[coordinates.size()])); assertExpected(jtsGeom(expected), new LineStringBuilder(coordinates), true); @@ -279,13 +279,14 @@ public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase { parser.nextToken(); Settings indexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_3_0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_7_0_0) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()).build(); Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath()); - final GeoShapeFieldMapper mapperBuilder = new GeoShapeFieldMapper.Builder("test").ignoreZValue(false).build(mockBuilderContext); + final GeoShapeFieldMapper mapperBuilder = + (GeoShapeFieldMapper) (new GeoShapeFieldMapper.Builder("test").ignoreZValue(false).build(mockBuilderContext)); // test store z disabled ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, @@ -323,7 +324,8 @@ public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase { .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()).build(); Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath()); - final GeoShapeFieldMapper mapperBuilder = new GeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext); + final LegacyGeoShapeFieldMapper mapperBuilder = + (LegacyGeoShapeFieldMapper)(new LegacyGeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext)); // test store z disabled ElasticsearchException e = expectThrows(ElasticsearchException.class, @@ -352,7 +354,8 @@ public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase { .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()).build(); Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath()); - final GeoShapeFieldMapper mapperBuilder = new GeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext); + final LegacyGeoShapeFieldMapper mapperBuilder = + (LegacyGeoShapeFieldMapper)(new 
LegacyGeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext)); ShapeBuilder shapeBuilder = ShapeParser.parse(parser, mapperBuilder); assertEquals(shapeBuilder.numDimensions(), 3); @@ -372,12 +375,14 @@ public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase { .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()).build(); Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath()); - final GeoShapeFieldMapper defaultMapperBuilder = new GeoShapeFieldMapper.Builder("test").coerce(false).build(mockBuilderContext); + final LegacyGeoShapeFieldMapper defaultMapperBuilder = + (LegacyGeoShapeFieldMapper)(new LegacyGeoShapeFieldMapper.Builder("test").coerce(false).build(mockBuilderContext)); ElasticsearchParseException exception = expectThrows(ElasticsearchParseException.class, () -> ShapeParser.parse(parser, defaultMapperBuilder)); assertEquals("invalid LinearRing found (coordinates are not closed)", exception.getMessage()); - final GeoShapeFieldMapper coercingMapperBuilder = new GeoShapeFieldMapper.Builder("test").coerce(true).build(mockBuilderContext); + final LegacyGeoShapeFieldMapper coercingMapperBuilder = + (LegacyGeoShapeFieldMapper)(new LegacyGeoShapeFieldMapper.Builder("test").coerce(true).build(mockBuilderContext)); ShapeBuilder shapeBuilder = ShapeParser.parse(parser, coercingMapperBuilder); assertNotNull(shapeBuilder); assertEquals("polygon ((100.0 5.0, 100.0 10.0, 90.0 10.0, 90.0 5.0, 100.0 5.0))", shapeBuilder.toWKT()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java index 0e6854c41e3..20c49c00935 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java @@ -24,8 +24,8 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.elasticsearch.Version; import org.elasticsearch.common.geo.builders.PointBuilder; -import org.locationtech.spatial4j.shape.Point; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; @@ -63,6 +63,7 @@ public class ExternalMapper extends FieldMapper { private BooleanFieldMapper.Builder boolBuilder = new BooleanFieldMapper.Builder(Names.FIELD_BOOL); private GeoPointFieldMapper.Builder latLonPointBuilder = new GeoPointFieldMapper.Builder(Names.FIELD_POINT); private GeoShapeFieldMapper.Builder shapeBuilder = new GeoShapeFieldMapper.Builder(Names.FIELD_SHAPE); + private LegacyGeoShapeFieldMapper.Builder legacyShapeBuilder = new LegacyGeoShapeFieldMapper.Builder(Names.FIELD_SHAPE); private Mapper.Builder stringBuilder; private String generatedValue; private String mapperName; @@ -86,7 +87,9 @@ public class ExternalMapper extends FieldMapper { BinaryFieldMapper binMapper = binBuilder.build(context); BooleanFieldMapper boolMapper = boolBuilder.build(context); GeoPointFieldMapper pointMapper = latLonPointBuilder.build(context); - GeoShapeFieldMapper shapeMapper = shapeBuilder.build(context); + BaseGeoShapeFieldMapper shapeMapper = (context.indexCreatedVersion().before(Version.V_6_6_0)) + ? 
legacyShapeBuilder.build(context) + : shapeBuilder.build(context); FieldMapper stringMapper = (FieldMapper)stringBuilder.build(context); context.path().remove(); @@ -150,13 +153,13 @@ public class ExternalMapper extends FieldMapper { private BinaryFieldMapper binMapper; private BooleanFieldMapper boolMapper; private GeoPointFieldMapper pointMapper; - private GeoShapeFieldMapper shapeMapper; + private BaseGeoShapeFieldMapper shapeMapper; private FieldMapper stringMapper; public ExternalMapper(String simpleName, MappedFieldType fieldType, String generatedValue, String mapperName, BinaryFieldMapper binMapper, BooleanFieldMapper boolMapper, GeoPointFieldMapper pointMapper, - GeoShapeFieldMapper shapeMapper, FieldMapper stringMapper, Settings indexSettings, + BaseGeoShapeFieldMapper shapeMapper, FieldMapper stringMapper, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, new ExternalFieldType(), indexSettings, multiFields, copyTo); this.generatedValue = generatedValue; @@ -182,8 +185,12 @@ public class ExternalMapper extends FieldMapper { pointMapper.parse(context.createExternalValueContext(point)); // Let's add a Dummy Shape - Point shape = new PointBuilder(-100, 45).buildS4J(); - shapeMapper.parse(context.createExternalValueContext(shape)); + PointBuilder pb = new PointBuilder(-100, 45); + if (shapeMapper instanceof GeoShapeFieldMapper) { + shapeMapper.parse(context.createExternalValueContext(pb.buildLucene())); + } else { + shapeMapper.parse(context.createExternalValueContext(pb.buildS4J())); + } context = context.createExternalValueContext(generatedValue); @@ -210,7 +217,7 @@ public class ExternalMapper extends FieldMapper { BinaryFieldMapper binMapperUpdate = (BinaryFieldMapper) binMapper.updateFieldType(fullNameToFieldType); BooleanFieldMapper boolMapperUpdate = (BooleanFieldMapper) boolMapper.updateFieldType(fullNameToFieldType); GeoPointFieldMapper pointMapperUpdate = (GeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType); - GeoShapeFieldMapper shapeMapperUpdate = (GeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType); + BaseGeoShapeFieldMapper shapeMapperUpdate = (BaseGeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType); TextFieldMapper stringMapperUpdate = (TextFieldMapper) stringMapper.updateFieldType(fullNameToFieldType); if (update == this && multiFieldsUpdate == multiFields diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java index e1158f77bd4..6d47e4a784e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java @@ -21,12 +21,13 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.geo.builders.PointBuilder; +import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.test.ESIntegTestCase; +import org.locationtech.jts.geom.Coordinate; import java.util.Arrays; import java.util.Collection; @@ -118,7 +119,8 @@ public class 
ExternalValuesMapperIntegrationIT extends ESIntegTestCase { assertThat(response.getHits().getTotalHits().value, equalTo((long) 1)); response = client().prepareSearch("test-idx") - .setPostFilter(QueryBuilders.geoShapeQuery("field.shape", new PointBuilder(-100, 45)).relation(ShapeRelation.WITHIN)) + .setPostFilter(QueryBuilders.geoShapeQuery("field.shape", + new EnvelopeBuilder(new Coordinate(-101, 46), new Coordinate(-99, 44))).relation(ShapeRelation.WITHIN)) .execute().actionGet(); assertThat(response.getHits().getTotalHits().value, equalTo((long) 1)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index 20e689e9d7e..a5e2d7c31af 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -18,14 +18,9 @@ */ package org.elasticsearch.index.mapper; -import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; -import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; -import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; -import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -42,7 +37,6 @@ import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.not; public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { @@ -53,10 +47,10 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { public void testDefaultConfiguration() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .endObject().endObject() - .endObject().endObject()); + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .endObject().endObject() + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); @@ -64,12 +58,8 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.025d)); - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoShapeFieldMapper.Defaults.GEOHASH_LEVELS)); - assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION)); + assertThat(geoShapeFieldMapper.fieldType().orientation(), + equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION.value())); } /** @@ -77,11 +67,11 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { */ public void 
testOrientationParsing() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("orientation", "left") - .endObject().endObject() - .endObject().endObject()); + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("orientation", "left") + .endObject().endObject() + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); @@ -95,11 +85,11 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { // explicit right orientation test mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("orientation", "right") - .endObject().endObject() - .endObject().endObject()); + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("orientation", "right") + .endObject().endObject() + .endObject().endObject()); defaultMapper = createIndex("test2").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); @@ -117,11 +107,11 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { */ public void testCoerceParsing() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("coerce", "true") - .endObject().endObject() - .endObject().endObject()); + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("coerce", "true") + .endObject().endObject() + .endObject().endObject()); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); @@ -133,11 +123,11 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { // explicit false coerce test mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("coerce", "false") - .endObject().endObject() - .endObject().endObject()); + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("coerce", "false") + .endObject().endObject() + .endObject().endObject()); defaultMapper = createIndex("test2").mapperService().documentMapperParser() .parse("type1", new CompressedXContent(mapping)); @@ -146,6 +136,7 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { coerce = ((GeoShapeFieldMapper)fieldMapper).coerce().value(); assertThat(coerce, equalTo(false)); + assertFieldWarnings("tree"); } @@ -222,304 +213,45 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { assertThat(ignoreMalformed.value(), equalTo(false)); } - public void testGeohashConfiguration() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "geohash") - .field("tree_levels", "4") - .field("distance_error_pct", "0.1") - .endObject().endObject() - .endObject().endObject()); - DocumentMapper defaultMapper = 
createIndex("test").mapperService().documentMapperParser() - .parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.1)); - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - assertThat(strategy.getGrid().getMaxLevels(), equalTo(4)); - } - - public void testQuadtreeConfiguration() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("tree_levels", "6") - .field("distance_error_pct", "0.5") - .field("points_only", true) - .endObject().endObject() - .endObject().endObject()); - - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() - .parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.5)); - assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); - assertThat(strategy.getGrid().getMaxLevels(), equalTo(6)); - assertThat(strategy.isPointsOnly(), equalTo(true)); - } - - public void testLevelPrecisionConfiguration() throws IOException { - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("tree_levels", "6") - .field("precision", "70m") - .field("distance_error_pct", "0.5") - .endObject().endObject() - .endObject().endObject()); - - - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.5)); - assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); - // 70m is more precise so it wins - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(70d))); - } - - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("tree_levels", "26") - .field("precision", "70m") - .endObject().endObject() - .endObject().endObject()); - - - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) 
fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - // distance_error_pct was not specified so we expect the mapper to take the highest precision between "precision" and - // "tree_levels" setting distErrPct to 0 to guarantee desired precision - assertThat(strategy.getDistErrPct(), equalTo(0.0)); - assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); - // 70m is less precise so it loses - assertThat(strategy.getGrid().getMaxLevels(), equalTo(26)); - } - - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "geohash") - .field("tree_levels", "6") - .field("precision", "70m") - .field("distance_error_pct", "0.5") - .endObject().endObject() - .endObject().endObject()); - - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.5)); - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - // 70m is more precise so it wins - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(70d))); - } - - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "geohash") - .field("tree_levels", GeoUtils.geoHashLevelsForPrecision(70d)+1) - .field("precision", "70m") - .field("distance_error_pct", "0.5") - .endObject().endObject() - .endObject().endObject()); - - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.5)); - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(70d)+1)); - } - - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("tree_levels", GeoUtils.quadTreeLevelsForPrecision(70d)+1) - .field("precision", "70m") - .field("distance_error_pct", "0.5") - .endObject().endObject() - .endObject().endObject()); - - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.5)); - assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); - assertThat(strategy.getGrid().getMaxLevels(), 
equalTo(GeoUtils.quadTreeLevelsForPrecision(70d)+1)); - } - } - - public void testPointsOnlyOption() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "geohash") - .field("points_only", true) - .endObject().endObject() - .endObject().endObject()); - - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() - .parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - assertThat(strategy.isPointsOnly(), equalTo(true)); - } - - public void testLevelDefaults() throws IOException { - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("distance_error_pct", "0.5") - .endObject().endObject() - .endObject().endObject()); - - - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.5)); - assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); - /* 50m is default */ - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(50d))); - } - - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "geohash") - .field("distance_error_pct", "0.5") - .endObject().endObject() - .endObject().endObject()); - - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.5)); - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - /* 50m is default */ - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(50d))); + private void assertFieldWarnings(String... 
fieldNames) { + String[] warnings = new String[fieldNames.length]; + for (int i = 0; i < fieldNames.length; ++i) { + warnings[i] = "Field parameter [" + fieldNames[i] + "] " + + "is deprecated and will be removed in a future version."; } } public void testGeoShapeMapperMerge() throws Exception { String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("shape").field("type", "geo_shape").field("tree", "geohash") - .field("strategy", "recursive") - .field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01) - .field("orientation", "ccw") - .endObject().endObject().endObject().endObject()); + .startObject("shape").field("type", "geo_shape") + .field("orientation", "ccw") + .endObject().endObject().endObject().endObject()); MapperService mapperService = createIndex("test").mapperService(); DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE); String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("shape").field("type", "geo_shape") - .field("tree", "quadtree") - .field("strategy", "term").field("precision", "1km") - .field("tree_levels", 26).field("distance_error_pct", 26) - .field("orientation", "cw").endObject().endObject().endObject().endObject()); - try { - mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [shape] has different [strategy]")); - assertThat(e.getMessage(), containsString("mapper [shape] has different [tree]")); - assertThat(e.getMessage(), containsString("mapper [shape] has different [tree_levels]")); - assertThat(e.getMessage(), containsString("mapper [shape] has different [precision]")); - } + .startObject("properties").startObject("shape").field("type", "geo_shape") + .field("orientation", "cw").endObject().endObject().endObject().endObject()); + mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); // verify nothing changed Mapper fieldMapper = docMapper.mappers().getMapper("shape"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class)); - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - assertThat(strategy.getDistErrPct(), equalTo(0.01)); - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(1d))); assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW)); - // correct mapping + // change mapping; orientation stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m") - .field("tree_levels", 8).field("distance_error_pct", 0.001) - .field("orientation", "cw").endObject().endObject().endObject().endObject()); + .startObject("properties").startObject("shape").field("type", "geo_shape") + .field("orientation", "cw").endObject().endObject().endObject().endObject()); docMapper = mapperService.merge("type", new 
CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); fieldMapper = docMapper.mappers().getMapper("shape"); assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class)); - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - assertThat(strategy.getDistErrPct(), equalTo(0.001)); - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(1d))); assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CW)); } @@ -544,112 +276,12 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") .field("type", "geo_shape") - .field("tree", "quadtree") .endObject().endObject() .endObject().endObject()); DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); - assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\"")); - assertTrue(serialized, serialized.contains("\"tree_levels\":21")); + assertTrue(serialized, serialized.contains("\"orientation\":\"" + BaseGeoShapeFieldMapper.Defaults.ORIENTATION.value() + "\"")); } - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "geohash") - .endObject().endObject() - .endObject().endObject()); - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); - assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\"")); - assertTrue(serialized, serialized.contains("\"tree_levels\":9")); - } - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("tree_levels", "6") - .endObject().endObject() - .endObject().endObject()); - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); - assertFalse(serialized, serialized.contains("\"precision\":")); - assertTrue(serialized, serialized.contains("\"tree_levels\":6")); - } - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("precision", "6") - .endObject().endObject() - .endObject().endObject()); - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); - assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\"")); - assertFalse(serialized, serialized.contains("\"tree_levels\":")); - } - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - 
.startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("precision", "6m") - .field("tree_levels", "5") - .endObject().endObject() - .endObject().endObject()); - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); - assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\"")); - assertTrue(serialized, serialized.contains("\"tree_levels\":5")); - } - } - - public void testPointsOnlyDefaultsWithTermStrategy() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("precision", "10m") - .field("strategy", "term") - .endObject().endObject() - .endObject().endObject()); - - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() - .parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); - - GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.0)); - assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); - assertThat(strategy.getGrid().getMaxLevels(), equalTo(23)); - assertThat(strategy.isPointsOnly(), equalTo(true)); - // term strategy changes the default for points_only, check that we handle it correctly - assertThat(toXContentString(geoShapeFieldMapper, false), not(containsString("points_only"))); - } - - - public void testPointsOnlyFalseWithTermStrategy() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("precision", "10m") - .field("strategy", "term") - .field("points_only", false) - .endObject().endObject() - .endObject().endObject()); - - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type1", new CompressedXContent(mapping)) - ); - assertThat(e.getMessage(), containsString("points_only cannot be set to false for term strategy")); } public String toXContentString(GeoShapeFieldMapper mapper, boolean includeDefaults) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java index a1c225f8a06..c10ec5facf8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java @@ -18,69 +18,23 @@ */ package org.elasticsearch.index.mapper; -import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.index.mapper.GeoShapeFieldMapper.GeoShapeFieldType; import org.junit.Before; -import java.io.IOException; - public class GeoShapeFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { 
- return new GeoShapeFieldMapper.GeoShapeFieldType(); + return new GeoShapeFieldType(); } @Before public void setupProperties() { - addModifier(new Modifier("tree", false) { + addModifier(new FieldTypeTestCase.Modifier("orientation", true) { @Override public void modify(MappedFieldType ft) { - ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setTree("quadtree"); + ((GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT); } }); - addModifier(new Modifier("strategy", false) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setStrategyName("term"); - } - }); - addModifier(new Modifier("tree_levels", false) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setTreeLevels(10); - } - }); - addModifier(new Modifier("precision", false) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setPrecisionInMeters(20); - } - }); - addModifier(new Modifier("distance_error_pct", true) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setDefaultDistanceErrorPct(0.5); - } - }); - addModifier(new Modifier("orientation", true) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT); - } - }); - } - - /** - * Test for {@link GeoShapeFieldType#setStrategyName(String)} that checks that {@link GeoShapeFieldType#pointsOnly()} - * gets set as a side effect when using SpatialStrategy.TERM - */ - public void testSetStrategyName() throws IOException { - GeoShapeFieldType fieldType = new GeoShapeFieldMapper.GeoShapeFieldType(); - assertFalse(fieldType.pointsOnly()); - fieldType.setStrategyName(SpatialStrategy.RECURSIVE.getStrategyName()); - assertFalse(fieldType.pointsOnly()); - fieldType.setStrategyName(SpatialStrategy.TERM.getStrategyName()); - assertTrue(fieldType.pointsOnly()); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java new file mode 100644 index 00000000000..11d8c72531d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java @@ -0,0 +1,714 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.mapper; + +import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; +import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; +import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; +import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; + +import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_VALUE; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; + +public class LegacyGeoShapeFieldMapperTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + + public void testDefaultConfiguration() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("strategy", "recursive") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + assertThat(geoShapeFieldMapper.fieldType().tree(), + equalTo(LegacyGeoShapeFieldMapper.DeprecatedParameters.Defaults.TREE)); + assertThat(geoShapeFieldMapper.fieldType().treeLevels(), + equalTo(LegacyGeoShapeFieldMapper.DeprecatedParameters.Defaults.QUADTREE_LEVELS)); + assertThat(geoShapeFieldMapper.fieldType().pointsOnly(), + equalTo(LegacyGeoShapeFieldMapper.DeprecatedParameters.Defaults.POINTS_ONLY)); + assertThat(geoShapeFieldMapper.fieldType().distanceErrorPct(), + equalTo(LegacyGeoShapeFieldMapper.DeprecatedParameters.Defaults.DISTANCE_ERROR_PCT)); + assertThat(geoShapeFieldMapper.fieldType().orientation(), + equalTo(LegacyGeoShapeFieldMapper.Defaults.ORIENTATION.value())); + assertFieldWarnings("strategy"); + } + + /** + * Test that orientation parameter correctly parses + */ + public void testOrientationParsing() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("orientation", "left") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = 
defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + ShapeBuilder.Orientation orientation = ((LegacyGeoShapeFieldMapper)fieldMapper).fieldType().orientation(); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE)); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT)); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW)); + + // explicit right orientation test + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("orientation", "right") + .endObject().endObject() + .endObject().endObject()); + + defaultMapper = createIndex("test2").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + orientation = ((LegacyGeoShapeFieldMapper)fieldMapper).fieldType().orientation(); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE)); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT)); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW)); + assertFieldWarnings("tree"); + } + + /** + * Test that coerce parameter correctly parses + */ + public void testCoerceParsing() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("coerce", "true") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + boolean coerce = ((LegacyGeoShapeFieldMapper)fieldMapper).coerce().value(); + assertThat(coerce, equalTo(true)); + + // explicit false coerce test + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("coerce", "false") + .endObject().endObject() + .endObject().endObject()); + + defaultMapper = createIndex("test2").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + coerce = ((LegacyGeoShapeFieldMapper)fieldMapper).coerce().value(); + assertThat(coerce, equalTo(false)); + assertFieldWarnings("tree"); + } + + + /** + * Test that accept_z_value parameter correctly parses + */ + public void testIgnoreZValue() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("strategy", "recursive") + .field(IGNORE_Z_VALUE.getPreferredName(), "true") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = 
defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + boolean ignoreZValue = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreZValue().value(); + assertThat(ignoreZValue, equalTo(true)); + + // explicit false accept_z_value test + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field(IGNORE_Z_VALUE.getPreferredName(), "false") + .endObject().endObject() + .endObject().endObject()); + + defaultMapper = createIndex("test2").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + ignoreZValue = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreZValue().value(); + assertThat(ignoreZValue, equalTo(false)); + assertFieldWarnings("strategy", "tree"); + } + + /** + * Test that ignore_malformed parameter correctly parses + */ + public void testIgnoreMalformedParsing() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("ignore_malformed", "true") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + Explicit ignoreMalformed = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreMalformed(); + assertThat(ignoreMalformed.value(), equalTo(true)); + + // explicit false ignore_malformed test + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("ignore_malformed", "false") + .endObject().endObject() + .endObject().endObject()); + + defaultMapper = createIndex("test2").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + ignoreMalformed = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreMalformed(); + assertThat(ignoreMalformed.explicit(), equalTo(true)); + assertThat(ignoreMalformed.value(), equalTo(false)); + assertFieldWarnings("tree"); + } + + public void testGeohashConfiguration() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "geohash") + .field("tree_levels", "4") + .field("distance_error_pct", "0.1") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + 
PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy.getDistErrPct(), equalTo(0.1)); + assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); + assertThat(strategy.getGrid().getMaxLevels(), equalTo(4)); + assertFieldWarnings("tree", "tree_levels", "distance_error_pct"); + } + + public void testQuadtreeConfiguration() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("tree_levels", "6") + .field("distance_error_pct", "0.5") + .field("points_only", true) + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy.getDistErrPct(), equalTo(0.5)); + assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); + assertThat(strategy.getGrid().getMaxLevels(), equalTo(6)); + assertThat(strategy.isPointsOnly(), equalTo(true)); + assertFieldWarnings("tree", "tree_levels", "distance_error_pct", "points_only"); + } + + private void assertFieldWarnings(String... fieldNames) { + String[] warnings = new String[fieldNames.length]; + for (int i = 0; i < fieldNames.length; ++i) { + warnings[i] = "Field parameter [" + fieldNames[i] + "] " + + "is deprecated and will be removed in a future version."; + } + assertWarnings(warnings); + } + + public void testLevelPrecisionConfiguration() throws IOException { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("tree_levels", "6") + .field("precision", "70m") + .field("distance_error_pct", "0.5") + .endObject().endObject() + .endObject().endObject()); + + + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy.getDistErrPct(), equalTo(0.5)); + assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); + // 70m is more precise so it wins + assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(70d))); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("tree_levels", "26") + .field("precision", "70m") + .endObject().endObject() + .endObject().endObject()); + + + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + 
Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + // distance_error_pct was not specified so we expect the mapper to take the highest precision between "precision" and + // "tree_levels" setting distErrPct to 0 to guarantee desired precision + assertThat(strategy.getDistErrPct(), equalTo(0.0)); + assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); + // 70m is less precise so it loses + assertThat(strategy.getGrid().getMaxLevels(), equalTo(26)); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "geohash") + .field("tree_levels", "6") + .field("precision", "70m") + .field("distance_error_pct", "0.5") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy.getDistErrPct(), equalTo(0.5)); + assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); + // 70m is more precise so it wins + assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(70d))); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "geohash") + .field("tree_levels", GeoUtils.geoHashLevelsForPrecision(70d)+1) + .field("precision", "70m") + .field("distance_error_pct", "0.5") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy.getDistErrPct(), equalTo(0.5)); + assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); + assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(70d)+1)); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("tree_levels", GeoUtils.quadTreeLevelsForPrecision(70d)+1) + .field("precision", "70m") + .field("distance_error_pct", "0.5") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper 
= (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy.getDistErrPct(), equalTo(0.5)); + assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); + assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(70d)+1)); + } + assertFieldWarnings("tree", "tree_levels", "precision", "distance_error_pct"); + } + + public void testPointsOnlyOption() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "geohash") + .field("points_only", true) + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); + assertThat(strategy.isPointsOnly(), equalTo(true)); + assertFieldWarnings("tree", "points_only"); + } + + public void testLevelDefaults() throws IOException { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("distance_error_pct", "0.5") + .endObject().endObject() + .endObject().endObject()); + + + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy.getDistErrPct(), equalTo(0.5)); + assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); + /* 50m is default */ + assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(50d))); + } + + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "geohash") + .field("distance_error_pct", "0.5") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy.getDistErrPct(), equalTo(0.5)); + assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); + /* 50m is default */ + assertThat(strategy.getGrid().getMaxLevels(), 
equalTo(GeoUtils.geoHashLevelsForPrecision(50d))); + } + assertFieldWarnings("tree", "distance_error_pct"); + } + + public void testGeoShapeMapperMerge() throws Exception { + String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("shape").field("type", "geo_shape").field("tree", "geohash") + .field("strategy", "recursive") + .field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01) + .field("orientation", "ccw") + .endObject().endObject().endObject().endObject()); + MapperService mapperService = createIndex("test").mapperService(); + DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), + MapperService.MergeReason.MAPPING_UPDATE); + String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("shape").field("type", "geo_shape") + .field("tree", "quadtree") + .field("strategy", "term").field("precision", "1km") + .field("tree_levels", 26).field("distance_error_pct", 26) + .field("orientation", "cw").endObject().endObject().endObject().endObject()); + try { + mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); + fail(); + } catch (IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("mapper [shape] has different [strategy]")); + assertThat(e.getMessage(), containsString("mapper [shape] has different [tree]")); + assertThat(e.getMessage(), containsString("mapper [shape] has different [tree_levels]")); + assertThat(e.getMessage(), containsString("mapper [shape] has different [precision]")); + } + + // verify nothing changed + Mapper fieldMapper = docMapper.mappers().getMapper("shape"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class)); + assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); + assertThat(strategy.getDistErrPct(), equalTo(0.01)); + assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(1d))); + assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW)); + + // correct mapping + stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("shape").field("type", "geo_shape") + .field("tree", "geohash") + .field("strategy", "recursive") + .field("precision", "1m") + .field("tree_levels", 8).field("distance_error_pct", 0.001) + .field("orientation", "cw").endObject().endObject().endObject().endObject()); + docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); + + fieldMapper = docMapper.mappers().getMapper("shape"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class)); + assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); + assertThat(strategy.getDistErrPct(), equalTo(0.001)); + assertThat(strategy.getGrid().getMaxLevels(), 
equalTo(GeoUtils.geoHashLevelsForPrecision(1d))); + assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CW)); + + assertFieldWarnings("tree", "strategy", "precision", "tree_levels", "distance_error_pct"); + } + + public void testEmptyName() throws Exception { + // after 5.x + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("") + .field("type", "geo_shape") + .field("tree", "quadtree") + .endObject().endObject() + .endObject().endObject()); + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type1", new CompressedXContent(mapping)) + ); + assertThat(e.getMessage(), containsString("name cannot be empty string")); + assertFieldWarnings("tree"); + } + + public void testSerializeDefaults() throws Exception { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\"")); + assertTrue(serialized, serialized.contains("\"tree_levels\":21")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "geohash") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\"")); + assertTrue(serialized, serialized.contains("\"tree_levels\":9")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("tree_levels", "6") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertFalse(serialized, serialized.contains("\"precision\":")); + assertTrue(serialized, serialized.contains("\"tree_levels\":6")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("precision", "6") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\"")); + 
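+        // note: the unitless precision "6" above is interpreted in meters, hence the serialized "6.0m"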
assertTrue(serialized, serialized.contains("\"tree_levels\":10")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("precision", "6m") + .field("tree_levels", "5") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\"")); + assertTrue(serialized, serialized.contains("\"tree_levels\":5")); + } + assertFieldWarnings("tree", "tree_levels", "precision"); + } + + public void testPointsOnlyDefaultsWithTermStrategy() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("precision", "10m") + .field("strategy", "term") + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() + .parse("type1", new CompressedXContent(mapping)); + Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); + + LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; + PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); + + assertThat(strategy.getDistErrPct(), equalTo(0.0)); + assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); + assertThat(strategy.getGrid().getMaxLevels(), equalTo(23)); + assertThat(strategy.isPointsOnly(), equalTo(true)); + // term strategy changes the default for points_only, check that we handle it correctly + assertThat(toXContentString(geoShapeFieldMapper, false), not(containsString("points_only"))); + assertFieldWarnings("tree", "precision", "strategy"); + } + + + public void testPointsOnlyFalseWithTermStrategy() throws Exception { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("precision", "10m") + .field("strategy", "term") + .field("points_only", false) + .endObject().endObject() + .endObject().endObject()); + + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, + () -> parser.parse("type1", new CompressedXContent(mapping)) + ); + assertThat(e.getMessage(), containsString("points_only cannot be set to false for term strategy")); + assertFieldWarnings("tree", "precision", "strategy", "points_only"); + } + + public String toXContentString(LegacyGeoShapeFieldMapper mapper, boolean includeDefaults) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + ToXContent.Params params; + if (includeDefaults) { + params = new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true")); + } else { + params = ToXContent.EMPTY_PARAMS; + } + mapper.doXContentBody(builder, includeDefaults, params); + return Strings.toString(builder.endObject()); + } + + public String 
toXContentString(LegacyGeoShapeFieldMapper mapper) throws IOException { + return toXContentString(mapper, true); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldTypeTests.java new file mode 100644 index 00000000000..2fcbed82e33 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldTypeTests.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.geo.SpatialStrategy; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper.GeoShapeFieldType; +import org.junit.Before; + +import java.io.IOException; + +public class LegacyGeoShapeFieldTypeTests extends FieldTypeTestCase { + @Override + protected MappedFieldType createDefaultFieldType() { + return new GeoShapeFieldType(); + } + + @Before + public void setupProperties() { + addModifier(new Modifier("tree", false) { + @Override + public void modify(MappedFieldType ft) { + ((GeoShapeFieldType)ft).setTree("geohash"); + } + }); + addModifier(new Modifier("strategy", false) { + @Override + public void modify(MappedFieldType ft) { + ((GeoShapeFieldType)ft).setStrategy(SpatialStrategy.TERM); + } + }); + addModifier(new Modifier("tree_levels", false) { + @Override + public void modify(MappedFieldType ft) { + ((GeoShapeFieldType)ft).setTreeLevels(10); + } + }); + addModifier(new Modifier("precision", false) { + @Override + public void modify(MappedFieldType ft) { + ((GeoShapeFieldType)ft).setPrecisionInMeters(20); + } + }); + addModifier(new Modifier("distance_error_pct", true) { + @Override + public void modify(MappedFieldType ft) { + ((GeoShapeFieldType)ft).setDefaultDistanceErrorPct(0.5); + } + }); + addModifier(new Modifier("orientation", true) { + @Override + public void modify(MappedFieldType ft) { + ((GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT); + } + }); + } + + /** + * Test for {@link LegacyGeoShapeFieldMapper.GeoShapeFieldType#setStrategy(SpatialStrategy)} that checks + * that {@link LegacyGeoShapeFieldMapper.GeoShapeFieldType#pointsOnly()} gets set as a side effect when using SpatialStrategy.TERM + */ + public void testSetStrategyName() throws IOException { + GeoShapeFieldType fieldType = new GeoShapeFieldType(); + assertFalse(fieldType.pointsOnly()); + fieldType.setStrategy(SpatialStrategy.RECURSIVE); + assertFalse(fieldType.pointsOnly()); + fieldType.setStrategy(SpatialStrategy.TERM); + assertTrue(fieldType.pointsOnly()); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java 
b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index bcd2b4ef144..e2e4db1f9b7 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ - package org.elasticsearch.index.query; import org.apache.lucene.search.BooleanQuery; @@ -29,7 +28,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -54,29 +52,41 @@ import static org.hamcrest.Matchers.equalTo; public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase { - private static String indexedShapeId; - private static String indexedShapeType; - private static String indexedShapePath; - private static String indexedShapeIndex; - private static String indexedShapeRouting; - private static ShapeBuilder indexedShapeToReturn; + protected static String indexedShapeId; + protected static String indexedShapeType; + protected static String indexedShapePath; + protected static String indexedShapeIndex; + protected static String indexedShapeRouting; + protected static ShapeBuilder indexedShapeToReturn; + + @Override + protected boolean enableWarningsCheck() { + return false; + } + + protected String fieldName() { + return GEO_SHAPE_FIELD_NAME; + } @Override protected GeoShapeQueryBuilder doCreateTestQueryBuilder() { return doCreateTestQueryBuilder(randomBoolean()); } - private GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) { - ShapeType shapeType = ShapeType.randomType(random()); + + protected GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) { + // LatLonShape does not support MultiPoint queries + RandomShapeGenerator.ShapeType shapeType = + randomFrom(ShapeType.POINT, ShapeType.LINESTRING, ShapeType.MULTILINESTRING, ShapeType.POLYGON); ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); GeoShapeQueryBuilder builder; clearShapeFields(); if (indexedShape == false) { - builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); + builder = new GeoShapeQueryBuilder(fieldName(), shape); } else { indexedShapeToReturn = shape; indexedShapeId = randomAlphaOfLengthBetween(3, 20); indexedShapeType = randomAlphaOfLengthBetween(3, 20); - builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, indexedShapeId, indexedShapeType); + builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId, indexedShapeType); if (randomBoolean()) { indexedShapeIndex = randomAlphaOfLengthBetween(3, 20); builder.indexedShapeIndex(indexedShapeIndex); @@ -91,15 +101,11 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, null)); + expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(fieldName(), null)); } public void testNoIndexedShape() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, null, "type")); + () -> new GeoShapeQueryBuilder(fieldName(), null, "type")); 
assertEquals("either shapeBytes or indexedShapeId and indexedShapeType are required", e.getMessage()); } public void testNoIndexedShapeType() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, "id", null)); + () -> new GeoShapeQueryBuilder(fieldName(), "id", null)); assertEquals("indexedShapeType is required if indexedShapeId is specified", e.getMessage()); } public void testNoRelation() throws IOException { ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); - GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); + GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(fieldName(), shape); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.relation(null)); assertEquals("No Shape Relation defined", e.getMessage()); } - public void testInvalidRelation() throws IOException { - ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); - GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); - builder.strategy(SpatialStrategy.TERM); - expectThrows(IllegalArgumentException.class, () -> builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN))); - GeoShapeQueryBuilder builder2 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); - builder2.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)); - expectThrows(IllegalArgumentException.class, () -> builder2.strategy(SpatialStrategy.TERM)); - GeoShapeQueryBuilder builder3 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); - builder3.strategy(SpatialStrategy.TERM); - expectThrows(IllegalArgumentException.class, () -> builder3.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN))); - } - // see #3878 public void testThatXContentSerializationInsideOfArrayWorks() throws Exception { EnvelopeBuilder envelopeBuilder = new EnvelopeBuilder(new Coordinate(0, 0), new Coordinate(10, 10)); @@ -205,7 +198,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase query.toQuery(createShardContext())); assertEquals("query must be rewritten first", e.getMessage()); QueryBuilder rewrite = rewriteAndFetch(query, createShardContext()); - GeoShapeQueryBuilder geoShapeQueryBuilder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, indexedShapeToReturn); + GeoShapeQueryBuilder geoShapeQueryBuilder = new GeoShapeQueryBuilder(fieldName(), indexedShapeToReturn); geoShapeQueryBuilder.strategy(query.strategy()); geoShapeQueryBuilder.relation(query.relation()); assertEquals(geoShapeQueryBuilder, rewrite); @@ -244,7 +237,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); + GeoShapeQueryBuilder builder; + clearShapeFields(); + if (indexedShape == false) { + builder = new GeoShapeQueryBuilder(fieldName(), shape); + } else { + indexedShapeToReturn = shape; + indexedShapeId = randomAlphaOfLengthBetween(3, 20); + indexedShapeType = randomAlphaOfLengthBetween(3, 20); + builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId, indexedShapeType); + if (randomBoolean()) { + indexedShapeIndex = randomAlphaOfLengthBetween(3, 20); + builder.indexedShapeIndex(indexedShapeIndex); + } + if (randomBoolean()) { + indexedShapePath = randomAlphaOfLengthBetween(3, 20); + builder.indexedShapePath(indexedShapePath); + } + if (randomBoolean()) { + indexedShapeRouting = 
randomAlphaOfLengthBetween(3, 20); + builder.indexedShapeRouting(indexedShapeRouting); + } + } + if (randomBoolean()) { + SpatialStrategy strategy = randomFrom(SpatialStrategy.values()); + // ShapeType.MULTILINESTRING + SpatialStrategy.TERM can lead to large queries and will slow down tests, so + // we try to avoid that combination + while (shapeType == ShapeType.MULTILINESTRING && strategy == SpatialStrategy.TERM) { + strategy = randomFrom(SpatialStrategy.values()); + } + builder.strategy(strategy); + if (strategy != SpatialStrategy.TERM) { + builder.relation(randomFrom(ShapeRelation.values())); + } + } + + if (randomBoolean()) { + builder.ignoreUnmapped(randomBoolean()); + } + return builder; + } + + public void testInvalidRelation() throws IOException { + ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null); + GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); + builder.strategy(SpatialStrategy.TERM); + expectThrows(IllegalArgumentException.class, () -> builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN))); + GeoShapeQueryBuilder builder2 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); + builder2.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)); + expectThrows(IllegalArgumentException.class, () -> builder2.strategy(SpatialStrategy.TERM)); + GeoShapeQueryBuilder builder3 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); + builder3.strategy(SpatialStrategy.TERM); + expectThrows(IllegalArgumentException.class, () -> builder3.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN))); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 1067ed62db4..184ee2759c1 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -62,6 +62,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; public class MatchQueryBuilderTests extends AbstractQueryTestCase { + @Override protected MatchQueryBuilder doCreateTestQueryBuilder() { String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME, diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 70f504516ec..1c34057457a 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -1048,6 +1048,12 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase 0); + } + + /** tests querying a random geometry collection with a point */ + public void testPointQuery() throws Exception { + // Create a random geometry collection to index. 
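+        // A random point is added to the collection and indexed with it, so the INTERSECTS
+        // query against that exact point below is guaranteed to match the document.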
+ GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(random()); + double[] pt = new double[] {GeoTestUtil.nextLongitude(), GeoTestUtil.nextLatitude()}; + PointBuilder pb = new PointBuilder(pt[0], pt[1]); + gcb.shape(pb); + if (randomBoolean()) { + client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape") + .execute().actionGet(); + } else { + client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape,tree=quadtree") + .execute().actionGet(); + } + XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("location"), null).endObject(); + client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + + GeoShapeQueryBuilder geoShapeQueryBuilder = QueryBuilders.geoShapeQuery("location", pb); + geoShapeQueryBuilder.relation(ShapeRelation.INTERSECTS); + SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(geoShapeQueryBuilder).get(); assertSearchResponse(result); assertHitCount(result, 1); } @@ -375,6 +461,28 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase { assertThat(response.getHits().getTotalHits().value, greaterThan(0L)); } + public void testExistsQuery() throws Exception { + // Create a random geometry collection. + GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(random()); + logger.info("Created Random GeometryCollection containing {} shapes", gcb.numShapes()); + + if (randomBoolean()) { + client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape") + .execute().actionGet(); + } else { + client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape,tree=quadtree") + .execute().actionGet(); + } + + XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("location"), null).endObject(); + client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); + + ExistsQueryBuilder eqb = QueryBuilders.existsQuery("location"); + SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(eqb).get(); + assertSearchResponse(result); + assertHitCount(result, 1); + } + public void testShapeFilterWithDefinedGeoCollection() throws Exception { createIndex("shapes"); client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape,tree=quadtree") diff --git a/server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java new file mode 100644 index 00000000000..574bdd46bba --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java @@ -0,0 +1,170 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.geo; + +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.test.ESIntegTestCase; + +import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class LegacyGeoShapeIntegrationIT extends ESIntegTestCase { + + /** + * Test that orientation parameter correctly persists across cluster restart + */ + public void testOrientationPersistence() throws Exception { + String idxName = "orientation"; + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("shape") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("orientation", "left") + .endObject().endObject() + .endObject().endObject()); + + // create index + assertAcked(prepareCreate(idxName).addMapping("shape", mapping, XContentType.JSON)); + + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("shape") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("orientation", "right") + .endObject().endObject() + .endObject().endObject()); + + assertAcked(prepareCreate(idxName+"2").addMapping("shape", mapping, XContentType.JSON)); + ensureGreen(idxName, idxName+"2"); + + internalCluster().fullRestart(); + ensureGreen(idxName, idxName+"2"); + + // left orientation test + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName)); + IndexService indexService = indicesService.indexService(resolveIndex(idxName)); + MappedFieldType fieldType = indexService.mapperService().fullName("location"); + assertThat(fieldType, instanceOf(LegacyGeoShapeFieldMapper.GeoShapeFieldType.class)); + + LegacyGeoShapeFieldMapper.GeoShapeFieldType gsfm = (LegacyGeoShapeFieldMapper.GeoShapeFieldType)fieldType; + ShapeBuilder.Orientation orientation = gsfm.orientation(); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE)); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT)); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW)); + + // right orientation test + indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName+"2")); + indexService = indicesService.indexService(resolveIndex((idxName+"2"))); + fieldType = indexService.mapperService().fullName("location"); + assertThat(fieldType, instanceOf(LegacyGeoShapeFieldMapper.GeoShapeFieldType.class)); + + gsfm = (LegacyGeoShapeFieldMapper.GeoShapeFieldType)fieldType; + orientation = gsfm.orientation(); + assertThat(orientation, 
equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE)); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT)); + assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW)); + } + + /** + * Test that ignore_malformed on GeoShapeFieldMapper does not fail the entire document + */ + public void testIgnoreMalformed() throws Exception { + // create index + assertAcked(client().admin().indices().prepareCreate("test") + .addMapping("geometry", "shape", "type=geo_shape,tree=quadtree,ignore_malformed=true").get()); + ensureGreen(); + + // test self crossing ccw poly not crossing dateline + String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") + .startArray("coordinates") + .startArray() + .startArray().value(176.0).value(15.0).endArray() + .startArray().value(-177.0).value(10.0).endArray() + .startArray().value(-177.0).value(-10.0).endArray() + .startArray().value(176.0).value(-15.0).endArray() + .startArray().value(-177.0).value(15.0).endArray() + .startArray().value(172.0).value(0.0).endArray() + .startArray().value(176.0).value(15.0).endArray() + .endArray() + .endArray() + .endObject()); + + indexRandom(true, client().prepareIndex("test", "geometry", "0").setSource("shape", + polygonGeoJson)); + SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + } + + /** + * Test that the indexed shape routing can be provided if it is required + */ + public void testIndexShapeRouting() throws Exception { + String mapping = "{\n" + + " \"_routing\": {\n" + + " \"required\": true\n" + + " },\n" + + " \"properties\": {\n" + + " \"shape\": {\n" + + " \"type\": \"geo_shape\",\n" + + " \"tree\" : \"quadtree\"\n" + + " }\n" + + " }\n" + + " }"; + + + // create index + assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping, XContentType.JSON).get()); + ensureGreen(); + + String source = "{\n" + + " \"shape\" : {\n" + + " \"type\" : \"bbox\",\n" + + " \"coordinates\" : [[-45.0, 45.0], [45.0, -45.0]]\n" + + " }\n" + + "}"; + + indexRandom(true, client().prepareIndex("test", "doc", "0").setSource(source, XContentType.JSON).setRouting("ABC")); + + SearchResponse searchResponse = client().prepareSearch("test").setQuery( + geoShapeQuery("shape", "0", "doc").indexedShapeIndex("test").indexedShapeRouting("ABC") + ).get(); + + assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + } + + private String findNodeName(String index) { + ClusterState state = client().admin().cluster().prepareState().get().getState(); + IndexShardRoutingTable shard = state.getRoutingTable().index(index).shard(0); + String nodeId = shard.assignedShards().get(0).currentNodeId(); + return state.getNodes().get(nodeId).getName(); + } +} diff --git a/server/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/server/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java index 76d18a59f9f..0d964e8eb6f 100644 --- a/server/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java +++ b/server/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.geo.builders.MultiPointBuilder; import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.search.geo.GeoShapeQueryTests; import 
org.junit.Assert; import org.locationtech.spatial4j.context.jts.JtsSpatialContext; import org.locationtech.spatial4j.distance.DistanceUtils; @@ -153,6 +154,7 @@ public class RandomShapeGenerator extends RandomGeoGenerator { /** * Creates a random shape useful for randomized testing, NOTE: exercise caution when using this to build random GeometryCollections * as creating a large random number of random shapes can result in massive resource consumption + * see: {@link GeoShapeQueryTests#testQueryRandomGeoCollection()} * * The following options are included * @param nearPoint Create a shape near a provided point diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index 5eef0a249b6..daf29e46b05 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -113,6 +113,7 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; protected static final String GEO_POINT_ALIAS_FIELD_NAME = "mapped_geo_point_alias"; protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; + protected static final String LEGACY_GEO_SHAPE_FIELD_NAME = "mapped_legacy_geo_shape"; protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME, @@ -217,12 +218,28 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { AbstractBuilderTestCase.this, false); return null; }); + if (enableWarningsCheck() == true) { + assertDeprecatedGeoWarnings(); + } } serviceHolder.clientInvocationHandler.delegate = this; serviceHolderWithNoType.clientInvocationHandler.delegate = this; } + protected void assertDeprecatedGeoWarnings() { + String prefix = "Field parameter ["; + String postfix = "] is deprecated and will be removed in a future version."; + String[] deprecationWarnings = new String[] { + prefix + "tree" + postfix, + prefix + "tree_levels" + postfix, + prefix + "precision" + postfix, + prefix + "strategy" + postfix, + prefix + "distance_error_pct" + postfix + }; + assertWarnings(deprecationWarnings); + } + protected static SearchContext getSearchContext(QueryShardContext context) { TestSearchContext testSearchContext = new TestSearchContext(context) { @Override @@ -396,7 +413,8 @@ public abstract class AbstractBuilderTestCase extends ESTestCase { OBJECT_FIELD_NAME, "type=object", GEO_POINT_FIELD_NAME, "type=geo_point", GEO_POINT_ALIAS_FIELD_NAME, "type=alias,path=" + GEO_POINT_FIELD_NAME, - GEO_SHAPE_FIELD_NAME, "type=geo_shape" + GEO_SHAPE_FIELD_NAME, "type=geo_shape", + LEGACY_GEO_SHAPE_FIELD_NAME, "type=geo_shape,tree=quadtree" ))), MapperService.MergeReason.MAPPING_UPDATE); // also add mappings for two inner field in the object field mapperService.merge("_doc", new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\"," From 75bfbe92eec7ee69e3cc3bfccaa39fbc78489e00 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 17 Dec 2018 22:34:27 +0100 Subject: [PATCH 20/26] TESTS:Debug Log. 
IndexStatsIT#testFilterCacheStats --- .../test/java/org/elasticsearch/indices/stats/IndexStatsIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 152429ae435..5fb67a64d9d 100644 --- a/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -62,6 +62,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; import java.util.ArrayList; @@ -1007,6 +1008,7 @@ public class IndexStatsIT extends ESIntegTestCase { assertEquals(total, shardTotal); } + @TestLogging("_root:DEBUG") // this fails at a very low rate on CI: https://github.com/elastic/elasticsearch/issues/32506 public void testFilterCacheStats() throws Exception { Settings settings = Settings.builder().put(indexSettings()).put("number_of_replicas", 0).build(); assertAcked(prepareCreate("index").setSettings(settings).get()); From 384757deffb1bd88781c2d5c3459ca25f15792a3 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Mon, 17 Dec 2018 16:25:11 -0600 Subject: [PATCH 21/26] ingest: support default pipelines + bulk upserts (#36618) This commit adds support for bulk upserts to use an index's default pipeline. Bulk upsert, doc_as_upsert, and script_as_upsert are all supported. However, bulk script_as_upsert has slightly surprising behavior since the pipeline is executed _before_ the script is evaluated. This means that the pipeline only has access to the data found in the upsert field of the script_as_upsert. The non-bulk script_as_upsert (existing behavior) runs the pipeline _after_ the script is executed. This commit does _not_ attempt to consolidate the bulk and non-bulk behavior for script_as_upsert. This commit also adds additional testing for the non-bulk behavior, which remains unchanged with this commit.
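For illustration, a minimal sketch of a bulk scripted upsert that picks up an index's default pipeline (the index name, field values, and script body below are illustrative only, not part of this change); the pipeline runs against the upsert source, and the script is evaluated afterwards:

    import java.util.Collections;
    import org.elasticsearch.action.bulk.BulkRequest;
    import org.elasticsearch.action.index.IndexRequest;
    import org.elasticsearch.action.update.UpdateRequest;
    import org.elasticsearch.script.Script;

    // assumes index "test" was created with index.default_pipeline set
    BulkRequest bulkRequest = new BulkRequest();
    IndexRequest upsertDoc = new IndexRequest("test", "type", "1")
        .source(Collections.singletonMap("bytes_source_field", "1kb")); // the only data the pipeline sees
    bulkRequest.add(new UpdateRequest("test", "type", "1")
        .upsert(upsertDoc)
        .script(new Script("ctx._source.ran_script = true")) // evaluated after the pipeline has run
        .scriptedUpsert(true));
    // then execute with client.bulk(bulkRequest), where client is an org.elasticsearch.client.Client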
fixes #36219 --- .../test/ingest/200_default_pipeline.yml | 101 ++++++++++++++++-- .../action/bulk/TransportBulkAction.java | 30 ++++-- .../elasticsearch/ingest/IngestService.java | 10 +- .../bulk/TransportBulkActionIngestTests.java | 54 ++++++++++ .../action/bulk/TransportBulkActionTests.java | 21 ++++ 5 files changed, 196 insertions(+), 20 deletions(-) diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml index 4695991f3c3..d4b39c5e99a 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/200_default_pipeline.yml @@ -23,7 +23,7 @@ teardown: ] } - match: { acknowledged: true } - +# default pipeline via index - do: indices.create: index: test @@ -48,7 +48,7 @@ teardown: id: 1 - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } - +# default pipeline via alias - do: index: index: test_alias @@ -63,12 +63,101 @@ teardown: id: 2 - match: { _source.bytes_source_field: "1kb" } - match: { _source.bytes_target_field: 1024 } +# default pipeline via upsert + - do: + update: + index: test + type: test + id: 3 + body: + script: + source: "ctx._source.ran_script = true" + lang: "painless" + upsert: { "bytes_source_field":"1kb" } + - do: + get: + index: test + type: test + id: 3 + - match: { _source.bytes_source_field: "1kb" } + - match: { _source.bytes_target_field: 1024 } +# default pipeline via scripted upsert + - do: + update: + index: test + type: test + id: 4 + body: + script: + source: "ctx._source.bytes_source_field = '1kb'" + lang: "painless" + upsert : {} + scripted_upsert: true + - do: + get: + index: test + type: test + id: 4 + - match: { _source.bytes_source_field: "1kb" } + - match: { _source.bytes_target_field: 1024 } +# default pipeline via doc_as_upsert + - do: + update: + index: test + type: test + id: 5 + body: + doc: { "bytes_source_field":"1kb" } + doc_as_upsert: true + - do: + get: + index: test + type: test + id: 5 + - match: { _source.bytes_source_field: "1kb" } + - match: { _source.bytes_target_field: 1024 } +# default pipeline via bulk upsert +# note - bulk scripted upserts execute the pipeline before the script, so any data referenced by the pipeline +# needs to be in the upsert, not the script + - do: + bulk: + refresh: true + body: | + {"update":{"_id":"6","_index":"test","_type":"test"}} + {"script":"ctx._source.ran_script = true","upsert":{"bytes_source_field":"1kb"}} + {"update":{"_id":"7","_index":"test","_type":"test"}} + {"doc":{"bytes_source_field":"2kb"}, "doc_as_upsert":true} + {"update":{"_id":"8","_index":"test","_type":"test"}} + {"script": "ctx._source.ran_script = true","upsert":{"bytes_source_field":"3kb"}, "scripted_upsert" : true} + - do: + mget: + body: + docs: + - { _index: "test", _type: "_doc", _id: "6" } + - { _index: "test", _type: "_doc", _id: "7" } + - { _index: "test", _type: "_doc", _id: "8" } + - match: { docs.0._index: "test" } + - match: { docs.0._id: "6" } + - match: { docs.0._source.bytes_source_field: "1kb" } + - match: { docs.0._source.bytes_target_field: 1024 } + - is_false: docs.0._source.ran_script + - match: { docs.1._index: "test" } + - match: { docs.1._id: "7" } + - match: { docs.1._source.bytes_source_field: "2kb" } + - match: { docs.1._source.bytes_target_field: 2048 } + - match: { docs.2._index: "test" } + - match: {
docs.2._id: "8" } + - match: { docs.2._source.bytes_source_field: "3kb" } + - match: { docs.2._source.bytes_target_field: 3072 } + - match: { docs.2._source.ran_script: true } + +# explicit no default pipeline - do: index: index: test type: test - id: 3 + id: 9 pipeline: "_none" body: {bytes_source_field: "1kb"} @@ -76,15 +165,15 @@ teardown: get: index: test type: test - id: 3 + id: 9 - match: { _source.bytes_source_field: "1kb" } - is_false: _source.bytes_target_field - +# bad request - do: catch: bad_request index: index: test type: test - id: 4 + id: 10 pipeline: "" body: {bytes_source_field: "1kb"} diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index fa294a1bb2b..a89d162979f 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -127,6 +127,24 @@ public class TransportBulkAction extends HandledTransportAction listener) { final long startTime = relativeTime(); @@ -207,12 +225,12 @@ public class TransportBulkAction extends HandledTransportAction indicesMetaData = metaData.indices(); for (DocWriteRequest actionRequest : bulkRequest.requests) { - if (actionRequest instanceof IndexRequest) { - IndexRequest indexRequest = (IndexRequest) actionRequest; + IndexRequest indexRequest = getIndexWriteRequest(actionRequest); + if(indexRequest != null){ String pipeline = indexRequest.getPipeline(); if (pipeline == null) { - IndexMetaData indexMetaData = indicesMetaData.get(indexRequest.index()); - if (indexMetaData == null) { + IndexMetaData indexMetaData = indicesMetaData.get(actionRequest.index()); + if (indexMetaData == null && indexRequest.index() != null) { //check the alias AliasOrIndex indexOrAlias = metaData.getAliasAndIndexLookup().get(indexRequest.index()); if (indexOrAlias != null && indexOrAlias.isAlias()) { @@ -626,7 +644,7 @@ public class TransportBulkAction extends HandledTransportAction actionRequest : actionRequests) { - IndexRequest indexRequest = null; - if (actionRequest instanceof IndexRequest) { - indexRequest = (IndexRequest) actionRequest; - } else if (actionRequest instanceof UpdateRequest) { - UpdateRequest updateRequest = (UpdateRequest) actionRequest; - indexRequest = updateRequest.docAsUpsert() ? 
updateRequest.doc() : updateRequest.upsertRequest(); - } + IndexRequest indexRequest = TransportBulkAction.getIndexWriteRequest(actionRequest); if (indexRequest == null) { continue; } diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index f25f8844153..219aee9ebe2 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; +import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateApplier; @@ -408,6 +409,57 @@ public class TransportBulkActionIngestTests extends ESTestCase { validateDefaultPipeline(new IndexRequest(WITH_DEFAULT_PIPELINE_ALIAS, "type", "id")); } + public void testUseDefaultPipelineWithBulkUpsert() throws Exception { + Exception exception = new Exception("fake exception"); + BulkRequest bulkRequest = new BulkRequest(); + IndexRequest indexRequest1 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id1").source(Collections.emptyMap()); + IndexRequest indexRequest2 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id2").source(Collections.emptyMap()); + IndexRequest indexRequest3 = new IndexRequest(WITH_DEFAULT_PIPELINE, "type", "id3").source(Collections.emptyMap()); + UpdateRequest upsertRequest = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id1").upsert(indexRequest1).script(mockScript("1")); + UpdateRequest docAsUpsertRequest = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id2").doc(indexRequest2).docAsUpsert(true); + // this test only covers the mechanics that scripted bulk upserts will execute a default pipeline. However, in practice scripted + // bulk upserts with a default pipeline are a bit surprising since the script executes AFTER the pipeline. 
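+        // i.e. the pipeline only sees the fields present in the upsert document; the script's modifications are applied after the pipeline has run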
+ UpdateRequest scriptedUpsert = new UpdateRequest(WITH_DEFAULT_PIPELINE, "type", "id2").upsert(indexRequest3).script(mockScript("1")) + .scriptedUpsert(true); + bulkRequest.add(upsertRequest).add(docAsUpsertRequest).add(scriptedUpsert); + + AtomicBoolean responseCalled = new AtomicBoolean(false); + AtomicBoolean failureCalled = new AtomicBoolean(false); + assertNull(indexRequest1.getPipeline()); + assertNull(indexRequest2.getPipeline()); + assertNull(indexRequest3.getPipeline()); + action.execute(null, bulkRequest, ActionListener.wrap( + response -> { + BulkItemResponse itemResponse = response.iterator().next(); + assertThat(itemResponse.getFailure().getMessage(), containsString("fake exception")); + responseCalled.set(true); + }, + e -> { + assertThat(e, sameInstance(exception)); + failureCalled.set(true); + })); + + // check failure works, and passes through to the listener + assertFalse(action.isExecuted); // haven't executed yet + assertFalse(responseCalled.get()); + assertFalse(failureCalled.get()); + verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any()); + assertEquals(indexRequest1.getPipeline(), "default_pipeline"); + assertEquals(indexRequest2.getPipeline(), "default_pipeline"); + assertEquals(indexRequest3.getPipeline(), "default_pipeline"); + completionHandler.getValue().accept(exception); + assertTrue(failureCalled.get()); + + // now check success of the transport bulk action + indexRequest1.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing + indexRequest2.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing + indexRequest3.setPipeline(IngestService.NOOP_PIPELINE_NAME); // this is done by the real pipeline execution service when processing + completionHandler.getValue().accept(null); + assertTrue(action.isExecuted); + assertFalse(responseCalled.get()); // listener would only be called by real index action, not our mocked one + verifyZeroInteractions(transportService); + } + public void testCreateIndexBeforeRunPipeline() throws Exception { Exception exception = new Exception("fake exception"); IndexRequest indexRequest = new IndexRequest("missing_index", "type", "id"); @@ -445,6 +497,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { indexRequest.source(Collections.emptyMap()); AtomicBoolean responseCalled = new AtomicBoolean(false); AtomicBoolean failureCalled = new AtomicBoolean(false); + assertNull(indexRequest.getPipeline()); singleItemBulkWriteAction.execute(null, indexRequest, ActionListener.wrap( response -> { responseCalled.set(true); @@ -459,6 +512,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { assertFalse(responseCalled.get()); assertFalse(failureCalled.get()); verify(ingestService).executeBulkRequest(bulkDocsItr.capture(), failureHandler.capture(), completionHandler.capture(), any()); + assertEquals(indexRequest.getPipeline(), "default_pipeline"); completionHandler.getValue().accept(exception); assertTrue(failureCalled.get()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index a058cf47741..162ef56553d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -23,8 +23,10 
@@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.bulk.TransportBulkActionTookTests.Resolver; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.AutoCreateIndex; +import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -132,4 +134,23 @@ public class TransportBulkActionTests extends ESTestCase { throw new AssertionError(exception); })); } + + public void testGetIndexWriteRequest() throws Exception { + IndexRequest indexRequest = new IndexRequest("index", "type", "id1").source(Collections.emptyMap()); + UpdateRequest upsertRequest = new UpdateRequest("index", "type", "id1").upsert(indexRequest).script(mockScript("1")); + UpdateRequest docAsUpsertRequest = new UpdateRequest("index", "type", "id2").doc(indexRequest).docAsUpsert(true); + UpdateRequest scriptedUpsert = new UpdateRequest("index", "type", "id2").upsert(indexRequest).script(mockScript("1")) + .scriptedUpsert(true); + + assertEquals(TransportBulkAction.getIndexWriteRequest(indexRequest), indexRequest); + assertEquals(TransportBulkAction.getIndexWriteRequest(upsertRequest), indexRequest); + assertEquals(TransportBulkAction.getIndexWriteRequest(docAsUpsertRequest), indexRequest); + assertEquals(TransportBulkAction.getIndexWriteRequest(scriptedUpsert), indexRequest); + + DeleteRequest deleteRequest = new DeleteRequest("index", "id"); + assertNull(TransportBulkAction.getIndexWriteRequest(deleteRequest)); + + UpdateRequest badUpsertRequest = new UpdateRequest("index", "type", "id1"); + assertNull(TransportBulkAction.getIndexWriteRequest(badUpsertRequest)); + } } From f0f2b261595e2f5ea48736eac7a4e4121dd20b2c Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 17 Dec 2018 17:42:58 -0500 Subject: [PATCH 22/26] Fix duplicate phrase in shrink/split error message (#36734) This commit removes a duplicate "must be a" from the shrink/split error messages. 
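For context, the check that produces these messages verifies that the source shard count
evenly divides the target count (split) or is an exact multiple of it (shrink). Below is a
minimal standalone sketch mirroring the IndexMetaData hunks in this patch; the class and
method names here are illustrative only, not part of the change:

// Hypothetical sketch of the shrink/split shard-count validation whose
// error messages this commit de-duplicates; the real code lives in IndexMetaData.
public final class ShardCountCheck {

    static void check(int sourceShards, int targetShards) {
        if (sourceShards < targetShards) { // split: source count must evenly divide target count
            int factor = targetShards / sourceShards;
            if (factor * sourceShards != targetShards || factor <= 1) {
                throw new IllegalArgumentException("the number of source shards [" + sourceShards
                    + "] must be a factor of [" + targetShards + "]");
            }
        } else if (sourceShards > targetShards) { // shrink: source count must be an exact multiple
            int factor = sourceShards / targetShards;
            if (factor * targetShards != sourceShards || factor <= 1) {
                throw new IllegalArgumentException("the number of source shards [" + sourceShards
                    + "] must be a multiple of [" + targetShards + "]");
            }
        }
    }

    public static void main(String[] args) {
        check(2, 4); // ok: splitting 2 -> 4 shards (factor 2)
        check(8, 2); // ok: shrinking 8 -> 2 shards (factor 4)
        try {
            check(2, 3); // throws: 2 is not a factor of 3
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // prints the corrected, de-duplicated message
        }
    }
}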
---
 .../org/elasticsearch/cluster/metadata/IndexMetaData.java  | 4 ++--
 .../elasticsearch/cluster/metadata/IndexMetaDataTests.java | 4 ++--
 .../cluster/metadata/MetaDataCreateIndexServiceTests.java  | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
index f1dd843d798..5d23971dddb 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
@@ -1531,14 +1531,14 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
         if (sourceNumberOfShards < targetNumberOfShards) { // split
             factor = targetNumberOfShards / sourceNumberOfShards;
             if (factor * sourceNumberOfShards != targetNumberOfShards || factor <= 1) {
-                throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a must be a " +
+                throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a " +
                     "factor of [" + targetNumberOfShards + "]");
             }
         } else if (sourceNumberOfShards > targetNumberOfShards) { // shrink
             factor = sourceNumberOfShards / targetNumberOfShards;
             if (factor * targetNumberOfShards != sourceNumberOfShards || factor <= 1) {
-                throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a must be a " +
+                throw new IllegalArgumentException("the number of source shards [" + sourceNumberOfShards + "] must be a " +
                     "multiple of [" + targetNumberOfShards + "]");
             }
 }
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java
index 393f7f6b1d4..1fdea596afb 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java
@@ -227,7 +227,7 @@ public class IndexMetaDataTests extends ESTestCase {
         assertEquals("the number of target shards (0) must be greater than the shard id: 0",
             expectThrows(IllegalArgumentException.class, () -> IndexMetaData.selectSplitShard(0, metaData, 0)).getMessage());
-        assertEquals("the number of source shards [2] must be a must be a factor of [3]",
+        assertEquals("the number of source shards [2] must be a factor of [3]",
             expectThrows(IllegalArgumentException.class, () -> IndexMetaData.selectSplitShard(0, metaData, 3)).getMessage());
         assertEquals("the number of routing shards [4] must be a multiple of the target shards [8]",
@@ -285,6 +285,6 @@ public class IndexMetaDataTests extends ESTestCase {
         Settings notAFactorySettings = Settings.builder().put("index.number_of_shards", 2).put("index.number_of_routing_shards", 3).build();
         iae = expectThrows(IllegalArgumentException.class,
             () -> IndexMetaData.INDEX_NUMBER_OF_ROUTING_SHARDS_SETTING.get(notAFactorySettings));
-        assertEquals("the number of source shards [2] must be a must be a factor of [3]", iae.getMessage());
+        assertEquals("the number of source shards [2] must be a factor of [3]", iae.getMessage());
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java
index 6cbd83e5b24..ec89e085f07 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java
@@ -154,7 +154,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
                 MetaDataCreateIndexService.validateShrinkIndex(state, "source", Collections.emptySet(), "target", targetSettings)
             ).getMessage());
-        assertEquals("the number of source shards [8] must be a must be a multiple of [3]",
+        assertEquals("the number of source shards [8] must be a multiple of [3]",
             expectThrows(IllegalArgumentException.class, () ->
                 MetaDataCreateIndexService.validateShrinkIndex(createClusterState("source", 8, randomIntBetween(0, 10),
                     Settings.builder().put("index.blocks.write", true).build()), "source", Collections.emptySet(), "target",
@@ -221,7 +221,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase {
             ).getMessage());
-        assertEquals("the number of source shards [3] must be a must be a factor of [4]",
+        assertEquals("the number of source shards [3] must be a factor of [4]",
             expectThrows(IllegalArgumentException.class, () ->
                 MetaDataCreateIndexService.validateSplitIndex(createClusterState("source", 3, randomIntBetween(0, 10),
                     Settings.builder().put("index.blocks.write", true).build()), "source", Collections.emptySet(), "target",

From 2f5300e3a626c3dbbebe5122d0e724cc1aa8f551 Mon Sep 17 00:00:00 2001
From: Christoph Büscher
Date: Tue, 18 Dec 2018 00:57:42 +0100
Subject: [PATCH 23/26] Deprecate types in get_source and exist_source (#36426)

This change adds a new untyped endpoint `{index}/_source/{id}` for both the
GET and the HEAD methods, used to get the source of a document or check for
its existence. It also adds a deprecation warning to RestGetSourceAction that
is emitted when the old, deprecated "type" parameter is still used, and
updates documentation and tests where appropriate.
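For illustration, the endpoint selection this change introduces can be sketched as below.
This is a hypothetical helper, not the actual RequestConverters code; it assumes the
typeless placeholder type `_doc` (MapperService.SINGLE_MAPPING_NAME):

// Hypothetical sketch of the typeless-vs-typed endpoint choice; the real
// client-side logic lives in RequestConverters.sourceExists (see diff below).
public final class SourceEndpointSketch {

    // The placeholder type carried by typeless requests (MapperService.SINGLE_MAPPING_NAME).
    private static final String SINGLE_MAPPING_NAME = "_doc";

    static String sourceEndpoint(String index, String type, String id) {
        if (SINGLE_MAPPING_NAME.equals(type)) {
            // new typeless endpoint added by this change
            return "/" + index + "/_source/" + id;
        }
        // old typed endpoint, kept for backwards compatibility but deprecated
        return "/" + index + "/" + type + "/" + id + "/_source";
    }

    public static void main(String[] args) {
        System.out.println(sourceEndpoint("twitter", "_doc", "1"));  // -> /twitter/_source/1
        System.out.println(sourceEndpoint("twitter", "tweet", "1")); // -> /twitter/tweet/1/_source
    }
}

Requests that still carry an explicit type keep the old typed route, which now also
triggers the deprecation warning on the server side.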
Relates to #35190 --- .../client/RequestConverters.java | 10 +++- .../client/RequestConvertersTests.java | 54 +++++++++++++++++++ .../documentation/CRUDDocumentationIT.java | 1 - docs/reference/docs/get.asciidoc | 14 ++--- .../rest/Netty4HeadBodyIsEmptyIT.java | 6 +-- .../rest-api-spec/api/exists_source.json | 8 +-- .../rest-api-spec/api/get_source.json | 8 +-- .../test/get_source/10_basic.yml | 8 +-- .../test/get_source/11_basic_with_types.yml | 17 ++++++ .../test/get_source/15_default_values.yml | 7 ++- .../16_default_values_with_types.yml | 16 ++++++ .../test/get_source/40_routing.yml | 7 ++- .../test/get_source/41_routing_with_types.yml | 42 +++++++++++++++ .../test/get_source/60_realtime_refresh.yml | 6 +-- .../61_realtime_refresh_with_types.yml | 49 +++++++++++++++++ .../test/get_source/70_source_filtering.yml | 11 ++-- .../71_source_filtering_with_types.yml | 27 ++++++++++ .../test/get_source/80_missing.yml | 12 ++++- .../test/get_source/81_missing_with_types.yml | 19 +++++++ .../test/get_source/85_source_missing.yml | 7 ++- .../86_source_missing_with_types.yml | 38 +++++++++++++ .../elasticsearch/action/get/GetRequest.java | 4 +- .../action/document/RestGetSourceAction.java | 16 +++++- .../action/get/GetRequestTests.java | 1 + .../document/RestGetSourceActionTests.java | 49 +++++++++++++++-- 25 files changed, 393 insertions(+), 44 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/get_source/11_basic_with_types.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/get_source/16_default_values_with_types.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/get_source/41_routing_with_types.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/get_source/61_realtime_refresh_with_types.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/get_source/71_source_filtering_with_types.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/get_source/81_missing_with_types.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/get_source/86_source_missing_with_types.yml diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 6791b5f8259..c7a54a9ac32 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -268,8 +268,14 @@ final class RequestConverters { } static Request sourceExists(GetRequest getRequest) { - Request request = new Request(HttpHead.METHOD_NAME, endpoint(getRequest.index(), getRequest.type(), getRequest.id(), "_source")); - + String optionalType = getRequest.type(); + String endpoint; + if (optionalType.equals(MapperService.SINGLE_MAPPING_NAME)) { + endpoint = endpoint(getRequest.index(), "_source", getRequest.id()); + } else { + endpoint = endpoint(getRequest.index(), optionalType, getRequest.id(), "_source"); + } + Request request = new Request(HttpHead.METHOD_NAME, endpoint); Params parameters = new Params(request); parameters.withPreference(getRequest.preference()); parameters.withRouting(getRequest.routing()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index b50d2c1265e..fa0f1c5708c 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -73,6 +73,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.VersionType;
+import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.index.rankeval.PrecisionAtK;
@@ -115,6 +116,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.StringJoiner;
+import java.util.function.BiFunction;
 import java.util.function.Consumer;
 import java.util.function.Function;
 import java.util.function.Supplier;
@@ -156,6 +158,58 @@ public class RequestConvertersTests extends ESTestCase {
         getAndExistsWithTypeTest(RequestConverters::get, HttpGet.METHOD_NAME);
     }

+    public void testSourceExists() throws IOException {
+        doTestSourceExists((index, id) -> new GetRequest(index, id));
+    }
+
+    public void testSourceExistsWithType() throws IOException {
+        String type = frequently() ? randomAlphaOfLengthBetween(3, 10) : MapperService.SINGLE_MAPPING_NAME;
+        doTestSourceExists((index, id) -> new GetRequest(index, type, id));
+    }
+
+    private static void doTestSourceExists(BiFunction<String, String, GetRequest> requestFunction) throws IOException {
+        String index = randomAlphaOfLengthBetween(3, 10);
+        String id = randomAlphaOfLengthBetween(3, 10);
+        final GetRequest getRequest = requestFunction.apply(index, id);
+
+        Map<String, String> expectedParams = new HashMap<>();
+        if (randomBoolean()) {
+            String preference = randomAlphaOfLengthBetween(3, 10);
+            getRequest.preference(preference);
+            expectedParams.put("preference", preference);
+        }
+        if (randomBoolean()) {
+            String routing = randomAlphaOfLengthBetween(3, 10);
+            getRequest.routing(routing);
+            expectedParams.put("routing", routing);
+        }
+        if (randomBoolean()) {
+            boolean realtime = randomBoolean();
+            getRequest.realtime(realtime);
+            if (realtime == false) {
+                expectedParams.put("realtime", "false");
+            }
+        }
+        if (randomBoolean()) {
+            boolean refresh = randomBoolean();
+            getRequest.refresh(refresh);
+            if (refresh) {
+                expectedParams.put("refresh", "true");
+            }
+        }
+        Request request = RequestConverters.sourceExists(getRequest);
+        assertEquals(HttpHead.METHOD_NAME, request.getMethod());
+        String type = getRequest.type();
+        if (type.equals(MapperService.SINGLE_MAPPING_NAME)) {
+            assertEquals("/" + index + "/_source/" + id, request.getEndpoint());
+        } else {
+            assertEquals("/" + index + "/" + type + "/" + id + "/_source", request.getEndpoint());
+        }
+
+        assertEquals(expectedParams, request.getParameters());
+        assertNull(request.getEntity());
+    }
+
     public void testMultiGet() throws IOException {
         Map<String, String> expectedParams = new HashMap<>();
         MultiGetRequest multiGetRequest = new MultiGetRequest();
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
index a9430b67aef..5279c19a415 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
@@ -1265,7 +1265,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         assertEquals(3, getResponse.getSourceAsMap().size());
         //tag::get-response
         String index = getResponse.getIndex();
-        String type = getResponse.getType();
         String id = getResponse.getId();
         if (getResponse.isExists()) {
             long version = getResponse.getVersion();
diff --git a/docs/reference/docs/get.asciidoc b/docs/reference/docs/get.asciidoc
index 3ab5fa11fa1..5271b976f96 100644
--- a/docs/reference/docs/get.asciidoc
+++ b/docs/reference/docs/get.asciidoc
@@ -1,9 +1,9 @@
 [[docs-get]]
 == Get API

-The get API allows to get a typed JSON document from the index based on
+The get API allows you to get a JSON document from the index based on
 its id. The following example gets a JSON document from an index called
-twitter, under a type called `_doc`, with id valued 0:
+twitter with id valued 0:

 [source,js]
 --------------------------------------------------
@@ -34,7 +34,7 @@ The result of the above get operation is:
 --------------------------------------------------
 // TESTRESPONSE[s/"_seq_no" : \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]

-The above result includes the `_index`, `_type`, `_id` and `_version`
+The above result includes the `_index`, `_id` and `_version`
 of the document we wish to retrieve, including the actual `_source`
 of the document if it could be found (as indicated by the `found`
 field in the response).

@@ -223,13 +223,13 @@ will fail.
 [[_source]]
 === Getting the +_source+ directly

-Use the `/{index}/{type}/{id}/_source` endpoint to get
+Use the `/{index}/_source/{id}` endpoint to get
 just the `_source` field of the document, without any additional
 content around it. For example:

 [source,js]
 --------------------------------------------------
-GET twitter/_doc/1/_source
+GET twitter/_source/1
 --------------------------------------------------
 // CONSOLE
 // TEST[continued]

@@ -238,7 +238,7 @@ You can also use the same source filtering parameters to control which parts of

 [source,js]
 --------------------------------------------------
-GET twitter/_doc/1/_source?_source_includes=*.id&_source_excludes=entities'
+GET twitter/_source/1?_source_includes=*.id&_source_excludes=entities
 --------------------------------------------------
 // CONSOLE
 // TEST[continued]

@@ -248,7 +248,7 @@ An existing document will not have a _source if it is disabled in the <<mapping-source-field,mapping>>
diff --git a/server/src/main/java/org/elasticsearch/action/get/GetRequest.java b/server/src/main/java/org/elasticsearch/action/get/GetRequest.java
--- a/server/src/main/java/org/elasticsearch/action/get/GetRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/get/GetRequest.java
  * The operation requires the {@link #index()}, {@link #type(String)} and {@link #id(String)}
@@ -84,7 +84,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements RealtimeRequest
      * @param index The index to get the document from
      * @param type The type of the document
      * @param id The id of the document
-     *
      * @deprecated Types are in the process of being removed, use {@link GetRequest(String, String)} instead.
      */
     @Deprecated
@@ -127,7 +126,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements RealtimeRequest
     /**
      * Sets the type of the document to fetch.
-     *
      * @deprecated Types are in the process of being removed.
      */
     @Deprecated
diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java
index c48529d420c..af376bf7c3c 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetSourceAction.java
@@ -19,12 +19,14 @@
 package org.elasticsearch.rest.action.document;

+import org.apache.logging.log4j.LogManager;
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
@@ -49,8 +51,14 @@ import static org.elasticsearch.rest.RestStatus.OK;
  */
 public class RestGetSourceAction extends BaseRestHandler {

+    private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetSourceAction.class));
+
+    static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in get_source and exist_source " +
+        "requests is deprecated.";
+
     public RestGetSourceAction(final Settings settings, final RestController controller) {
         super(settings);
+        controller.registerHandler(GET, "/{index}/_source/{id}", this);
+        controller.registerHandler(HEAD, "/{index}/_source/{id}", this);
         controller.registerHandler(GET, "/{index}/{type}/{id}/_source", this);
         controller.registerHandler(HEAD, "/{index}/{type}/{id}/_source", this);
     }
@@ -62,7 +70,13 @@ public class RestGetSourceAction extends BaseRestHandler {

     @Override
     public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
-        final GetRequest getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id"));
+        final GetRequest getRequest;
+        if (request.hasParam("type")) {
+            deprecationLogger.deprecatedAndMaybeLog("get_source_with_types", TYPES_DEPRECATION_MESSAGE);
+            getRequest = new GetRequest(request.param("index"), request.param("type"), request.param("id"));
+        } else {
+            getRequest = new GetRequest(request.param("index"), request.param("id"));
+        }
         getRequest.refresh(request.paramAsBoolean("refresh", getRequest.refresh()));
         getRequest.routing(request.param("routing"));
         getRequest.preference(request.param("preference"));
diff --git a/server/src/test/java/org/elasticsearch/action/get/GetRequestTests.java b/server/src/test/java/org/elasticsearch/action/get/GetRequestTests.java
index fc2162e8662..499932ccdf0 100644
--- a/server/src/test/java/org/elasticsearch/action/get/GetRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/get/GetRequestTests.java
@@ -40,6 +40,7 @@ public class GetRequestTests extends ESTestCase {
         final ActionRequestValidationException validate = request.validate();

         assertThat(validate, not(nullValue()));
+        assertEquals(2, validate.validationErrors().size());
         assertThat(validate.validationErrors(), hasItems("type is missing", "id is missing"));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java
index e8f573dc57d..f012c1393c9 100644
--- a/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/action/document/RestGetSourceActionTests.java
@@ -23,26 +23,38 @@ import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.get.GetResult;
 import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.RestRequest.Method;
 import org.elasticsearch.rest.RestResponse;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.rest.action.document.RestGetSourceAction.RestGetSourceResponseListener;
 import org.elasticsearch.test.rest.FakeRestChannel;
 import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.test.rest.RestActionTestCase;
 import org.junit.AfterClass;
+import org.junit.Before;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;

 import static java.util.Collections.emptyMap;
 import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
 import static org.elasticsearch.rest.RestStatus.OK;
-import static org.elasticsearch.rest.action.document.RestGetSourceAction.RestGetSourceResponseListener;
 import static org.hamcrest.Matchers.equalTo;

-public class RestGetSourceActionTests extends ESTestCase {
+public class RestGetSourceActionTests extends RestActionTestCase {

     private static RestRequest request = new FakeRestRequest();
     private static FakeRestChannel channel = new FakeRestChannel(request, true, 0);
     private static RestGetSourceResponseListener listener = new RestGetSourceResponseListener(channel, request);

+    @Before
+    public void setUpAction() {
+        new RestGetSourceAction(Settings.EMPTY, controller());
+    }
+
     @AfterClass
     public static void cleanupReferences() {
         request = null;
@@ -50,6 +62,37 @@ public class RestGetSourceActionTests extends ESTestCase {
         listener = null;
     }

+    /**
+     * test deprecation is logged if type is used in path
+     */
+    public void testTypeInPath() {
+        for (Method method : Arrays.asList(Method.GET, Method.HEAD)) {
+            RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
+                .withMethod(method)
+                .withPath("/some_index/some_type/id/_source")
+                .build();
+            dispatchRequest(request);
+            assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
+        }
+    }
+
+    /**
+     * test deprecation is logged if type is used as parameter
+     */
+    public void testTypeParameter() {
+        Map<String, String> params = new HashMap<>();
+        params.put("type", "some_type");
+        for (Method method : Arrays.asList(Method.GET, Method.HEAD)) {
+            RestRequest request = new FakeRestRequest.Builder(xContentRegistry())
+                .withMethod(method)
+                .withPath("/some_index/_source/id")
+                .withParams(params)
+                .build();
+            dispatchRequest(request);
+            assertWarnings(RestGetSourceAction.TYPES_DEPRECATION_MESSAGE);
+        }
+    }
+
     public void testRestGetSourceAction() throws Exception {
         final BytesReference source = new BytesArray("{\"foo\": \"bar\"}");
         final GetResponse response =

From 96d279ed83e25f427db86b831333ca6772202a91 Mon Sep 17 00:00:00 2001
From: Nicholas Knize
Date: Mon, 17 Dec 2018 20:09:46 -0600
Subject: [PATCH 24/26] Revert "[Geo] Integrate Lucene's LatLonShape (BKD
 Backed GeoShapes) as default `geo_shape` indexing approach (#35320)"

This
reverts commit 5bc7822562a6eefa4a64743233160cdc9f431adf. --- .../mapping/types/geo-shape.asciidoc | 184 ++--- .../migration/migrate_7_0/mappings.asciidoc | 16 - .../query-dsl/geo-shape-query.asciidoc | 5 +- .../common/geo/ShapeRelation.java | 12 - .../builders/GeometryCollectionBuilder.java | 3 + .../common/geo/parsers/GeoJsonParser.java | 24 +- .../common/geo/parsers/GeoWKTParser.java | 13 +- .../common/geo/parsers/ShapeParser.java | 4 +- .../index/mapper/BaseGeoShapeFieldMapper.java | 336 --------- .../index/mapper/GeoShapeFieldMapper.java | 600 +++++++++++++-- .../mapper/LegacyGeoShapeFieldMapper.java | 596 --------------- .../index/query/GeoShapeQueryBuilder.java | 117 +-- .../elasticsearch/indices/IndicesModule.java | 8 +- .../common/geo/GeoJsonShapeParserTests.java | 8 +- .../common/geo/GeoWKTShapeParserTests.java | 19 +- .../index/mapper/ExternalMapper.java | 21 +- .../ExternalValuesMapperIntegrationIT.java | 6 +- .../mapper/GeoShapeFieldMapperTests.java | 452 +++++++++-- .../index/mapper/GeoShapeFieldTypeTests.java | 52 +- .../LegacyGeoShapeFieldMapperTests.java | 714 ------------------ .../mapper/LegacyGeoShapeFieldTypeTests.java | 86 --- .../query/GeoShapeQueryBuilderTests.java | 75 +- .../query/LegacyGeoShapeFieldQueryTests.java | 94 --- .../index/query/MatchQueryBuilderTests.java | 1 - .../query/QueryStringQueryBuilderTests.java | 6 - .../elasticsearch/search/geo/GeoFilterIT.java | 1 - .../search/geo/GeoShapeIntegrationIT.java | 25 +- .../search/geo/GeoShapeQueryTests.java | 186 +---- .../geo/LegacyGeoShapeIntegrationIT.java | 170 ----- .../test/geo/RandomShapeGenerator.java | 2 - .../test/AbstractBuilderTestCase.java | 20 +- 31 files changed, 1227 insertions(+), 2629 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/index/mapper/BaseGeoShapeFieldMapper.java delete mode 100644 server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java delete mode 100644 server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java delete mode 100644 server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldTypeTests.java delete mode 100644 server/src/test/java/org/elasticsearch/index/query/LegacyGeoShapeFieldQueryTests.java delete mode 100644 server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index 8efb184afa6..2f51465d110 100644 --- a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -21,59 +21,48 @@ type. |======================================================================= |Option |Description| Default -|`tree |deprecated[6.6, PrefixTrees no longer used] Name of the PrefixTree -implementation to be used: `geohash` for GeohashPrefixTree and `quadtree` -for QuadPrefixTree. Note: This parameter is only relevant for `term` and -`recursive` strategies. -| `quadtree` +|`tree` |Name of the PrefixTree implementation to be used: `geohash` for +GeohashPrefixTree and `quadtree` for QuadPrefixTree. +| `geohash` -|`precision` |deprecated[6.6, PrefixTrees no longer used] This parameter may -be used instead of `tree_levels` to set an appropriate value for the -`tree_levels` parameter. The value specifies the desired precision and -Elasticsearch will calculate the best tree_levels value to honor this -precision. The value should be a number followed by an optional distance -unit. 
Valid distance units include: `in`, `inch`, `yd`, `yard`, `mi`, -`miles`, `km`, `kilometers`, `m`,`meters`, `cm`,`centimeters`, `mm`, -`millimeters`. Note: This parameter is only relevant for `term` and -`recursive` strategies. +|`precision` |This parameter may be used instead of `tree_levels` to set +an appropriate value for the `tree_levels` parameter. The value +specifies the desired precision and Elasticsearch will calculate the +best tree_levels value to honor this precision. The value should be a +number followed by an optional distance unit. Valid distance units +include: `in`, `inch`, `yd`, `yard`, `mi`, `miles`, `km`, `kilometers`, +`m`,`meters`, `cm`,`centimeters`, `mm`, `millimeters`. | `50m` -|`tree_levels` |deprecated[6.6, PrefixTrees no longer used] Maximum number -of layers to be used by the PrefixTree. This can be used to control the -precision of shape representations andtherefore how many terms are -indexed. Defaults to the default value of the chosen PrefixTree -implementation. Since this parameter requires a certain level of -understanding of the underlying implementation, users may use the -`precision` parameter instead. However, Elasticsearch only uses the -tree_levels parameter internally and this is what is returned via the -mapping API even if you use the precision parameter. Note: This parameter -is only relevant for `term` and `recursive` strategies. +|`tree_levels` |Maximum number of layers to be used by the PrefixTree. +This can be used to control the precision of shape representations and +therefore how many terms are indexed. Defaults to the default value of +the chosen PrefixTree implementation. Since this parameter requires a +certain level of understanding of the underlying implementation, users +may use the `precision` parameter instead. However, Elasticsearch only +uses the tree_levels parameter internally and this is what is returned +via the mapping API even if you use the precision parameter. | various -|`strategy` |deprecated[6.6, PrefixTrees no longer used] The strategy -parameter defines the approach for how to represent shapes at indexing -and search time. It also influences the capabilities available so it -is recommended to let Elasticsearch set this parameter automatically. -There are two strategies available: `recursive`, and `term`. -Recursive and Term strategies are deprecated and will be removed in a -future version. While they are still available, the Term strategy -supports point types only (the `points_only` parameter will be -automatically set to true) while Recursive strategy supports all -shape types. (IMPORTANT: see <> for more -detailed information about these strategies) +|`strategy` |The strategy parameter defines the approach for how to +represent shapes at indexing and search time. It also influences the +capabilities available so it is recommended to let Elasticsearch set +this parameter automatically. There are two strategies available: +`recursive` and `term`. Term strategy supports point types only (the +`points_only` parameter will be automatically set to true) while +Recursive strategy supports all shape types. (IMPORTANT: see +<> for more detailed information) | `recursive` -|`distance_error_pct` |deprecated[6.6, PrefixTrees no longer used] Used as a -hint to the PrefixTree about how precise it should be. Defaults to 0.025 (2.5%) -with 0.5 as the maximum supported value. PERFORMANCE NOTE: This value will -default to 0 if a `precision` or `tree_level` definition is explicitly defined. 
-This guarantees spatial precision at the level defined in the mapping. This can -lead to significant memory usage for high resolution shapes with low error -(e.g., large shapes at 1m with < 0.001 error). To improve indexing performance -(at the cost of query accuracy) explicitly define `tree_level` or `precision` -along with a reasonable `distance_error_pct`, noting that large shapes will have -greater false positives. Note: This parameter is only relevant for `term` and -`recursive` strategies. +|`distance_error_pct` |Used as a hint to the PrefixTree about how +precise it should be. Defaults to 0.025 (2.5%) with 0.5 as the maximum +supported value. PERFORMANCE NOTE: This value will default to 0 if a `precision` or +`tree_level` definition is explicitly defined. This guarantees spatial precision +at the level defined in the mapping. This can lead to significant memory usage +for high resolution shapes with low error (e.g., large shapes at 1m with < 0.001 error). +To improve indexing performance (at the cost of query accuracy) explicitly define +`tree_level` or `precision` along with a reasonable `distance_error_pct`, noting +that large shapes will have greater false positives. | `0.025` |`orientation` |Optionally define how to interpret vertex order for @@ -88,13 +77,13 @@ sets vertex order for the coordinate list of a geo_shape field but can be overridden in each individual GeoJSON or WKT document. | `ccw` -|`points_only` |deprecated[6.6, PrefixTrees no longer used] Setting this option to -`true` (defaults to `false`) configures the `geo_shape` field type for point -shapes only (NOTE: Multi-Points are not yet supported). This optimizes index and -search performance for the `geohash` and `quadtree` when it is known that only points -will be indexed. At present geo_shape queries can not be executed on `geo_point` -field types. This option bridges the gap by improving point performance on a -`geo_shape` field so that `geo_shape` queries are optimal on a point only field. +|`points_only` |Setting this option to `true` (defaults to `false`) configures +the `geo_shape` field type for point shapes only (NOTE: Multi-Points are not +yet supported). This optimizes index and search performance for the `geohash` and +`quadtree` when it is known that only points will be indexed. At present geo_shape +queries can not be executed on `geo_point` field types. This option bridges the gap +by improving point performance on a `geo_shape` field so that `geo_shape` queries are +optimal on a point only field. | `false` |`ignore_malformed` |If true, malformed GeoJSON or WKT shapes are ignored. If @@ -111,35 +100,16 @@ and reject the whole document. |======================================================================= - -[[geoshape-indexing-approach]] -[float] -==== Indexing approach -GeoShape types are indexed by decomposing the shape into a triangular mesh and -indexing each triangle as a 7 dimension point in a BKD tree. This provides -near perfect spatial resolution (down to 1e-7 decimal degree precision) since all -spatial relations are computed using an encoded vector representation of the -original shape instead of a raster-grid representation as used by the -<> indexing approach. Performance of the tessellator primarily -depends on the number of vertices that define the polygon/multi-polyogn. While -this is the default indexing technique prefix trees can still be used by setting -the `tree` or `strategy` parameters according to the appropriate -<>. 
Note that these parameters are now deprecated -and will be removed in a future version. - [[prefix-trees]] [float] ==== Prefix trees -deprecated[6.6, PrefixTrees no longer used] To efficiently represent shapes in -an inverted index, Shapes are converted into a series of hashes representing -grid squares (commonly referred to as "rasters") using implementations of a -PrefixTree. The tree notion comes from the fact that the PrefixTree uses multiple -grid layers, each with an increasing level of precision to represent the Earth. -This can be thought of as increasing the level of detail of a map or image at higher -zoom levels. Since this approach causes precision issues with indexed shape, it has -been deprecated in favor of a vector indexing approach that indexes the shapes as a -triangular mesh (see <>). +To efficiently represent shapes in the index, Shapes are converted into +a series of hashes representing grid squares (commonly referred to as "rasters") +using implementations of a PrefixTree. The tree notion comes from the fact that +the PrefixTree uses multiple grid layers, each with an increasing level of +precision to represent the Earth. This can be thought of as increasing the level +of detail of a map or image at higher zoom levels. Multiple PrefixTree implementations are provided: @@ -161,10 +131,9 @@ number of levels for the quad trees in Elasticsearch is 29; the default is 21. [[spatial-strategy]] [float] ===== Spatial strategies -deprecated[6.6, PrefixTrees no longer used] The indexing implementation -selected relies on a SpatialStrategy for choosing how to decompose the shapes -(either as grid squares or a tessellated triangular mesh). Each strategy -answers the following: +The PrefixTree implementations rely on a SpatialStrategy for decomposing +the provided Shape(s) into approximated grid squares. Each strategy answers +the following: * What type of Shapes can be indexed? * What types of Query Operations and Shapes can be used? @@ -177,7 +146,7 @@ are provided: |======================================================================= |Strategy |Supported Shapes |Supported Queries |Multiple Shapes -|`recursive` |<> |`INTERSECTS`, `DISJOINT`, `WITHIN`, `CONTAINS` |Yes +|`recursive` |<> |`INTERSECTS`, `DISJOINT`, `WITHIN`, `CONTAINS` |Yes |`term` |<> |`INTERSECTS` |Yes |======================================================================= @@ -185,13 +154,13 @@ are provided: [float] ===== Accuracy -`Recursive` and `Term` strategies do not provide 100% accuracy and depending on -how they are configured it may return some false positives for `INTERSECTS`, -`WITHIN` and `CONTAINS` queries, and some false negatives for `DISJOINT` queries. -To mitigate this, it is important to select an appropriate value for the tree_levels -parameter and to adjust expectations accordingly. For example, a point may be near -the border of a particular grid cell and may thus not match a query that only matches -the cell right next to it -- even though the shape is very close to the point. +Geo_shape does not provide 100% accuracy and depending on how it is configured +it may return some false positives for `INTERSECTS`, `WITHIN` and `CONTAINS` +queries, and some false negatives for `DISJOINT` queries. To mitigate this, it +is important to select an appropriate value for the tree_levels parameter and +to adjust expectations accordingly. 
For example, a point may be near the border +of a particular grid cell and may thus not match a query that only matches the +cell right next to it -- even though the shape is very close to the point. [float] ===== Example @@ -204,7 +173,9 @@ PUT /example "doc": { "properties": { "location": { - "type": "geo_shape" + "type": "geo_shape", + "tree": "quadtree", + "precision": "100m" } } } @@ -214,23 +185,22 @@ PUT /example // CONSOLE // TESTSETUP -This mapping definition maps the location field to the geo_shape -type using the default vector implementation. It provides -approximately 1e-7 decimal degree precision. +This mapping maps the location field to the geo_shape type using the +quad_tree implementation and a precision of 100m. Elasticsearch translates +this into a tree_levels setting of 20. [float] -===== Performance considerations with Prefix Trees +===== Performance considerations -deprecated[6.6, PrefixTrees no longer used] With prefix trees, -Elasticsearch uses the paths in the tree as terms in the inverted index -and in queries. The higher the level (and thus the precision), the more -terms are generated. Of course, calculating the terms, keeping them in +Elasticsearch uses the paths in the prefix tree as terms in the index +and in queries. The higher the level is (and thus the precision), the +more terms are generated. Of course, calculating the terms, keeping them in memory, and storing them on disk all have a price. Especially with higher -tree levels, indices can become extremely large even with a modest amount -of data. Additionally, the size of the features also matters. Big, complex -polygons can take up a lot of space at higher tree levels. Which setting -is right depends on the use case. Generally one trades off accuracy against -index size and query performance. +tree levels, indices can become extremely large even with a modest +amount of data. Additionally, the size of the features also matters. +Big, complex polygons can take up a lot of space at higher tree levels. +Which setting is right depends on the use case. Generally one trades off +accuracy against index size and query performance. The defaults in Elasticsearch for both implementations are a compromise between index size and a reasonable level of precision of 50m at the @@ -628,10 +598,7 @@ POST /example/doc ===== Circle Elasticsearch supports a `circle` type, which consists of a center -point with a radius. Note that this circle representation can only -be indexed when using the `recursive` Prefix Tree strategy. For -the default <> circles should be approximated using -a `POLYGON`. +point with a radius: [source,js] -------------------------------------------------- @@ -645,7 +612,6 @@ POST /example/doc } -------------------------------------------------- // CONSOLE -// TEST[skip:not supported in default] Note: The inner `radius` field is required. If not specified, then the units of the `radius` will default to `METERS`. diff --git a/docs/reference/migration/migrate_7_0/mappings.asciidoc b/docs/reference/migration/migrate_7_0/mappings.asciidoc index f08ea3ab89c..5ee1615796c 100644 --- a/docs/reference/migration/migrate_7_0/mappings.asciidoc +++ b/docs/reference/migration/migrate_7_0/mappings.asciidoc @@ -52,19 +52,3 @@ as a better alternative. An error will now be thrown when unknown configuration options are provided to similarities. Such unknown parameters were ignored before. 
- -[float] -==== deprecated `geo_shape` Prefix Tree indexing - -`geo_shape` types now default to using a vector indexing approach based on Lucene's new -`LatLonShape` field type. This indexes shapes as a triangular mesh instead of decomposing -them into individual grid cells. To index using legacy prefix trees `recursive` or `term` -strategy must be explicitly defined. Note that these strategies are now deprecated and will -be removed in a future version. - -[float] -==== deprecated `geo_shape` parameters - -The following type parameters are deprecated for the `geo_shape` field type: `tree`, -`precision`, `tree_levels`, `distance_error_pct`, `points_only`, and `strategy`. They -will be removed in a future version. \ No newline at end of file diff --git a/docs/reference/query-dsl/geo-shape-query.asciidoc b/docs/reference/query-dsl/geo-shape-query.asciidoc index f796881d520..4e00a2f49b4 100644 --- a/docs/reference/query-dsl/geo-shape-query.asciidoc +++ b/docs/reference/query-dsl/geo-shape-query.asciidoc @@ -7,7 +7,7 @@ Requires the <>. The `geo_shape` query uses the same grid square representation as the `geo_shape` mapping to find documents that have a shape that intersects -with the query shape. It will also use the same Prefix Tree configuration +with the query shape. It will also use the same PrefixTree configuration as defined for the field mapping. The query supports two ways of defining the query shape, either by @@ -157,8 +157,7 @@ has nothing in common with the query geometry. * `WITHIN` - Return all documents whose `geo_shape` field is within the query geometry. * `CONTAINS` - Return all documents whose `geo_shape` field -contains the query geometry. Note: this is only supported using the -`recursive` Prefix Tree Strategy deprecated[6.6] +contains the query geometry. [float] ==== Ignore Unmapped diff --git a/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java b/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java index e2e177c8f0f..e83e18ce432 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java +++ b/server/src/main/java/org/elasticsearch/common/geo/ShapeRelation.java @@ -19,7 +19,6 @@ package org.elasticsearch.common.geo; -import org.apache.lucene.document.LatLonShape.QueryRelation; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -63,17 +62,6 @@ public enum ShapeRelation implements Writeable { return null; } - /** Maps ShapeRelation to Lucene's LatLonShapeRelation */ - public QueryRelation getLuceneRelation() { - switch (this) { - case INTERSECTS: return QueryRelation.INTERSECTS; - case DISJOINT: return QueryRelation.DISJOINT; - case WITHIN: return QueryRelation.WITHIN; - default: - throw new IllegalArgumentException("ShapeRelation [" + this + "] not supported"); - } - } - public String getRelationName() { return relationName; } diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index fdf7073bd74..b6e94c012c6 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -197,6 +197,9 @@ public class GeometryCollectionBuilder extends ShapeBuilder coerce = (shapeMapper == null) - ? 
BaseGeoShapeFieldMapper.Defaults.COERCE - : shapeMapper.coerce(); - Explicit ignoreZValue = (shapeMapper == null) - ? BaseGeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE - : shapeMapper.ignoreZValue(); + ShapeBuilder.Orientation requestedOrientation = + (shapeMapper == null) ? ShapeBuilder.Orientation.RIGHT : shapeMapper.fieldType().orientation(); + Explicit coerce = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce(); + Explicit ignoreZValue = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE : shapeMapper.ignoreZValue(); String malformedException = null; @@ -108,7 +102,7 @@ abstract class GeoJsonParser { malformedException = "cannot have [" + ShapeParser.FIELD_ORIENTATION + "] with type set to [" + shapeType + "]"; } subParser.nextToken(); - orientation = ShapeBuilder.Orientation.fromString(subParser.text()); + requestedOrientation = ShapeBuilder.Orientation.fromString(subParser.text()); } else { subParser.nextToken(); subParser.skipChildren(); @@ -134,7 +128,7 @@ abstract class GeoJsonParser { return geometryCollections; } - return shapeType.getBuilder(coordinateNode, radius, orientation, coerce.value()); + return shapeType.getBuilder(coordinateNode, radius, requestedOrientation, coerce.value()); } /** @@ -208,7 +202,7 @@ abstract class GeoJsonParser { * @return Geometry[] geometries of the GeometryCollection * @throws IOException Thrown if an error occurs while reading from the XContentParser */ - static GeometryCollectionBuilder parseGeometries(XContentParser parser, BaseGeoShapeFieldMapper mapper) throws + static GeometryCollectionBuilder parseGeometries(XContentParser parser, GeoShapeFieldMapper mapper) throws IOException { if (parser.currentToken() != XContentParser.Token.START_ARRAY) { throw new ElasticsearchParseException("geometries must be an array of geojson objects"); diff --git a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java index bf26980c926..e1d990f0cff 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java @@ -34,7 +34,7 @@ import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper; +import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.locationtech.jts.geom.Coordinate; import java.io.IOException; @@ -63,7 +63,7 @@ public class GeoWKTParser { // no instance private GeoWKTParser() {} - public static ShapeBuilder parse(XContentParser parser, final BaseGeoShapeFieldMapper shapeMapper) + public static ShapeBuilder parse(XContentParser parser, final GeoShapeFieldMapper shapeMapper) throws IOException, ElasticsearchParseException { return parseExpectedType(parser, null, shapeMapper); } @@ -75,12 +75,12 @@ public class GeoWKTParser { /** throws an exception if the parsed geometry type does not match the expected shape type */ public static ShapeBuilder parseExpectedType(XContentParser parser, final GeoShapeType shapeType, - final BaseGeoShapeFieldMapper shapeMapper) + final GeoShapeFieldMapper shapeMapper) throws IOException, ElasticsearchParseException { try (StringReader reader = new StringReader(parser.text())) { - Explicit ignoreZValue = (shapeMapper == null) ? 
BaseGeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE : + Explicit ignoreZValue = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.IGNORE_Z_VALUE : shapeMapper.ignoreZValue(); - Explicit coerce = (shapeMapper == null) ? BaseGeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce(); + Explicit coerce = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce(); // setup the tokenizer; configured to read words w/o numbers StreamTokenizer tokenizer = new StreamTokenizer(reader); tokenizer.resetSyntax(); @@ -257,8 +257,7 @@ public class GeoWKTParser { if (nextEmptyOrOpen(stream).equals(EMPTY)) { return null; } - PolygonBuilder builder = new PolygonBuilder(parseLinearRing(stream, ignoreZValue, coerce), - BaseGeoShapeFieldMapper.Defaults.ORIENTATION.value()); + PolygonBuilder builder = new PolygonBuilder(parseLinearRing(stream, ignoreZValue, coerce), ShapeBuilder.Orientation.RIGHT); while (nextCloserOrComma(stream).equals(COMMA)) { builder.hole(parseLinearRing(stream, ignoreZValue, coerce)); } diff --git a/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java b/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java index 21d1bd9f255..79582c3365b 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper; +import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import java.io.IOException; @@ -46,7 +46,7 @@ public interface ShapeParser { * if the parsers current token has been null * @throws IOException if the input could not be read */ - static ShapeBuilder parse(XContentParser parser, BaseGeoShapeFieldMapper shapeMapper) throws IOException { + static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper shapeMapper) throws IOException { if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { return null; } if (parser.currentToken() == XContentParser.Token.START_OBJECT) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BaseGeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BaseGeoShapeFieldMapper.java deleted file mode 100644 index 3f1e49e525e..00000000000 --- a/server/src/main/java/org/elasticsearch/index/mapper/BaseGeoShapeFieldMapper.java +++ /dev/null @@ -1,336 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.elasticsearch.Version; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper.DeprecatedParameters; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.QueryShardException; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_MALFORMED; - -/** - * Base class for {@link GeoShapeFieldMapper} and {@link LegacyGeoShapeFieldMapper} - */ -public abstract class BaseGeoShapeFieldMapper extends FieldMapper { - public static final String CONTENT_TYPE = "geo_shape"; - - public static class Names { - public static final ParseField ORIENTATION = new ParseField("orientation"); - public static final ParseField COERCE = new ParseField("coerce"); - } - - public static class Defaults { - public static final Explicit ORIENTATION = new Explicit<>(Orientation.RIGHT, false); - public static final Explicit COERCE = new Explicit<>(false, false); - public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); - public static final Explicit IGNORE_Z_VALUE = new Explicit<>(true, false); - } - - public abstract static class Builder - extends FieldMapper.Builder { - protected Boolean coerce; - protected Boolean ignoreMalformed; - protected Boolean ignoreZValue; - protected Orientation orientation; - - /** default builder - used for external mapper*/ - public Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType) { - super(name, fieldType, defaultFieldType); - } - - public Builder(String name, MappedFieldType fieldType, MappedFieldType defaultFieldType, - boolean coerce, boolean ignoreMalformed, Orientation orientation, boolean ignoreZ) { - super(name, fieldType, defaultFieldType); - this.coerce = coerce; - this.ignoreMalformed = ignoreMalformed; - this.orientation = orientation; - this.ignoreZValue = ignoreZ; - } - - public Builder coerce(boolean coerce) { - this.coerce = coerce; - return this; - } - - protected Explicit coerce(BuilderContext context) { - if (coerce != null) { - return new Explicit<>(coerce, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); - } - return Defaults.COERCE; - } - - public Builder orientation(Orientation orientation) { - this.orientation = orientation; - return this; - } - - protected Explicit orientation() { - if (orientation != null) { - return new Explicit<>(orientation, true); - } - return Defaults.ORIENTATION; - } - - @Override - protected boolean defaultDocValues(Version indexCreated) { - return false; - } - - public Builder ignoreMalformed(boolean ignoreMalformed) { - this.ignoreMalformed = ignoreMalformed; - return this; 
- } - - protected Explicit ignoreMalformed(BuilderContext context) { - if (ignoreMalformed != null) { - return new Explicit<>(ignoreMalformed, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); - } - return Defaults.IGNORE_MALFORMED; - } - - protected Explicit ignoreZValue() { - if (ignoreZValue != null) { - return new Explicit<>(ignoreZValue, true); - } - return Defaults.IGNORE_Z_VALUE; - } - - public Builder ignoreZValue(final boolean ignoreZValue) { - this.ignoreZValue = ignoreZValue; - return this; - } - - @Override - protected void setupFieldType(BuilderContext context) { - super.setupFieldType(context); - - // field mapper handles this at build time - // but prefix tree strategies require a name, so throw a similar exception - if (name().isEmpty()) { - throw new IllegalArgumentException("name cannot be empty string"); - } - - BaseGeoShapeFieldType ft = (BaseGeoShapeFieldType)fieldType(); - ft.setOrientation(orientation().value()); - } - } - - public static class TypeParser implements Mapper.TypeParser { - - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - boolean coerce = Defaults.COERCE.value(); - boolean ignoreZ = Defaults.IGNORE_Z_VALUE.value(); - boolean ignoreMalformed = Defaults.IGNORE_MALFORMED.value(); - Orientation orientation = Defaults.ORIENTATION.value(); - DeprecatedParameters deprecatedParameters = new DeprecatedParameters(); - boolean parsedDeprecatedParams = false; - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String fieldName = entry.getKey(); - Object fieldNode = entry.getValue(); - if (DeprecatedParameters.parse(name, fieldName, fieldNode, deprecatedParameters)) { - parsedDeprecatedParams = true; - iterator.remove(); - } else if (Names.ORIENTATION.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { - orientation = ShapeBuilder.Orientation.fromString(fieldNode.toString()); - iterator.remove(); - } else if (IGNORE_MALFORMED.equals(fieldName)) { - ignoreMalformed = XContentMapValues.nodeBooleanValue(fieldNode, name + ".ignore_malformed"); - iterator.remove(); - } else if (Names.COERCE.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { - coerce = XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.COERCE.getPreferredName()); - iterator.remove(); - } else if (GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName().equals(fieldName)) { - ignoreZ = XContentMapValues.nodeBooleanValue(fieldNode, - name + "." + GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName()); - iterator.remove(); - } - } - return getBuilder(name, coerce, ignoreMalformed, orientation, ignoreZ, parsedDeprecatedParams ? 
deprecatedParameters : null); - } - - private Builder getBuilder(String name, boolean coerce, boolean ignoreMalformed, Orientation orientation, - boolean ignoreZ, DeprecatedParameters deprecatedParameters) { - if (deprecatedParameters != null) { - return getLegacyBuilder(name, coerce, ignoreMalformed, orientation, ignoreZ, deprecatedParameters); - } - return new GeoShapeFieldMapper.Builder(name, coerce, ignoreMalformed, orientation, ignoreZ); - } - - private Builder getLegacyBuilder(String name, boolean coerce, boolean ignoreMalformed, Orientation orientation, - boolean ignoreZ, DeprecatedParameters deprecatedParameters) { - return new LegacyGeoShapeFieldMapper.Builder(name, coerce, ignoreMalformed, orientation, ignoreZ, deprecatedParameters); - } - } - - public abstract static class BaseGeoShapeFieldType extends MappedFieldType { - protected Orientation orientation = Defaults.ORIENTATION.value(); - - protected BaseGeoShapeFieldType() { - setIndexOptions(IndexOptions.DOCS); - setTokenized(false); - setStored(false); - setStoreTermVectors(false); - setOmitNorms(true); - } - - protected BaseGeoShapeFieldType(BaseGeoShapeFieldType ref) { - super(ref); - this.orientation = ref.orientation; - } - - @Override - public boolean equals(Object o) { - if (!super.equals(o)) return false; - BaseGeoShapeFieldType that = (BaseGeoShapeFieldType) o; - return orientation == that.orientation; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), orientation); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public void checkCompatibility(MappedFieldType fieldType, List conflicts) { - super.checkCompatibility(fieldType, conflicts); - } - - public Orientation orientation() { return this.orientation; } - - public void setOrientation(Orientation orientation) { - checkIfFrozen(); - this.orientation = orientation; - } - - @Override - public Query existsQuery(QueryShardContext context) { - return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name())); - } - - @Override - public Query termQuery(Object value, QueryShardContext context) { - throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead"); - } - } - - protected Explicit coerce; - protected Explicit ignoreMalformed; - protected Explicit ignoreZValue; - - protected BaseGeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, - Explicit ignoreZValue, Settings indexSettings, - MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - this.coerce = coerce; - this.ignoreMalformed = ignoreMalformed; - this.ignoreZValue = ignoreZValue; - } - - @Override - protected void doMerge(Mapper mergeWith) { - super.doMerge(mergeWith); - BaseGeoShapeFieldMapper gsfm = (BaseGeoShapeFieldMapper)mergeWith; - if (gsfm.coerce.explicit()) { - this.coerce = gsfm.coerce; - } - if (gsfm.ignoreMalformed.explicit()) { - this.ignoreMalformed = gsfm.ignoreMalformed; - } - if (gsfm.ignoreZValue.explicit()) { - this.ignoreZValue = gsfm.ignoreZValue; - } - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - builder.field("type", contentType()); - BaseGeoShapeFieldType ft = (BaseGeoShapeFieldType)fieldType(); - if 
(includeDefaults || ft.orientation() != Defaults.ORIENTATION.value()) { - builder.field(Names.ORIENTATION.getPreferredName(), ft.orientation()); - } - if (includeDefaults || coerce.explicit()) { - builder.field(Names.COERCE.getPreferredName(), coerce.value()); - } - if (includeDefaults || ignoreMalformed.explicit()) { - builder.field(IGNORE_MALFORMED, ignoreMalformed.value()); - } - if (includeDefaults || ignoreZValue.explicit()) { - builder.field(GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName(), ignoreZValue.value()); - } - } - - public Explicit coerce() { - return coerce; - } - - public Explicit ignoreMalformed() { - return ignoreMalformed; - } - - public Explicit ignoreZValue() { - return ignoreZValue; - } - - public Orientation orientation() { - return ((BaseGeoShapeFieldType)fieldType).orientation(); - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } -} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index 65ee2e428fa..7de40fe337d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -18,24 +18,48 @@ */ package org.elasticsearch.index.mapper; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.LatLonShape; -import org.apache.lucene.geo.Line; -import org.apache.lucene.geo.Polygon; -import org.apache.lucene.geo.Rectangle; +import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; +import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; +import org.apache.lucene.spatial.prefix.TermQueryPrefixTreeStrategy; +import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; +import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree; +import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; +import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; +import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.geo.SpatialStrategy; +import org.elasticsearch.common.geo.XShapeCollection; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.QueryShardException; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_MALFORMED; /** - * FieldMapper for indexing {@link org.apache.lucene.document.LatLonShape}s. 
+ * FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s. * <p>
* Currently Shapes can only be indexed and can only be queried using * {@link org.elasticsearch.index.query.GeoShapeQueryBuilder}, consequently @@ -49,128 +73,554 @@ import java.util.Arrays; * [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] * ] * } - *
<p>
- * or: - *
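The two input formats this javadoc describes can be produced from Java with XContentBuilder, as the parser tests elsewhere in this patch do; a sketch assuming a hypothetical field named "location":

    import java.io.IOException;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    final class GeoShapeDocSketch {
        // Object (GeoJSON-style) form of the polygon from the javadoc.
        static XContentBuilder geoJsonDoc() throws IOException {
            return XContentFactory.jsonBuilder().startObject()
                .startObject("location")
                    .field("type", "polygon")
                    .startArray("coordinates").startArray()
                        .startArray().value(100.0).value(0.0).endArray()
                        .startArray().value(101.0).value(0.0).endArray()
                        .startArray().value(101.0).value(1.0).endArray()
                        .startArray().value(100.0).value(1.0).endArray()
                        .startArray().value(100.0).value(0.0).endArray()
                    .endArray().endArray()
                .endObject()
            .endObject();
        }

        // Equivalent WKT string form.
        static XContentBuilder wktDoc() throws IOException {
            return XContentFactory.jsonBuilder().startObject()
                .field("location", "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0))")
            .endObject();
        }
    }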
<p>
- * "field" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0)) */ -public class GeoShapeFieldMapper extends BaseGeoShapeFieldMapper { +public class GeoShapeFieldMapper extends FieldMapper { + + public static final String CONTENT_TYPE = "geo_shape"; + + public static class Names { + public static final String TREE = "tree"; + public static final String TREE_GEOHASH = "geohash"; + public static final String TREE_QUADTREE = "quadtree"; + public static final String TREE_LEVELS = "tree_levels"; + public static final String TREE_PRESISION = "precision"; + public static final String DISTANCE_ERROR_PCT = "distance_error_pct"; + public static final String ORIENTATION = "orientation"; + public static final String STRATEGY = "strategy"; + public static final String STRATEGY_POINTS_ONLY = "points_only"; + public static final String COERCE = "coerce"; + } + + public static class Defaults { + public static final String TREE = Names.TREE_GEOHASH; + public static final String STRATEGY = SpatialStrategy.RECURSIVE.getStrategyName(); + public static final boolean POINTS_ONLY = false; + public static final int GEOHASH_LEVELS = GeoUtils.geoHashLevelsForPrecision("50m"); + public static final int QUADTREE_LEVELS = GeoUtils.quadTreeLevelsForPrecision("50m"); + public static final Orientation ORIENTATION = Orientation.RIGHT; + public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d; + public static final Explicit COERCE = new Explicit<>(false, false); + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + public static final Explicit IGNORE_Z_VALUE = new Explicit<>(true, false); + + public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType(); + + static { + // setting name here is a hack so freeze can be called...instead all these options should be + // moved to the default ctor for GeoShapeFieldType, and defaultFieldType() should be removed from mappers... 
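The freeze mentioned in this comment is MappedFieldType's mutation guard: every setter calls checkIfFrozen(), and freeze() flips the instance to read-only (for this field type it also builds the prefix-tree strategies, see freeze() below). A stripped-down, hypothetical illustration of the discipline, not the real implementation:

    final class FreezableGridSettings {
        private boolean frozen;
        private String tree = "geohash";

        private void checkIfFrozen() {
            if (frozen) {
                throw new IllegalStateException("tried to mutate a frozen settings object");
            }
        }

        void setTree(String tree) {
            checkIfFrozen();   // mutation is legal only before freeze()
            this.tree = tree;
        }

        void freeze() {
            frozen = true;     // from here on the instance is immutable and safely shareable
        }
    }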
+ FIELD_TYPE.setName("DoesNotExist"); + FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); + FIELD_TYPE.setTokenized(false); + FIELD_TYPE.setStored(false); + FIELD_TYPE.setStoreTermVectors(false); + FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.freeze(); + } + } + + public static class Builder extends FieldMapper.Builder { + + private Boolean coerce; + private Boolean ignoreMalformed; + private Boolean ignoreZValue; - public static class Builder extends BaseGeoShapeFieldMapper.Builder { public Builder(String name) { - super (name, new GeoShapeFieldType(), new GeoShapeFieldType()); + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); } - public Builder(String name, boolean coerce, boolean ignoreMalformed, ShapeBuilder.Orientation orientation, - boolean ignoreZ) { - super(name, new GeoShapeFieldType(), new GeoShapeFieldType(), coerce, ignoreMalformed, orientation, ignoreZ); + @Override + public GeoShapeFieldType fieldType() { + return (GeoShapeFieldType)fieldType; + } + + public Builder coerce(boolean coerce) { + this.coerce = coerce; + return this; + } + + @Override + protected boolean defaultDocValues(Version indexCreated) { + return false; + } + + protected Explicit coerce(BuilderContext context) { + if (coerce != null) { + return new Explicit<>(coerce, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); + } + return Defaults.COERCE; + } + + public Builder ignoreMalformed(boolean ignoreMalformed) { + this.ignoreMalformed = ignoreMalformed; + return this; + } + + protected Explicit ignoreMalformed(BuilderContext context) { + if (ignoreMalformed != null) { + return new Explicit<>(ignoreMalformed, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); + } + return Defaults.IGNORE_MALFORMED; + } + + protected Explicit ignoreZValue(BuilderContext context) { + if (ignoreZValue != null) { + return new Explicit<>(ignoreZValue, true); + } + return Defaults.IGNORE_Z_VALUE; + } + + public Builder ignoreZValue(final boolean ignoreZValue) { + this.ignoreZValue = ignoreZValue; + return this; } @Override public GeoShapeFieldMapper build(BuilderContext context) { + GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType; + + if (geoShapeFieldType.treeLevels() == 0 && geoShapeFieldType.precisionInMeters() < 0) { + geoShapeFieldType.setDefaultDistanceErrorPct(Defaults.LEGACY_DISTANCE_ERROR_PCT); + } setupFieldType(context); - return new GeoShapeFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), - ignoreZValue(), context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + + return new GeoShapeFieldMapper(name, fieldType, ignoreMalformed(context), coerce(context), ignoreZValue(context), + context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); } } - public static final class GeoShapeFieldType extends BaseGeoShapeFieldType { - public GeoShapeFieldType() { - super(); + public static class TypeParser implements Mapper.TypeParser { + + @Override + public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + Builder builder = new Builder(name); + Boolean pointsOnly = null; + for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + String fieldName = entry.getKey(); + Object fieldNode = entry.getValue(); + if (Names.TREE.equals(fieldName)) { + 
builder.fieldType().setTree(fieldNode.toString()); + iterator.remove(); + } else if (Names.TREE_LEVELS.equals(fieldName)) { + builder.fieldType().setTreeLevels(Integer.parseInt(fieldNode.toString())); + iterator.remove(); + } else if (Names.TREE_PRESISION.equals(fieldName)) { + builder.fieldType().setPrecisionInMeters(DistanceUnit.parse(fieldNode.toString(), + DistanceUnit.DEFAULT, DistanceUnit.DEFAULT)); + iterator.remove(); + } else if (Names.DISTANCE_ERROR_PCT.equals(fieldName)) { + builder.fieldType().setDistanceErrorPct(Double.parseDouble(fieldNode.toString())); + iterator.remove(); + } else if (Names.ORIENTATION.equals(fieldName)) { + builder.fieldType().setOrientation(ShapeBuilder.Orientation.fromString(fieldNode.toString())); + iterator.remove(); + } else if (Names.STRATEGY.equals(fieldName)) { + builder.fieldType().setStrategyName(fieldNode.toString()); + iterator.remove(); + } else if (IGNORE_MALFORMED.equals(fieldName)) { + builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(fieldNode, name + ".ignore_malformed")); + iterator.remove(); + } else if (Names.COERCE.equals(fieldName)) { + builder.coerce(XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.COERCE)); + iterator.remove(); + } else if (GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName().equals(fieldName)) { + builder.ignoreZValue(XContentMapValues.nodeBooleanValue(fieldNode, + name + "." + GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName())); + iterator.remove(); + } else if (Names.STRATEGY_POINTS_ONLY.equals(fieldName)) { + pointsOnly = XContentMapValues.nodeBooleanValue(fieldNode, name + "." + Names.STRATEGY_POINTS_ONLY); + iterator.remove(); + } + } + if (pointsOnly != null) { + if (builder.fieldType().strategyName.equals(SpatialStrategy.TERM.getStrategyName()) && pointsOnly == false) { + throw new IllegalArgumentException("points_only cannot be set to false for term strategy"); + } else { + builder.fieldType().setPointsOnly(pointsOnly); + } + } + return builder; } + } + + public static final class GeoShapeFieldType extends MappedFieldType { + + private String tree = Defaults.TREE; + private String strategyName = Defaults.STRATEGY; + private boolean pointsOnly = Defaults.POINTS_ONLY; + private int treeLevels = 0; + private double precisionInMeters = -1; + private Double distanceErrorPct; + private double defaultDistanceErrorPct = 0.0; + private Orientation orientation = Defaults.ORIENTATION; + + // these are built when the field type is frozen + private PrefixTreeStrategy defaultStrategy; + private RecursivePrefixTreeStrategy recursiveStrategy; + private TermQueryPrefixTreeStrategy termStrategy; + + public GeoShapeFieldType() {} protected GeoShapeFieldType(GeoShapeFieldType ref) { super(ref); + this.tree = ref.tree; + this.strategyName = ref.strategyName; + this.pointsOnly = ref.pointsOnly; + this.treeLevels = ref.treeLevels; + this.precisionInMeters = ref.precisionInMeters; + this.distanceErrorPct = ref.distanceErrorPct; + this.defaultDistanceErrorPct = ref.defaultDistanceErrorPct; + this.orientation = ref.orientation; } @Override public GeoShapeFieldType clone() { return new GeoShapeFieldType(this); } + + @Override + public boolean equals(Object o) { + if (!super.equals(o)) return false; + GeoShapeFieldType that = (GeoShapeFieldType) o; + return treeLevels == that.treeLevels && + precisionInMeters == that.precisionInMeters && + defaultDistanceErrorPct == that.defaultDistanceErrorPct && + Objects.equals(tree, that.tree) && + Objects.equals(strategyName, that.strategyName) && + 
pointsOnly == that.pointsOnly && + Objects.equals(distanceErrorPct, that.distanceErrorPct) && + orientation == that.orientation; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), tree, strategyName, pointsOnly, treeLevels, precisionInMeters, distanceErrorPct, + defaultDistanceErrorPct, orientation); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + @Override + public void freeze() { + super.freeze(); + // This is a bit hackish: we need to setup the spatial tree and strategies once the field name is set, which + // must be by the time freeze is called. + SpatialPrefixTree prefixTree; + if ("geohash".equals(tree)) { + prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, + getLevels(treeLevels, precisionInMeters, Defaults.GEOHASH_LEVELS, true)); + } else if ("legacyquadtree".equals(tree)) { + prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, + getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false)); + } else if ("quadtree".equals(tree)) { + prefixTree = new PackedQuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, + getLevels(treeLevels, precisionInMeters, Defaults.QUADTREE_LEVELS, false)); + } else { + throw new IllegalArgumentException("Unknown prefix tree type [" + tree + "]"); + } + + recursiveStrategy = new RecursivePrefixTreeStrategy(prefixTree, name()); + recursiveStrategy.setDistErrPct(distanceErrorPct()); + recursiveStrategy.setPruneLeafyBranches(false); + termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, name()); + termStrategy.setDistErrPct(distanceErrorPct()); + defaultStrategy = resolveStrategy(strategyName); + defaultStrategy.setPointsOnly(pointsOnly); + } + + @Override + public void checkCompatibility(MappedFieldType fieldType, List conflicts) { + super.checkCompatibility(fieldType, conflicts); + GeoShapeFieldType other = (GeoShapeFieldType)fieldType; + // prevent user from changing strategies + if (strategyName().equals(other.strategyName()) == false) { + conflicts.add("mapper [" + name() + "] has different [strategy]"); + } + + // prevent user from changing trees (changes encoding) + if (tree().equals(other.tree()) == false) { + conflicts.add("mapper [" + name() + "] has different [tree]"); + } + + if ((pointsOnly() != other.pointsOnly())) { + conflicts.add("mapper [" + name() + "] has different points_only"); + } + + // TODO we should allow this, but at the moment levels is used to build bookkeeping variables + // in lucene's SpatialPrefixTree implementations, need a patch to correct that first + if (treeLevels() != other.treeLevels()) { + conflicts.add("mapper [" + name() + "] has different [tree_levels]"); + } + if (precisionInMeters() != other.precisionInMeters()) { + conflicts.add("mapper [" + name() + "] has different [precision]"); + } + } + + private static int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) { + if (treeLevels > 0 || precisionInMeters >= 0) { + return Math.max(treeLevels, precisionInMeters >= 0 ? (geoHash ? 
GeoUtils.geoHashLevelsForPrecision(precisionInMeters) + : GeoUtils.quadTreeLevelsForPrecision(precisionInMeters)) : 0); + } + return defaultLevels; + } + + public String tree() { + return tree; + } + + public void setTree(String tree) { + checkIfFrozen(); + this.tree = tree; + } + + public String strategyName() { + return strategyName; + } + + public void setStrategyName(String strategyName) { + checkIfFrozen(); + this.strategyName = strategyName; + if (this.strategyName.equals(SpatialStrategy.TERM.getStrategyName())) { + this.pointsOnly = true; + } + } + + public boolean pointsOnly() { + return pointsOnly; + } + + public void setPointsOnly(boolean pointsOnly) { + checkIfFrozen(); + this.pointsOnly = pointsOnly; + } + public int treeLevels() { + return treeLevels; + } + + public void setTreeLevels(int treeLevels) { + checkIfFrozen(); + this.treeLevels = treeLevels; + } + + public double precisionInMeters() { + return precisionInMeters; + } + + public void setPrecisionInMeters(double precisionInMeters) { + checkIfFrozen(); + this.precisionInMeters = precisionInMeters; + } + + public double distanceErrorPct() { + return distanceErrorPct == null ? defaultDistanceErrorPct : distanceErrorPct; + } + + public void setDistanceErrorPct(double distanceErrorPct) { + checkIfFrozen(); + this.distanceErrorPct = distanceErrorPct; + } + + public void setDefaultDistanceErrorPct(double defaultDistanceErrorPct) { + checkIfFrozen(); + this.defaultDistanceErrorPct = defaultDistanceErrorPct; + } + + public Orientation orientation() { return this.orientation; } + + public void setOrientation(Orientation orientation) { + checkIfFrozen(); + this.orientation = orientation; + } + + public PrefixTreeStrategy defaultStrategy() { + return this.defaultStrategy; + } + + public PrefixTreeStrategy resolveStrategy(SpatialStrategy strategy) { + return resolveStrategy(strategy.getStrategyName()); + } + + public PrefixTreeStrategy resolveStrategy(String strategyName) { + if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) { + return recursiveStrategy; + } + if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) { + return termStrategy; + } + throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]"); + } + + @Override + public Query existsQuery(QueryShardContext context) { + return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name())); + } + + @Override + public Query termQuery(Object value, QueryShardContext context) { + throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead"); + } } - public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, - Explicit ignoreZValue, Settings indexSettings, + protected Explicit coerce; + protected Explicit ignoreMalformed; + protected Explicit ignoreZValue; + + public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, Explicit ignoreMalformed, + Explicit coerce, Explicit ignoreZValue, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, ignoreZValue, indexSettings, - multiFields, copyTo); + super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo); + this.coerce = coerce; + this.ignoreMalformed = ignoreMalformed; + this.ignoreZValue = ignoreZValue; } @Override public GeoShapeFieldType fieldType() { return (GeoShapeFieldType) super.fieldType(); } - 
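getLevels() above keeps the deeper of an explicit tree_levels and the depth implied by precision, delegating the conversion to GeoUtils. A back-of-the-envelope stand-in for the quadtree conversion (constants approximate, helper hypothetical, not the actual GeoUtils code):

    final class QuadTreeLevelsSketch {
        // Smallest quadtree depth whose cell edge is no larger than the requested
        // precision; each level halves the edge. Roughly what
        // GeoUtils.quadTreeLevelsForPrecision computes.
        static int quadTreeLevelsForPrecision(double precisionMeters) {
            final double earthEquatorMeters = 40_075_016.69;   // approximate equatorial circumference
            int levels = 0;
            for (double cell = earthEquatorMeters; cell > precisionMeters; cell /= 2) {
                levels++;
            }
            return levels;   // about 20 for the 50m default used in Defaults above
        }
    }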
- /** parsing logic for {@link LatLonShape} indexing */ @Override public void parse(ParseContext context) throws IOException { try { - Object shape = context.parseExternalValue(Object.class); + Shape shape = context.parseExternalValue(Shape.class); if (shape == null) { ShapeBuilder shapeBuilder = ShapeParser.parse(context.parser(), this); if (shapeBuilder == null) { return; } - shape = shapeBuilder.buildLucene(); + shape = shapeBuilder.buildS4J(); + } + if (fieldType().pointsOnly() == true) { + // index configured for pointsOnly + if (shape instanceof XShapeCollection && XShapeCollection.class.cast(shape).pointsOnly()) { + // MULTIPOINT data: index each point separately + List shapes = ((XShapeCollection) shape).getShapes(); + for (Shape s : shapes) { + indexShape(context, s); + } + return; + } else if (shape instanceof Point == false) { + throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a " + + ((shape instanceof JtsGeometry) ? ((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass()) + + " was found"); + } } indexShape(context, shape); } catch (Exception e) { if (ignoreMalformed.value() == false) { throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(), - fieldType().typeName()); + fieldType().typeName()); } - context.addIgnoredField(fieldType().name()); + context.addIgnoredField(fieldType.name()); } } - private void indexShape(ParseContext context, Object luceneShape) { - if (luceneShape instanceof GeoPoint) { - GeoPoint pt = (GeoPoint) luceneShape; - indexFields(context, LatLonShape.createIndexableFields(name(), pt.lat(), pt.lon())); - } else if (luceneShape instanceof double[]) { - double[] pt = (double[]) luceneShape; - indexFields(context, LatLonShape.createIndexableFields(name(), pt[1], pt[0])); - } else if (luceneShape instanceof Line) { - indexFields(context, LatLonShape.createIndexableFields(name(), (Line)luceneShape)); - } else if (luceneShape instanceof Polygon) { - indexFields(context, LatLonShape.createIndexableFields(name(), (Polygon) luceneShape)); - } else if (luceneShape instanceof double[][]) { - double[][] pts = (double[][])luceneShape; - for (int i = 0; i < pts.length; ++i) { - indexFields(context, LatLonShape.createIndexableFields(name(), pts[i][1], pts[i][0])); + private void indexShape(ParseContext context, Shape shape) { + List fields = new ArrayList<>(Arrays.asList(fieldType().defaultStrategy().createIndexableFields(shape))); + createFieldNamesField(context, fields); + for (IndexableField field : fields) { + context.doc().add(field); + } + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws IOException { + } + + @Override + protected void doMerge(Mapper mergeWith) { + super.doMerge(mergeWith); + + GeoShapeFieldMapper gsfm = (GeoShapeFieldMapper)mergeWith; + if (gsfm.coerce.explicit()) { + this.coerce = gsfm.coerce; + } + if (gsfm.ignoreMalformed.explicit()) { + this.ignoreMalformed = gsfm.ignoreMalformed; + } + if (gsfm.ignoreZValue.explicit()) { + this.ignoreZValue = gsfm.ignoreZValue; + } + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + builder.field("type", contentType()); + + if (includeDefaults || fieldType().tree().equals(Defaults.TREE) == false) { + builder.field(Names.TREE, fieldType().tree()); + } + + if (fieldType().treeLevels() != 0) { + builder.field(Names.TREE_LEVELS, fieldType().treeLevels()); + } else if(includeDefaults 
&& fieldType().precisionInMeters() == -1) { // defaults only make sense if precision is not specified + if ("geohash".equals(fieldType().tree())) { + builder.field(Names.TREE_LEVELS, Defaults.GEOHASH_LEVELS); + } else if ("legacyquadtree".equals(fieldType().tree())) { + builder.field(Names.TREE_LEVELS, Defaults.QUADTREE_LEVELS); + } else if ("quadtree".equals(fieldType().tree())) { + builder.field(Names.TREE_LEVELS, Defaults.QUADTREE_LEVELS); + } else { + throw new IllegalArgumentException("Unknown prefix tree type [" + fieldType().tree() + "]"); } - } else if (luceneShape instanceof Line[]) { - Line[] lines = (Line[]) luceneShape; - for (int i = 0; i < lines.length; ++i) { - indexFields(context, LatLonShape.createIndexableFields(name(), lines[i])); - } - } else if (luceneShape instanceof Polygon[]) { - Polygon[] polys = (Polygon[]) luceneShape; - for (int i = 0; i < polys.length; ++i) { - indexFields(context, LatLonShape.createIndexableFields(name(), polys[i])); - } - } else if (luceneShape instanceof Rectangle) { - // index rectangle as a polygon - Rectangle r = (Rectangle) luceneShape; - Polygon p = new Polygon(new double[]{r.minLat, r.minLat, r.maxLat, r.maxLat, r.minLat}, - new double[]{r.minLon, r.maxLon, r.maxLon, r.minLon, r.minLon}); - indexFields(context, LatLonShape.createIndexableFields(name(), p)); - } else if (luceneShape instanceof Object[]) { - // recurse to index geometry collection - for (Object o : (Object[])luceneShape) { - indexShape(context, o); + } + if (fieldType().precisionInMeters() != -1) { + builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(fieldType().precisionInMeters())); + } else if (includeDefaults && fieldType().treeLevels() == 0) { // defaults only make sense if tree levels are not specified + builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(50)); + } + if (includeDefaults || fieldType().strategyName().equals(Defaults.STRATEGY) == false) { + builder.field(Names.STRATEGY, fieldType().strategyName()); + } + if (includeDefaults || fieldType().distanceErrorPct() != fieldType().defaultDistanceErrorPct) { + builder.field(Names.DISTANCE_ERROR_PCT, fieldType().distanceErrorPct()); + } + if (includeDefaults || fieldType().orientation() != Defaults.ORIENTATION) { + builder.field(Names.ORIENTATION, fieldType().orientation()); + } + if (fieldType().strategyName().equals(SpatialStrategy.TERM.getStrategyName())) { + // For TERMs strategy the defaults for points only change to true + if (includeDefaults || fieldType().pointsOnly() != true) { + builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly()); } } else { - throw new IllegalArgumentException("invalid shape type found [" + luceneShape.getClass() + "] while indexing shape"); + if (includeDefaults || fieldType().pointsOnly() != GeoShapeFieldMapper.Defaults.POINTS_ONLY) { + builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly()); + } + } + if (includeDefaults || coerce.explicit()) { + builder.field(Names.COERCE, coerce.value()); + } + if (includeDefaults || ignoreMalformed.explicit()) { + builder.field(IGNORE_MALFORMED, ignoreMalformed.value()); + } + if (includeDefaults || ignoreZValue.explicit()) { + builder.field(GeoPointFieldMapper.Names.IGNORE_Z_VALUE.getPreferredName(), ignoreZValue.value()); } } - private void indexFields(ParseContext context, Field[] fields) { - ArrayList flist = new ArrayList<>(Arrays.asList(fields)); - createFieldNamesField(context, flist); - for (IndexableField f : flist) { - context.doc().add(f); - } + public Explicit coerce() { + 
return coerce; + } + + public Explicit ignoreMalformed() { + return ignoreMalformed; + } + + public Explicit ignoreZValue() { + return ignoreZValue; + } + + @Override + protected String contentType() { + return CONTENT_TYPE; } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java deleted file mode 100644 index b68e48305b2..00000000000 --- a/server/src/main/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapper.java +++ /dev/null @@ -1,596 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.mapper; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; -import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; -import org.apache.lucene.spatial.prefix.TermQueryPrefixTreeStrategy; -import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; -import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree; -import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; -import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.geo.ShapesAvailability; -import org.elasticsearch.common.geo.SpatialStrategy; -import org.elasticsearch.common.geo.XShapeCollection; -import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; -import org.elasticsearch.common.geo.parsers.ShapeParser; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.locationtech.spatial4j.shape.Point; -import org.locationtech.spatial4j.shape.Shape; -import org.locationtech.spatial4j.shape.jts.JtsGeometry; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s. - *
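indexShape() above hands tokenization entirely to the Lucene strategy: createIndexableFields() expands a shape into the terms for its covering grid cells. A minimal sketch of that call path outside the mapper, assuming lucene-spatial-extras and spatial4j on the classpath (field name and helper hypothetical):

    import org.apache.lucene.document.Field;
    import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
    import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
    import org.locationtech.spatial4j.context.SpatialContext;
    import org.locationtech.spatial4j.shape.Point;

    final class StrategyIndexingSketch {
        static Field[] cellsFor(double lon, double lat) {
            SpatialContext ctx = SpatialContext.GEO;
            GeohashPrefixTree grid = new GeohashPrefixTree(ctx, 9);       // geohash cells, 9 levels deep
            RecursivePrefixTreeStrategy strategy = new RecursivePrefixTreeStrategy(grid, "location");
            strategy.setDistErrPct(0.025);                                // cf. LEGACY_DISTANCE_ERROR_PCT
            Point point = ctx.getShapeFactory().pointXY(lon, lat);
            return strategy.createIndexableFields(point);                 // field(s) carrying the cell token stream
        }
    }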
<p>
- * Currently Shapes can only be indexed and can only be queried using - * {@link org.elasticsearch.index.query.GeoShapeQueryBuilder}, consequently - * a lot of behavior in this Mapper is disabled. - *
<p>
- * Format supported: - *
<p>
- * "field" : { - * "type" : "polygon", - * "coordinates" : [ - * [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] - * ] - * } - *
<p>
- * or: - *
<p>
- * "field" : "POLYGON ((100.0 0.0, 101.0 0.0, 101.0 1.0, 100.0 1.0, 100.0 0.0)) - * - * @deprecated use {@link GeoShapeFieldMapper} - */ -@Deprecated -public class LegacyGeoShapeFieldMapper extends BaseGeoShapeFieldMapper { - - public static final String CONTENT_TYPE = "geo_shape"; - - @Deprecated - public static class DeprecatedParameters { - public static class Names { - public static final ParseField STRATEGY = new ParseField("strategy"); - public static final ParseField TREE = new ParseField("tree"); - public static final ParseField TREE_LEVELS = new ParseField("tree_levels"); - public static final ParseField PRECISION = new ParseField("precision"); - public static final ParseField DISTANCE_ERROR_PCT = new ParseField("distance_error_pct"); - public static final ParseField POINTS_ONLY = new ParseField("points_only"); - } - - public static class PrefixTrees { - public static final String LEGACY_QUADTREE = "legacyquadtree"; - public static final String QUADTREE = "quadtree"; - public static final String GEOHASH = "geohash"; - } - - public static class Defaults { - public static final SpatialStrategy STRATEGY = SpatialStrategy.RECURSIVE; - public static final String TREE = "quadtree"; - public static final String PRECISION = "50m"; - public static final int QUADTREE_LEVELS = GeoUtils.quadTreeLevelsForPrecision(PRECISION); - public static final int GEOHASH_TREE_LEVELS = GeoUtils.geoHashLevelsForPrecision(PRECISION); - public static final boolean POINTS_ONLY = false; - public static final double DISTANCE_ERROR_PCT = 0.025d; - } - - public SpatialStrategy strategy = null; - public String tree = null; - public int treeLevels = Integer.MIN_VALUE; - public String precision = null; - public Boolean pointsOnly = null; - public double distanceErrorPct = Double.NaN; - - public void setSpatialStrategy(SpatialStrategy strategy) { - this.strategy = strategy; - } - - public void setTree(String prefixTree) { - this.tree = prefixTree; - } - - public void setTreeLevels(int treeLevels) { - this.treeLevels = treeLevels; - } - - public void setPrecision(String precision) { - this.precision = precision; - } - - public void setPointsOnly(boolean pointsOnly) { - if (this.strategy == SpatialStrategy.TERM && pointsOnly == false) { - throw new ElasticsearchParseException("points_only cannot be set to false for term strategy"); - } - this.pointsOnly = pointsOnly; - } - - public void setDistanceErrorPct(double distanceErrorPct) { - this.distanceErrorPct = distanceErrorPct; - } - - protected void setup() { - if (strategy == null) { - strategy = Defaults.STRATEGY; - } - if (tree == null) { - tree = Defaults.TREE; - } - if (Double.isNaN(distanceErrorPct)) { - if (precision != null || treeLevels != Integer.MIN_VALUE) { - distanceErrorPct = 0d; - } else { - distanceErrorPct = Defaults.DISTANCE_ERROR_PCT; - } - } - if (treeLevels == Integer.MIN_VALUE && precision == null) { - // set default precision if treeLevels is not explicitly set - precision = Defaults.PRECISION; - } - if (treeLevels == Integer.MIN_VALUE) { - if (precision.equals(Defaults.PRECISION)) { - treeLevels = tree.equals(Defaults.TREE) - ? Defaults.QUADTREE_LEVELS - : Defaults.GEOHASH_TREE_LEVELS; - } else { - treeLevels = tree == Defaults.TREE - ? 
GeoUtils.quadTreeLevelsForPrecision(precision) - : GeoUtils.geoHashLevelsForPrecision(precision); - } - } - if (pointsOnly == null) { - if (strategy == SpatialStrategy.TERM) { - pointsOnly = true; - } else { - pointsOnly = Defaults.POINTS_ONLY; - } - } - } - - public static boolean parse(String name, String fieldName, Object fieldNode, DeprecatedParameters deprecatedParameters) { - if (Names.STRATEGY.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { - checkPrefixTreeSupport(fieldName); - deprecatedParameters.setSpatialStrategy(SpatialStrategy.fromString(fieldNode.toString())); - } else if (Names.TREE.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { - checkPrefixTreeSupport(fieldName); - deprecatedParameters.setTree(fieldNode.toString()); - } else if (Names.TREE_LEVELS.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { - checkPrefixTreeSupport(fieldName); - deprecatedParameters.setTreeLevels(Integer.parseInt(fieldNode.toString())); - } else if (Names.PRECISION.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { - checkPrefixTreeSupport(fieldName); - deprecatedParameters.setPrecision(fieldNode.toString()); - } else if (Names.DISTANCE_ERROR_PCT.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { - checkPrefixTreeSupport(fieldName); - deprecatedParameters.setDistanceErrorPct(Double.parseDouble(fieldNode.toString())); - } else if (Names.POINTS_ONLY.match(fieldName, LoggingDeprecationHandler.INSTANCE)) { - checkPrefixTreeSupport(fieldName); - deprecatedParameters.setPointsOnly( - XContentMapValues.nodeBooleanValue(fieldNode, name + "." + DeprecatedParameters.Names.POINTS_ONLY)); - } else { - return false; - } - return true; - } - - private static void checkPrefixTreeSupport(String fieldName) { - if (ShapesAvailability.JTS_AVAILABLE == false || ShapesAvailability.SPATIAL4J_AVAILABLE == false) { - throw new ElasticsearchParseException("Field parameter [{}] is not supported for [{}] field type", - fieldName, CONTENT_TYPE); - } - DEPRECATION_LOGGER.deprecated("Field parameter [{}] is deprecated and will be removed in a future version.", - fieldName); - } - } - - private static final Logger logger = LogManager.getLogger(LegacyGeoShapeFieldMapper.class); - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(logger); - - public static class Builder extends BaseGeoShapeFieldMapper.Builder { - - DeprecatedParameters deprecatedParameters; - - public Builder(String name) { - super(name, new GeoShapeFieldType(), new GeoShapeFieldType()); - this.deprecatedParameters = new DeprecatedParameters(); - this.deprecatedParameters.setup(); - } - - public Builder(String name, boolean coerce, boolean ignoreMalformed, Orientation orientation, - boolean ignoreZ, DeprecatedParameters deprecatedParameters) { - super(name, new GeoShapeFieldType(), new GeoShapeFieldType(), coerce, ignoreMalformed, orientation, ignoreZ); - this.deprecatedParameters = deprecatedParameters; - this.deprecatedParameters.setup(); - } - - @Override - public GeoShapeFieldType fieldType() { - return (GeoShapeFieldType)fieldType; - } - - private void setupFieldTypeDeprecatedParameters() { - GeoShapeFieldType ft = fieldType(); - ft.setStrategy(deprecatedParameters.strategy); - ft.setTree(deprecatedParameters.tree); - ft.setTreeLevels(deprecatedParameters.treeLevels); - if (deprecatedParameters.precision != null) { - // precision is only set iff: a. treeLevel is not explicitly set, b. 
its explicitly set - ft.setPrecisionInMeters(DistanceUnit.parse(deprecatedParameters.precision, - DistanceUnit.DEFAULT, DistanceUnit.DEFAULT)); - } - ft.setDistanceErrorPct(deprecatedParameters.distanceErrorPct); - ft.setPointsOnly(deprecatedParameters.pointsOnly); - } - - private void setupPrefixTrees() { - GeoShapeFieldType ft = fieldType(); - SpatialPrefixTree prefixTree; - if (ft.tree().equals(DeprecatedParameters.PrefixTrees.GEOHASH)) { - prefixTree = new GeohashPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, - getLevels(ft.treeLevels(), ft.precisionInMeters(), DeprecatedParameters.Defaults.GEOHASH_TREE_LEVELS, true)); - } else if (ft.tree().equals(DeprecatedParameters.PrefixTrees.LEGACY_QUADTREE)) { - prefixTree = new QuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, - getLevels(ft.treeLevels(), ft.precisionInMeters(), DeprecatedParameters.Defaults.QUADTREE_LEVELS, false)); - } else if (ft.tree().equals(DeprecatedParameters.PrefixTrees.QUADTREE)) { - prefixTree = new PackedQuadPrefixTree(ShapeBuilder.SPATIAL_CONTEXT, - getLevels(ft.treeLevels(), ft.precisionInMeters(), DeprecatedParameters.Defaults.QUADTREE_LEVELS, false)); - } else { - throw new IllegalArgumentException("Unknown prefix tree type [" + ft.tree() + "]"); - } - - // setup prefix trees regardless of strategy (this is used for the QueryBuilder) - // recursive: - RecursivePrefixTreeStrategy rpts = new RecursivePrefixTreeStrategy(prefixTree, ft.name()); - rpts.setDistErrPct(ft.distanceErrorPct()); - rpts.setPruneLeafyBranches(false); - ft.recursiveStrategy = rpts; - - // term: - TermQueryPrefixTreeStrategy termStrategy = new TermQueryPrefixTreeStrategy(prefixTree, ft.name()); - termStrategy.setDistErrPct(ft.distanceErrorPct()); - ft.termStrategy = termStrategy; - - // set default (based on strategy): - ft.defaultPrefixTreeStrategy = ft.resolvePrefixTreeStrategy(ft.strategy()); - ft.defaultPrefixTreeStrategy.setPointsOnly(ft.pointsOnly()); - } - - @Override - protected void setupFieldType(BuilderContext context) { - super.setupFieldType(context); - - // field mapper handles this at build time - // but prefix tree strategies require a name, so throw a similar exception - if (fieldType().name().isEmpty()) { - throw new IllegalArgumentException("name cannot be empty string"); - } - - // setup the deprecated parameters and the prefix tree configuration - setupFieldTypeDeprecatedParameters(); - setupPrefixTrees(); - } - - private static int getLevels(int treeLevels, double precisionInMeters, int defaultLevels, boolean geoHash) { - if (treeLevels > 0 || precisionInMeters >= 0) { - return Math.max(treeLevels, precisionInMeters >= 0 ? (geoHash ? 
GeoUtils.geoHashLevelsForPrecision(precisionInMeters) - : GeoUtils.quadTreeLevelsForPrecision(precisionInMeters)) : 0); - } - return defaultLevels; - } - - @Override - public LegacyGeoShapeFieldMapper build(BuilderContext context) { - setupFieldType(context); - - return new LegacyGeoShapeFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), - coerce(context), orientation(), ignoreZValue(), context.indexSettings(), - multiFieldsBuilder.build(this, context), copyTo); - } - } - - public static final class GeoShapeFieldType extends BaseGeoShapeFieldType { - - private String tree = DeprecatedParameters.Defaults.TREE; - private SpatialStrategy strategy = DeprecatedParameters.Defaults.STRATEGY; - private boolean pointsOnly = DeprecatedParameters.Defaults.POINTS_ONLY; - private int treeLevels = 0; - private double precisionInMeters = -1; - private Double distanceErrorPct; - private double defaultDistanceErrorPct = 0.0; - - // these are built when the field type is frozen - private PrefixTreeStrategy defaultPrefixTreeStrategy; - private RecursivePrefixTreeStrategy recursiveStrategy; - private TermQueryPrefixTreeStrategy termStrategy; - - public GeoShapeFieldType() { - setIndexOptions(IndexOptions.DOCS); - setTokenized(false); - setStored(false); - setStoreTermVectors(false); - setOmitNorms(true); - } - - protected GeoShapeFieldType(GeoShapeFieldType ref) { - super(ref); - this.tree = ref.tree; - this.strategy = ref.strategy; - this.pointsOnly = ref.pointsOnly; - this.treeLevels = ref.treeLevels; - this.precisionInMeters = ref.precisionInMeters; - this.distanceErrorPct = ref.distanceErrorPct; - this.defaultDistanceErrorPct = ref.defaultDistanceErrorPct; - } - - @Override - public GeoShapeFieldType clone() { - return new GeoShapeFieldType(this); - } - - @Override - public boolean equals(Object o) { - if (!super.equals(o)) return false; - GeoShapeFieldType that = (GeoShapeFieldType) o; - return treeLevels == that.treeLevels && - precisionInMeters == that.precisionInMeters && - defaultDistanceErrorPct == that.defaultDistanceErrorPct && - Objects.equals(tree, that.tree) && - Objects.equals(strategy, that.strategy) && - pointsOnly == that.pointsOnly && - Objects.equals(distanceErrorPct, that.distanceErrorPct); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), tree, strategy, pointsOnly, treeLevels, precisionInMeters, distanceErrorPct, - defaultDistanceErrorPct); - } - - @Override - public void checkCompatibility(MappedFieldType fieldType, List conflicts) { - super.checkCompatibility(fieldType, conflicts); - GeoShapeFieldType other = (GeoShapeFieldType)fieldType; - // prevent user from changing strategies - if (strategy() != other.strategy()) { - conflicts.add("mapper [" + name() + "] has different [strategy]"); - } - - // prevent user from changing trees (changes encoding) - if (tree().equals(other.tree()) == false) { - conflicts.add("mapper [" + name() + "] has different [tree]"); - } - - if ((pointsOnly() != other.pointsOnly())) { - conflicts.add("mapper [" + name() + "] has different points_only"); - } - - // TODO we should allow this, but at the moment levels is used to build bookkeeping variables - // in lucene's SpatialPrefixTree implementations, need a patch to correct that first - if (treeLevels() != other.treeLevels()) { - conflicts.add("mapper [" + name() + "] has different [tree_levels]"); - } - if (precisionInMeters() != other.precisionInMeters()) { - conflicts.add("mapper [" + name() + "] has different [precision]"); - } - } - - public 
String tree() { - return tree; - } - - public void setTree(String tree) { - checkIfFrozen(); - this.tree = tree; - } - - public SpatialStrategy strategy() { - return strategy; - } - - public void setStrategy(SpatialStrategy strategy) { - checkIfFrozen(); - this.strategy = strategy; - if (this.strategy.equals(SpatialStrategy.TERM)) { - this.pointsOnly = true; - } - } - - public boolean pointsOnly() { - return pointsOnly; - } - - public void setPointsOnly(boolean pointsOnly) { - checkIfFrozen(); - this.pointsOnly = pointsOnly; - } - public int treeLevels() { - return treeLevels; - } - - public void setTreeLevels(int treeLevels) { - checkIfFrozen(); - this.treeLevels = treeLevels; - } - - public double precisionInMeters() { - return precisionInMeters; - } - - public void setPrecisionInMeters(double precisionInMeters) { - checkIfFrozen(); - this.precisionInMeters = precisionInMeters; - } - - public double distanceErrorPct() { - return distanceErrorPct == null ? defaultDistanceErrorPct : distanceErrorPct; - } - - public void setDistanceErrorPct(double distanceErrorPct) { - checkIfFrozen(); - this.distanceErrorPct = distanceErrorPct; - } - - public void setDefaultDistanceErrorPct(double defaultDistanceErrorPct) { - checkIfFrozen(); - this.defaultDistanceErrorPct = defaultDistanceErrorPct; - } - - public PrefixTreeStrategy defaultPrefixTreeStrategy() { - return this.defaultPrefixTreeStrategy; - } - - public PrefixTreeStrategy resolvePrefixTreeStrategy(SpatialStrategy strategy) { - return resolvePrefixTreeStrategy(strategy.getStrategyName()); - } - - public PrefixTreeStrategy resolvePrefixTreeStrategy(String strategyName) { - if (SpatialStrategy.RECURSIVE.getStrategyName().equals(strategyName)) { - return recursiveStrategy; - } - if (SpatialStrategy.TERM.getStrategyName().equals(strategyName)) { - return termStrategy; - } - throw new IllegalArgumentException("Unknown prefix tree strategy [" + strategyName + "]"); - } - } - - public LegacyGeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Explicit orientation, - Explicit ignoreZValue, Settings indexSettings, - MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, ignoreZValue, indexSettings, - multiFields, copyTo); - } - - @Override - public GeoShapeFieldType fieldType() { - return (GeoShapeFieldType) super.fieldType(); - } - - @Override - public void parse(ParseContext context) throws IOException { - try { - Shape shape = context.parseExternalValue(Shape.class); - if (shape == null) { - ShapeBuilder shapeBuilder = ShapeParser.parse(context.parser(), this); - if (shapeBuilder == null) { - return; - } - shape = shapeBuilder.buildS4J(); - } - if (fieldType().pointsOnly() == true) { - // index configured for pointsOnly - if (shape instanceof XShapeCollection && XShapeCollection.class.cast(shape).pointsOnly()) { - // MULTIPOINT data: index each point separately - List shapes = ((XShapeCollection) shape).getShapes(); - for (Shape s : shapes) { - indexShape(context, s); - } - return; - } else if (shape instanceof Point == false) { - throw new MapperParsingException("[{" + fieldType().name() + "}] is configured for points only but a " - + ((shape instanceof JtsGeometry) ? 
((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass()) - + " was found"); - } - } - indexShape(context, shape); - } catch (Exception e) { - if (ignoreMalformed.value() == false) { - throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(), - fieldType().typeName()); - } - context.addIgnoredField(fieldType.name()); - } - } - - private void indexShape(ParseContext context, Shape shape) { - List fields = new ArrayList<>(Arrays.asList(fieldType().defaultPrefixTreeStrategy().createIndexableFields(shape))); - createFieldNamesField(context, fields); - for (IndexableField field : fields) { - context.doc().add(field); - } - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().tree().equals(DeprecatedParameters.Defaults.TREE) == false) { - builder.field(DeprecatedParameters.Names.TREE.getPreferredName(), fieldType().tree()); - } - - if (fieldType().treeLevels() != 0) { - builder.field(DeprecatedParameters.Names.TREE_LEVELS.getPreferredName(), fieldType().treeLevels()); - } else if(includeDefaults && fieldType().precisionInMeters() == -1) { // defaults only make sense if precision is not specified - if (DeprecatedParameters.PrefixTrees.GEOHASH.equals(fieldType().tree())) { - builder.field(DeprecatedParameters.Names.TREE_LEVELS.getPreferredName(), - DeprecatedParameters.Defaults.GEOHASH_TREE_LEVELS); - } else if (DeprecatedParameters.PrefixTrees.LEGACY_QUADTREE.equals(fieldType().tree())) { - builder.field(DeprecatedParameters.Names.TREE_LEVELS.getPreferredName(), - DeprecatedParameters.Defaults.QUADTREE_LEVELS); - } else if (DeprecatedParameters.PrefixTrees.QUADTREE.equals(fieldType().tree())) { - builder.field(DeprecatedParameters.Names.TREE_LEVELS.getPreferredName(), - DeprecatedParameters.Defaults.QUADTREE_LEVELS); - } else { - throw new IllegalArgumentException("Unknown prefix tree type [" + fieldType().tree() + "]"); - } - } - if (fieldType().precisionInMeters() != -1) { - builder.field(DeprecatedParameters.Names.PRECISION.getPreferredName(), - DistanceUnit.METERS.toString(fieldType().precisionInMeters())); - } else if (includeDefaults && fieldType().treeLevels() == 0) { // defaults only make sense if tree levels are not specified - builder.field(DeprecatedParameters.Names.PRECISION.getPreferredName(), - DistanceUnit.METERS.toString(50)); - } - - builder.field(DeprecatedParameters.Names.STRATEGY.getPreferredName(), fieldType().strategy().getStrategyName()); - - if (includeDefaults || fieldType().distanceErrorPct() != fieldType().defaultDistanceErrorPct) { - builder.field(DeprecatedParameters.Names.DISTANCE_ERROR_PCT.getPreferredName(), fieldType().distanceErrorPct()); - } - if (fieldType().strategy() == SpatialStrategy.TERM) { - // For TERMs strategy the defaults for points only change to true - if (includeDefaults || fieldType().pointsOnly() != true) { - builder.field(DeprecatedParameters.Names.POINTS_ONLY.getPreferredName(), fieldType().pointsOnly()); - } - } else { - if (includeDefaults || fieldType().pointsOnly() != DeprecatedParameters.Defaults.POINTS_ONLY) { - builder.field(DeprecatedParameters.Names.POINTS_ONLY.getPreferredName(), fieldType().pointsOnly()); - } - } - } -} diff --git a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 
6ee0f3f10dd..c5170508969 100644 --- a/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -19,10 +19,6 @@ package org.elasticsearch.index.query; -import org.apache.lucene.document.LatLonShape; -import org.apache.lucene.geo.Line; -import org.apache.lucene.geo.Polygon; -import org.apache.lucene.geo.Rectangle; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -40,9 +36,8 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.geo.parsers.ShapeParser; @@ -53,8 +48,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper; -import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper; +import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -335,9 +329,9 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder listener) { + if (ShapesAvailability.JTS_AVAILABLE == false) { + throw new IllegalStateException("JTS not available"); + } getRequest.preference("_local"); client.get(getRequest, new ActionListener(){ diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java index 24b5d7f427c..a1038853c06 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -25,13 +25,13 @@ import org.elasticsearch.action.admin.indices.rollover.MaxDocsCondition; import org.elasticsearch.action.admin.indices.rollover.MaxSizeCondition; import org.elasticsearch.action.resync.TransportResyncReplicationAction; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineFactory; -import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper; import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.CompletionFieldMapper; @@ -39,6 +39,7 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.FieldAliasMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.GeoPointFieldMapper; +import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import 
diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java
index 24b5d7f427c..a1038853c06 100644
--- a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java
+++ b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java
@@ -25,13 +25,13 @@ import org.elasticsearch.action.admin.indices.rollover.MaxDocsCondition;
 import org.elasticsearch.action.admin.indices.rollover.MaxSizeCondition;
 import org.elasticsearch.action.resync.TransportResyncReplicationAction;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.geo.ShapesAvailability;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.engine.EngineFactory;
-import org.elasticsearch.index.mapper.BaseGeoShapeFieldMapper;
 import org.elasticsearch.index.mapper.BinaryFieldMapper;
 import org.elasticsearch.index.mapper.BooleanFieldMapper;
 import org.elasticsearch.index.mapper.CompletionFieldMapper;
@@ -39,6 +39,7 @@ import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.FieldAliasMapper;
 import org.elasticsearch.index.mapper.FieldNamesFieldMapper;
 import org.elasticsearch.index.mapper.GeoPointFieldMapper;
+import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
 import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.mapper.IgnoredFieldMapper;
 import org.elasticsearch.index.mapper.IndexFieldMapper;
@@ -131,7 +132,10 @@ public class IndicesModule extends AbstractModule {
         mappers.put(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser());
         mappers.put(FieldAliasMapper.CONTENT_TYPE, new FieldAliasMapper.TypeParser());
         mappers.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser());
-        mappers.put(BaseGeoShapeFieldMapper.CONTENT_TYPE, new BaseGeoShapeFieldMapper.TypeParser());
+
+        if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) {
+            mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser());
+        }
 
         for (MapperPlugin mapperPlugin : mapperPlugins) {
             for (Map.Entry<String, Mapper.TypeParser> entry : mapperPlugin.getMappers().entrySet()) {
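With this hunk, IndicesModule exposes the geo_shape parser only when both spatial libraries are on the classpath. A self-contained sketch of that conditional-registration pattern; builtInMappers is a hypothetical stand-in for the private method the hunk touches:

    import java.util.LinkedHashMap;
    import java.util.Map;
    import org.elasticsearch.common.geo.ShapesAvailability;
    import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
    import org.elasticsearch.index.mapper.Mapper;

    public final class MapperRegistrationSketch {
        static Map<String, Mapper.TypeParser> builtInMappers() {
            Map<String, Mapper.TypeParser> mappers = new LinkedHashMap<>();
            // geo_shape is only usable when both Spatial4j and JTS are present,
            // so its parser is registered conditionally; indices without the
            // libraries simply never see the type.
            if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) {
                mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser());
            }
            return mappers;
        }
    }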
diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java
index 2acabee8797..a9a21054906 100644
--- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java
+++ b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java
@@ -32,7 +32,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.mapper.ContentPath;
-import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper;
+import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.test.VersionUtils;
 import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions;
@@ -296,8 +296,7 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase {
         LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
         Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null);
         Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath());
-        final LegacyGeoShapeFieldMapper mapperBuilder =
-            (LegacyGeoShapeFieldMapper) (new LegacyGeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext));
+        final GeoShapeFieldMapper mapperBuilder = new GeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext);
         try (XContentParser parser = createParser(polygonGeoJson)) {
             parser.nextToken();
             ElasticsearchGeoAssertions.assertEquals(jtsGeom(expected), ShapeParser.parse(parser, mapperBuilder).buildS4J());
@@ -897,6 +896,7 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase {
                 .startArray().value(101.0).value(1.0).endArray()
                 .endArray()
                 .endObject();
+
         ShapeCollection expected = shapeCollection(
             SPATIAL_CONTEXT.makePoint(100, 0),
             SPATIAL_CONTEXT.makePoint(101, 1.0));
@@ -968,6 +968,7 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase {
         shellCoordinates.add(new Coordinate(102, 2));
         shellCoordinates.add(new Coordinate(102, 3));
+
         shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
         Polygon withoutHoles = GEOMETRY_FACTORY.createPolygon(shell, null);
@@ -1148,6 +1149,7 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase {
                 .startObject("nested").startArray("coordinates").value(200.0).value(0.0).endArray().endObject()
                 .startObject("lala").field("type", "NotAPoint").endObject()
                 .endObject();
+
         Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0));
 
         assertGeometryEquals(new JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson, true);
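The parser tests above build their expected geometries straight from JTS. A self-contained sketch of that fixture idiom, using the same closed-shell convention; the coordinates are illustrative:

    import org.locationtech.jts.geom.Coordinate;
    import org.locationtech.jts.geom.GeometryFactory;
    import org.locationtech.jts.geom.LinearRing;
    import org.locationtech.jts.geom.Polygon;

    public final class JtsFixtureSketch {
        static Polygon squarePolygon() {
            GeometryFactory factory = new GeometryFactory();
            // A linear ring must be closed: the first coordinate is repeated last.
            Coordinate[] shell = new Coordinate[] {
                new Coordinate(100, 0), new Coordinate(101, 0),
                new Coordinate(101, 1), new Coordinate(100, 1),
                new Coordinate(100, 0)
            };
            LinearRing ring = factory.createLinearRing(shell);
            return factory.createPolygon(ring, null); // null = no holes
        }
    }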
GeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext); ShapeBuilder shapeBuilder = ShapeParser.parse(parser, mapperBuilder); assertEquals(shapeBuilder.numDimensions(), 3); @@ -375,14 +372,12 @@ public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase { .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()).build(); Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath()); - final LegacyGeoShapeFieldMapper defaultMapperBuilder = - (LegacyGeoShapeFieldMapper)(new LegacyGeoShapeFieldMapper.Builder("test").coerce(false).build(mockBuilderContext)); + final GeoShapeFieldMapper defaultMapperBuilder = new GeoShapeFieldMapper.Builder("test").coerce(false).build(mockBuilderContext); ElasticsearchParseException exception = expectThrows(ElasticsearchParseException.class, () -> ShapeParser.parse(parser, defaultMapperBuilder)); assertEquals("invalid LinearRing found (coordinates are not closed)", exception.getMessage()); - final LegacyGeoShapeFieldMapper coercingMapperBuilder = - (LegacyGeoShapeFieldMapper)(new LegacyGeoShapeFieldMapper.Builder("test").coerce(true).build(mockBuilderContext)); + final GeoShapeFieldMapper coercingMapperBuilder = new GeoShapeFieldMapper.Builder("test").coerce(true).build(mockBuilderContext); ShapeBuilder shapeBuilder = ShapeParser.parse(parser, coercingMapperBuilder); assertNotNull(shapeBuilder); assertEquals("polygon ((100.0 5.0, 100.0 10.0, 90.0 10.0, 90.0 5.0, 100.0 5.0))", shapeBuilder.toWKT()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java index 20c49c00935..0e6854c41e3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java @@ -24,8 +24,8 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.elasticsearch.Version; import org.elasticsearch.common.geo.builders.PointBuilder; +import org.locationtech.spatial4j.shape.Point; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; @@ -63,7 +63,6 @@ public class ExternalMapper extends FieldMapper { private BooleanFieldMapper.Builder boolBuilder = new BooleanFieldMapper.Builder(Names.FIELD_BOOL); private GeoPointFieldMapper.Builder latLonPointBuilder = new GeoPointFieldMapper.Builder(Names.FIELD_POINT); private GeoShapeFieldMapper.Builder shapeBuilder = new GeoShapeFieldMapper.Builder(Names.FIELD_SHAPE); - private LegacyGeoShapeFieldMapper.Builder legacyShapeBuilder = new LegacyGeoShapeFieldMapper.Builder(Names.FIELD_SHAPE); private Mapper.Builder stringBuilder; private String generatedValue; private String mapperName; @@ -87,9 +86,7 @@ public class ExternalMapper extends FieldMapper { BinaryFieldMapper binMapper = binBuilder.build(context); BooleanFieldMapper boolMapper = boolBuilder.build(context); GeoPointFieldMapper pointMapper = latLonPointBuilder.build(context); - BaseGeoShapeFieldMapper shapeMapper = (context.indexCreatedVersion().before(Version.V_6_6_0)) - ? 
legacyShapeBuilder.build(context) - : shapeBuilder.build(context); + GeoShapeFieldMapper shapeMapper = shapeBuilder.build(context); FieldMapper stringMapper = (FieldMapper)stringBuilder.build(context); context.path().remove(); @@ -153,13 +150,13 @@ public class ExternalMapper extends FieldMapper { private BinaryFieldMapper binMapper; private BooleanFieldMapper boolMapper; private GeoPointFieldMapper pointMapper; - private BaseGeoShapeFieldMapper shapeMapper; + private GeoShapeFieldMapper shapeMapper; private FieldMapper stringMapper; public ExternalMapper(String simpleName, MappedFieldType fieldType, String generatedValue, String mapperName, BinaryFieldMapper binMapper, BooleanFieldMapper boolMapper, GeoPointFieldMapper pointMapper, - BaseGeoShapeFieldMapper shapeMapper, FieldMapper stringMapper, Settings indexSettings, + GeoShapeFieldMapper shapeMapper, FieldMapper stringMapper, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, new ExternalFieldType(), indexSettings, multiFields, copyTo); this.generatedValue = generatedValue; @@ -185,12 +182,8 @@ public class ExternalMapper extends FieldMapper { pointMapper.parse(context.createExternalValueContext(point)); // Let's add a Dummy Shape - PointBuilder pb = new PointBuilder(-100, 45); - if (shapeMapper instanceof GeoShapeFieldMapper) { - shapeMapper.parse(context.createExternalValueContext(pb.buildLucene())); - } else { - shapeMapper.parse(context.createExternalValueContext(pb.buildS4J())); - } + Point shape = new PointBuilder(-100, 45).buildS4J(); + shapeMapper.parse(context.createExternalValueContext(shape)); context = context.createExternalValueContext(generatedValue); @@ -217,7 +210,7 @@ public class ExternalMapper extends FieldMapper { BinaryFieldMapper binMapperUpdate = (BinaryFieldMapper) binMapper.updateFieldType(fullNameToFieldType); BooleanFieldMapper boolMapperUpdate = (BooleanFieldMapper) boolMapper.updateFieldType(fullNameToFieldType); GeoPointFieldMapper pointMapperUpdate = (GeoPointFieldMapper) pointMapper.updateFieldType(fullNameToFieldType); - BaseGeoShapeFieldMapper shapeMapperUpdate = (BaseGeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType); + GeoShapeFieldMapper shapeMapperUpdate = (GeoShapeFieldMapper) shapeMapper.updateFieldType(fullNameToFieldType); TextFieldMapper stringMapperUpdate = (TextFieldMapper) stringMapper.updateFieldType(fullNameToFieldType); if (update == this && multiFieldsUpdate == multiFields diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java index 6d47e4a784e..e1158f77bd4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java @@ -21,13 +21,12 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.geo.builders.EnvelopeBuilder; +import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.test.ESIntegTestCase; -import org.locationtech.jts.geom.Coordinate; import java.util.Arrays; import 
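With the legacy branch gone, ExternalMapper always hands the shape field a Spatial4j value. A short sketch of that simplified path; only the value construction is shown, the surrounding parse call is elided:

    import org.elasticsearch.common.geo.builders.PointBuilder;
    import org.locationtech.spatial4j.shape.Point;

    public final class ExternalValueSketch {
        static Point dummyShape() {
            // buildS4J() converts the builder into the Spatial4j representation
            // that the prefix-tree strategies index; the removed code chose
            // between buildLucene() and buildS4J() based on the mapper type.
            return new PointBuilder(-100, 45).buildS4J();
        }
    }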
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java
index 6d47e4a784e..e1158f77bd4 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java
@@ -21,13 +21,12 @@ package org.elasticsearch.index.mapper;
 
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.geo.ShapeRelation;
-import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
+import org.elasticsearch.common.geo.builders.PointBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.test.ESIntegTestCase;
-import org.locationtech.jts.geom.Coordinate;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -119,8 +118,7 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase {
         assertThat(response.getHits().getTotalHits().value, equalTo((long) 1));
 
         response = client().prepareSearch("test-idx")
-            .setPostFilter(QueryBuilders.geoShapeQuery("field.shape",
-                new EnvelopeBuilder(new Coordinate(-101, 46), new Coordinate(-99, 44))).relation(ShapeRelation.WITHIN))
+            .setPostFilter(QueryBuilders.geoShapeQuery("field.shape", new PointBuilder(-100, 45)).relation(ShapeRelation.WITHIN))
             .execute().actionGet();
 
         assertThat(response.getHits().getTotalHits().value, equalTo((long) 1));
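The integration test now filters with the indexed dummy point itself rather than an envelope around it. A sketch of the query construction it relies on, with the same field name and coordinates as the test:

    import java.io.IOException;
    import org.elasticsearch.common.geo.ShapeRelation;
    import org.elasticsearch.common.geo.builders.PointBuilder;
    import org.elasticsearch.index.query.GeoShapeQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    public final class GeoShapeQuerySketch {
        static GeoShapeQueryBuilder withinFilter() throws IOException {
            // WITHIN matches documents whose indexed shape lies inside the
            // query shape; with a point as the query shape, only the identical
            // indexed point can match.
            return QueryBuilders.geoShapeQuery("field.shape", new PointBuilder(-100, 45))
                .relation(ShapeRelation.WITHIN);
        }
    }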
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java
index a5e2d7c31af..20e689e9d7e 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java
@@ -18,9 +18,14 @@
  */
 package org.elasticsearch.index.mapper;
 
+import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
+import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
+import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
+import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -37,6 +42,7 @@ import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.not;
 
 public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
 
@@ -47,10 +53,10 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
     public void testDefaultConfiguration() throws IOException {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-            .startObject("properties").startObject("location")
-            .field("type", "geo_shape")
-            .endObject().endObject()
-            .endObject().endObject());
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .endObject().endObject()
+                .endObject().endObject());
 
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type1", new CompressedXContent(mapping));
@@ -58,8 +64,12 @@
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
 
         GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
-        assertThat(geoShapeFieldMapper.fieldType().orientation(),
-            equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION.value()));
+        PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+        assertThat(strategy.getDistErrPct(), equalTo(0.025d));
+        assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
+        assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoShapeFieldMapper.Defaults.GEOHASH_LEVELS));
+        assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(GeoShapeFieldMapper.Defaults.ORIENTATION));
     }
 
     /**
@@ -67,11 +77,11 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
      */
     public void testOrientationParsing() throws IOException {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-            .startObject("properties").startObject("location")
-            .field("type", "geo_shape")
-            .field("orientation", "left")
-            .endObject().endObject()
-            .endObject().endObject());
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("orientation", "left")
+                .endObject().endObject()
+                .endObject().endObject());
 
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type1", new CompressedXContent(mapping));
@@ -85,11 +95,11 @@
 
         // explicit right orientation test
         mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-            .startObject("properties").startObject("location")
-            .field("type", "geo_shape")
-            .field("orientation", "right")
-            .endObject().endObject()
-            .endObject().endObject());
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("orientation", "right")
+                .endObject().endObject()
+                .endObject().endObject());
 
         defaultMapper = createIndex("test2").mapperService().documentMapperParser()
             .parse("type1", new CompressedXContent(mapping));
@@ -107,11 +117,11 @@
      */
     public void testCoerceParsing() throws IOException {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-            .startObject("properties").startObject("location")
-            .field("type", "geo_shape")
-            .field("coerce", "true")
-            .endObject().endObject()
-            .endObject().endObject());
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("coerce", "true")
+                .endObject().endObject()
+                .endObject().endObject());
 
         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
             .parse("type1", new CompressedXContent(mapping));
@@ -123,11 +133,11 @@
 
         // explicit false coerce test
         mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-            .startObject("properties").startObject("location")
-            .field("type", "geo_shape")
-            .field("coerce", "false")
-            .endObject().endObject()
-            .endObject().endObject());
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("coerce", "false")
+                .endObject().endObject()
+                .endObject().endObject());
 
         defaultMapper = createIndex("test2").mapperService().documentMapperParser()
             .parse("type1", new CompressedXContent(mapping));
@@ -136,7 +146,6 @@
 
         coerce = ((GeoShapeFieldMapper)fieldMapper).coerce().value();
         assertThat(coerce, equalTo(false));
-        assertFieldWarnings("tree");
     }
 
@@ -213,45 +222,304 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
         assertThat(ignoreMalformed.value(), equalTo(false));
     }
 
+    public void testGeohashConfiguration() throws IOException {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("tree", "geohash")
+                .field("tree_levels", "4")
+                .field("distance_error_pct", "0.1")
+                .endObject().endObject()
+                .endObject().endObject());
 
-    private void assertFieldWarnings(String... fieldNames) {
-        String[] warnings = new String[fieldNames.length];
-        for (int i = 0; i < fieldNames.length; ++i) {
-            warnings[i] = "Field parameter [" + fieldNames[i] + "] "
-                + "is deprecated and will be removed in a future version.";
-        }
-        assertWarnings(warnings);
-    }
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
+            .parse("type1", new CompressedXContent(mapping));
+        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+        assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+        GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+        PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+        assertThat(strategy.getDistErrPct(), equalTo(0.1));
+        assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
+        assertThat(strategy.getGrid().getMaxLevels(), equalTo(4));
+    }
+
+    public void testQuadtreeConfiguration() throws IOException {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("tree", "quadtree")
+                .field("tree_levels", "6")
+                .field("distance_error_pct", "0.5")
+                .field("points_only", true)
+                .endObject().endObject()
+                .endObject().endObject());
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
+            .parse("type1", new CompressedXContent(mapping));
+        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+        assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+        GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+        PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+        assertThat(strategy.getDistErrPct(), equalTo(0.5));
+        assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
+        assertThat(strategy.getGrid().getMaxLevels(), equalTo(6));
+        assertThat(strategy.isPointsOnly(), equalTo(true));
+    }
+
+    public void testLevelPrecisionConfiguration() throws IOException {
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                    .startObject("properties").startObject("location")
+                    .field("type", "geo_shape")
+                    .field("tree", "quadtree")
+                    .field("tree_levels", "6")
+                    .field("precision", "70m")
+                    .field("distance_error_pct", "0.5")
+                    .endObject().endObject()
+                    .endObject().endObject());
+
+
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+            assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+            GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+            assertThat(strategy.getDistErrPct(), equalTo(0.5));
+            assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
+            // 70m is more precise so it wins
+            assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(70d)));
+        }
+
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                    .startObject("properties").startObject("location")
+                    .field("type", "geo_shape")
+                    .field("tree", "quadtree")
+                    .field("tree_levels", "26")
+                    .field("precision", "70m")
+                    .endObject().endObject()
+                    .endObject().endObject());
+
+
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+            assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+            GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+            // distance_error_pct was not specified so we expect the mapper to take the highest precision between "precision" and
+            // "tree_levels" setting distErrPct to 0 to guarantee desired precision
+            assertThat(strategy.getDistErrPct(), equalTo(0.0));
+            assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
+            // 70m is less precise so it loses
+            assertThat(strategy.getGrid().getMaxLevels(), equalTo(26));
+        }
+
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                    .startObject("properties").startObject("location")
+                    .field("type", "geo_shape")
+                    .field("tree", "geohash")
+                    .field("tree_levels", "6")
+                    .field("precision", "70m")
+                    .field("distance_error_pct", "0.5")
+                    .endObject().endObject()
+                    .endObject().endObject());
+
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+            assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+            GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+            assertThat(strategy.getDistErrPct(), equalTo(0.5));
+            assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
+            // 70m is more precise so it wins
+            assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(70d)));
+        }
+
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                    .startObject("properties").startObject("location")
+                    .field("type", "geo_shape")
+                    .field("tree", "geohash")
+                    .field("tree_levels", GeoUtils.geoHashLevelsForPrecision(70d)+1)
+                    .field("precision", "70m")
+                    .field("distance_error_pct", "0.5")
+                    .endObject().endObject()
+                    .endObject().endObject());
+
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+            assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+            GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+            assertThat(strategy.getDistErrPct(), equalTo(0.5));
+            assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
+            assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(70d)+1));
+        }
+
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                    .startObject("properties").startObject("location")
+                    .field("type", "geo_shape")
+                    .field("tree", "quadtree")
+                    .field("tree_levels", GeoUtils.quadTreeLevelsForPrecision(70d)+1)
+                    .field("precision", "70m")
+                    .field("distance_error_pct", "0.5")
+                    .endObject().endObject()
+                    .endObject().endObject());
+
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+            assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+            GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+            assertThat(strategy.getDistErrPct(), equalTo(0.5));
+            assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
+            assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(70d)+1));
+        }
+    }
+
+    public void testPointsOnlyOption() throws IOException {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("tree", "geohash")
+                .field("points_only", true)
+                .endObject().endObject()
+                .endObject().endObject());
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
+            .parse("type1", new CompressedXContent(mapping));
+        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+        assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+        GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+        PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+        assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
+        assertThat(strategy.isPointsOnly(), equalTo(true));
+    }
+
+    public void testLevelDefaults() throws IOException {
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                    .startObject("properties").startObject("location")
+                    .field("type", "geo_shape")
+                    .field("tree", "quadtree")
+                    .field("distance_error_pct", "0.5")
+                    .endObject().endObject()
+                    .endObject().endObject());
+
+
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+            assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+            GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+            assertThat(strategy.getDistErrPct(), equalTo(0.5));
+            assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
+            /* 50m is default */
+            assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(50d)));
+        }
+
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                    .startObject("properties").startObject("location")
+                    .field("type", "geo_shape")
+                    .field("tree", "geohash")
+                    .field("distance_error_pct", "0.5")
+                    .endObject().endObject()
+                    .endObject().endObject());
+
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+            assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+            GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+            assertThat(strategy.getDistErrPct(), equalTo(0.5));
+            assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
+            /* 50m is default */
+            assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(50d)));
+        }
+    }
 
     public void testGeoShapeMapperMerge() throws Exception {
         String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
-            .startObject("shape").field("type", "geo_shape")
-            .field("orientation", "ccw")
-            .endObject().endObject().endObject().endObject());
+                .startObject("shape").field("type", "geo_shape").field("tree", "geohash")
+                .field("strategy", "recursive")
+                .field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01)
+                .field("orientation", "ccw")
+                .endObject().endObject().endObject().endObject());
         MapperService mapperService = createIndex("test").mapperService();
         DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping),
             MapperService.MergeReason.MAPPING_UPDATE);
         String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("shape").field("type", "geo_shape")
-            .field("orientation", "cw").endObject().endObject().endObject().endObject());
-        mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
+                .startObject("properties").startObject("shape").field("type", "geo_shape")
+                .field("tree", "quadtree")
+                .field("strategy", "term").field("precision", "1km")
+                .field("tree_levels", 26).field("distance_error_pct", 26)
+                .field("orientation", "cw").endObject().endObject().endObject().endObject());
+        try {
+            mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
+            fail();
+        } catch (IllegalArgumentException e) {
+            assertThat(e.getMessage(), containsString("mapper [shape] has different [strategy]"));
+            assertThat(e.getMessage(), containsString("mapper [shape] has different [tree]"));
+            assertThat(e.getMessage(), containsString("mapper [shape] has different [tree_levels]"));
+            assertThat(e.getMessage(), containsString("mapper [shape] has different [precision]"));
+        }
 
         // verify nothing changed
         Mapper fieldMapper = docMapper.mappers().getMapper("shape");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
 
         GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+        PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+        assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class));
+        assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
+        assertThat(strategy.getDistErrPct(), equalTo(0.01));
+        assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(1d)));
         assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW));
 
-        // change mapping; orientation
+        // correct mapping
         stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
-            .startObject("properties").startObject("shape").field("type", "geo_shape")
-            .field("orientation", "cw").endObject().endObject().endObject().endObject());
+                .startObject("properties").startObject("shape").field("type", "geo_shape").field("precision", "1m")
+                .field("tree_levels", 8).field("distance_error_pct", 0.001)
+                .field("orientation", "cw").endObject().endObject().endObject().endObject());
         docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
 
         fieldMapper = docMapper.mappers().getMapper("shape");
         assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
 
         geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+        strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+        assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class));
+        assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
+        assertThat(strategy.getDistErrPct(), equalTo(0.001));
+        assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(1d)));
         assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CW));
     }
 
@@ -276,12 +544,112 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase {
             String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
                 .startObject("properties").startObject("location")
                 .field("type", "geo_shape")
+                .field("tree", "quadtree")
                 .endObject().endObject()
                 .endObject().endObject());
             DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
             String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
-            assertTrue(serialized, serialized.contains("\"orientation\":\"" + BaseGeoShapeFieldMapper.Defaults.ORIENTATION.value() + "\""));
+            assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\""));
+            assertTrue(serialized, serialized.contains("\"tree_levels\":21"));
         }
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("tree", "geohash")
+                .endObject().endObject()
+                .endObject().endObject());
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
+            assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\""));
+            assertTrue(serialized, serialized.contains("\"tree_levels\":9"));
+        }
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("tree", "quadtree")
+                .field("tree_levels", "6")
+                .endObject().endObject()
+                .endObject().endObject());
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
+            assertFalse(serialized, serialized.contains("\"precision\":"));
+            assertTrue(serialized, serialized.contains("\"tree_levels\":6"));
+        }
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("tree", "quadtree")
+                .field("precision", "6")
+                .endObject().endObject()
+                .endObject().endObject());
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
+            assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\""));
+            assertFalse(serialized, serialized.contains("\"tree_levels\":"));
+        }
+        {
+            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("tree", "quadtree")
+                .field("precision", "6m")
+                .field("tree_levels", "5")
+                .endObject().endObject()
+                .endObject().endObject());
+            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
+            String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location"));
+            assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\""));
+            assertTrue(serialized, serialized.contains("\"tree_levels\":5"));
+        }
+    }
+
+    public void testPointsOnlyDefaultsWithTermStrategy() throws IOException {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("tree", "quadtree")
+                .field("precision", "10m")
+                .field("strategy", "term")
+                .endObject().endObject()
+                .endObject().endObject());
+
+        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
+            .parse("type1", new CompressedXContent(mapping));
+        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
+        assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class));
+
+        GeoShapeFieldMapper geoShapeFieldMapper = (GeoShapeFieldMapper) fieldMapper;
+        PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultStrategy();
+
+        assertThat(strategy.getDistErrPct(), equalTo(0.0));
+        assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
+        assertThat(strategy.getGrid().getMaxLevels(), equalTo(23));
+        assertThat(strategy.isPointsOnly(), equalTo(true));
+        // term strategy changes the default for points_only, check that we handle it correctly
+        assertThat(toXContentString(geoShapeFieldMapper, false), not(containsString("points_only")));
+    }
+
+
+    public void testPointsOnlyFalseWithTermStrategy() throws Exception {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
+                .startObject("properties").startObject("location")
+                .field("type", "geo_shape")
+                .field("tree", "quadtree")
+                .field("precision", "10m")
+                .field("strategy", "term")
+                .field("points_only", false)
+                .endObject().endObject()
+                .endObject().endObject());
+
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+            () -> parser.parse("type1", new CompressedXContent(mapping))
+        );
+        assertThat(e.getMessage(), containsString("points_only cannot be set to false for term strategy"));
     }
 
     public String toXContentString(GeoShapeFieldMapper mapper, boolean includeDefaults) throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java
index c10ec5facf8..a1c225f8a06 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldTypeTests.java
@@ -18,23 +18,69 @@
  */
 package org.elasticsearch.index.mapper;
 
+import org.elasticsearch.common.geo.SpatialStrategy;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
 import org.elasticsearch.index.mapper.GeoShapeFieldMapper.GeoShapeFieldType;
 import org.junit.Before;
 
+import java.io.IOException;
+
 public class GeoShapeFieldTypeTests extends FieldTypeTestCase {
     @Override
     protected MappedFieldType createDefaultFieldType() {
-        return new GeoShapeFieldType();
+        return new GeoShapeFieldMapper.GeoShapeFieldType();
     }
 
     @Before
     public void setupProperties() {
-        addModifier(new FieldTypeTestCase.Modifier("orientation", true) {
+        addModifier(new Modifier("tree", false) {
             @Override
             public void modify(MappedFieldType ft) {
-                ((GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT);
+                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setTree("quadtree");
+            }
+        });
+        addModifier(new Modifier("strategy", false) {
+            @Override
+            public void modify(MappedFieldType ft) {
+                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setStrategyName("term");
+            }
+        });
+        addModifier(new Modifier("tree_levels", false) {
+            @Override
+            public void modify(MappedFieldType ft) {
+                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setTreeLevels(10);
+            }
+        });
+        addModifier(new Modifier("precision", false) {
+            @Override
+            public void modify(MappedFieldType ft) {
+                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setPrecisionInMeters(20);
+            }
+        });
+        addModifier(new Modifier("distance_error_pct", true) {
+            @Override
+            public void modify(MappedFieldType ft) {
+                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setDefaultDistanceErrorPct(0.5);
+            }
+        });
+        addModifier(new Modifier("orientation", true) {
+            @Override
+            public void modify(MappedFieldType ft) {
+                ((GeoShapeFieldMapper.GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT);
             }
         });
     }
+
+    /**
+     * Test for {@link GeoShapeFieldType#setStrategyName(String)} that checks that {@link GeoShapeFieldType#pointsOnly()}
+     * gets set as a side effect when using SpatialStrategy.TERM
+     */
+    public void testSetStrategyName() throws IOException {
+        GeoShapeFieldType fieldType = new GeoShapeFieldMapper.GeoShapeFieldType();
+        assertFalse(fieldType.pointsOnly());
+        fieldType.setStrategyName(SpatialStrategy.RECURSIVE.getStrategyName());
+        assertFalse(fieldType.pointsOnly());
+        fieldType.setStrategyName(SpatialStrategy.TERM.getStrategyName());
+        assertTrue(fieldType.pointsOnly());
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java
deleted file mode 100644
index 11d8c72531d..00000000000
--- a/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldMapperTests.java
+++ /dev/null
@@ -1,714 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.index.mapper;
-
-import org.apache.lucene.spatial.prefix.PrefixTreeStrategy;
-import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
-import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
-import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
-import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.common.Explicit;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.compress.CompressedXContent;
-import org.elasticsearch.common.geo.GeoUtils;
-import org.elasticsearch.common.geo.builders.ShapeBuilder;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.test.ESSingleNodeTestCase;
-import org.elasticsearch.test.InternalSettingsPlugin;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-
-import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_VALUE;
-import static org.hamcrest.Matchers.containsString;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
-import static org.hamcrest.Matchers.not;
-
-public class LegacyGeoShapeFieldMapperTests extends ESSingleNodeTestCase {
-
-    @Override
-    protected Collection<Class<? extends Plugin>> getPlugins() {
-        return pluginList(InternalSettingsPlugin.class);
-    }
-
-    public void testDefaultConfiguration() throws IOException {
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("strategy", "recursive")
-                .endObject().endObject()
-                .endObject().endObject());
-
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
-        assertThat(geoShapeFieldMapper.fieldType().tree(),
-            equalTo(LegacyGeoShapeFieldMapper.DeprecatedParameters.Defaults.TREE));
-        assertThat(geoShapeFieldMapper.fieldType().treeLevels(),
-            equalTo(LegacyGeoShapeFieldMapper.DeprecatedParameters.Defaults.QUADTREE_LEVELS));
-        assertThat(geoShapeFieldMapper.fieldType().pointsOnly(),
-            equalTo(LegacyGeoShapeFieldMapper.DeprecatedParameters.Defaults.POINTS_ONLY));
-        assertThat(geoShapeFieldMapper.fieldType().distanceErrorPct(),
-            equalTo(LegacyGeoShapeFieldMapper.DeprecatedParameters.Defaults.DISTANCE_ERROR_PCT));
-        assertThat(geoShapeFieldMapper.fieldType().orientation(),
-            equalTo(LegacyGeoShapeFieldMapper.Defaults.ORIENTATION.value()));
-        assertFieldWarnings("strategy");
-    }
-
-    /**
-     * Test that orientation parameter correctly parses
-     */
-    public void testOrientationParsing() throws IOException {
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("tree", "quadtree")
-                .field("orientation", "left")
-                .endObject().endObject()
-                .endObject().endObject());
-
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        ShapeBuilder.Orientation orientation = ((LegacyGeoShapeFieldMapper)fieldMapper).fieldType().orientation();
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE));
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT));
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW));
-
-        // explicit right orientation test
-        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("tree", "quadtree")
-                .field("orientation", "right")
-                .endObject().endObject()
-                .endObject().endObject());
-
-        defaultMapper = createIndex("test2").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        orientation = ((LegacyGeoShapeFieldMapper)fieldMapper).fieldType().orientation();
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE));
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT));
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW));
-        assertFieldWarnings("tree");
-    }
-
-    /**
-     * Test that coerce parameter correctly parses
-     */
-    public void testCoerceParsing() throws IOException {
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("tree", "quadtree")
-                .field("coerce", "true")
-                .endObject().endObject()
-                .endObject().endObject());
-
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        boolean coerce = ((LegacyGeoShapeFieldMapper)fieldMapper).coerce().value();
-        assertThat(coerce, equalTo(true));
-
-        // explicit false coerce test
-        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("tree", "quadtree")
-                .field("coerce", "false")
-                .endObject().endObject()
-                .endObject().endObject());
-
-        defaultMapper = createIndex("test2").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        coerce = ((LegacyGeoShapeFieldMapper)fieldMapper).coerce().value();
-        assertThat(coerce, equalTo(false));
-        assertFieldWarnings("tree");
-    }
-
-
-    /**
-     * Test that accept_z_value parameter correctly parses
-     */
-    public void testIgnoreZValue() throws IOException {
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("strategy", "recursive")
-                .field(IGNORE_Z_VALUE.getPreferredName(), "true")
-                .endObject().endObject()
-                .endObject().endObject());
-
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        boolean ignoreZValue = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreZValue().value();
-        assertThat(ignoreZValue, equalTo(true));
-
-        // explicit false accept_z_value test
-        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("tree", "quadtree")
-                .field(IGNORE_Z_VALUE.getPreferredName(), "false")
-                .endObject().endObject()
-                .endObject().endObject());
-
-        defaultMapper = createIndex("test2").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        ignoreZValue = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreZValue().value();
-        assertThat(ignoreZValue, equalTo(false));
-        assertFieldWarnings("strategy", "tree");
-    }
-
-    /**
-     * Test that ignore_malformed parameter correctly parses
-     */
-    public void testIgnoreMalformedParsing() throws IOException {
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("tree", "quadtree")
-                .field("ignore_malformed", "true")
-                .endObject().endObject()
-                .endObject().endObject());
-
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        Explicit<Boolean> ignoreMalformed = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreMalformed();
-        assertThat(ignoreMalformed.value(), equalTo(true));
-
-        // explicit false ignore_malformed test
-        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("tree", "quadtree")
-                .field("ignore_malformed", "false")
-                .endObject().endObject()
-                .endObject().endObject());
-
-        defaultMapper = createIndex("test2").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        ignoreMalformed = ((LegacyGeoShapeFieldMapper)fieldMapper).ignoreMalformed();
-        assertThat(ignoreMalformed.explicit(), equalTo(true));
-        assertThat(ignoreMalformed.value(), equalTo(false));
-        assertFieldWarnings("tree");
-    }
-
-    public void testGeohashConfiguration() throws IOException {
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("tree", "geohash")
-                .field("tree_levels", "4")
-                .field("distance_error_pct", "0.1")
-                .endObject().endObject()
-                .endObject().endObject());
-
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
-        PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy();
-
-        assertThat(strategy.getDistErrPct(), equalTo(0.1));
-        assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
-        assertThat(strategy.getGrid().getMaxLevels(), equalTo(4));
-        assertFieldWarnings("tree", "tree_levels", "distance_error_pct");
-    }
-
-    public void testQuadtreeConfiguration() throws IOException {
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                .startObject("properties").startObject("location")
-                .field("type", "geo_shape")
-                .field("tree", "quadtree")
-                .field("tree_levels", "6")
-                .field("distance_error_pct", "0.5")
-                .field("points_only", true)
-                .endObject().endObject()
-                .endObject().endObject());
-
-        DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser()
-            .parse("type1", new CompressedXContent(mapping));
-        Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-        assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-        LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
-        PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy();
-
-        assertThat(strategy.getDistErrPct(), equalTo(0.5));
-        assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
-        assertThat(strategy.getGrid().getMaxLevels(), equalTo(6));
-        assertThat(strategy.isPointsOnly(), equalTo(true));
-        assertFieldWarnings("tree", "tree_levels", "distance_error_pct", "points_only");
-    }
-
-    private void assertFieldWarnings(String... fieldNames) {
-        String[] warnings = new String[fieldNames.length];
-        for (int i = 0; i < fieldNames.length; ++i) {
-            warnings[i] = "Field parameter [" + fieldNames[i] + "] "
-                + "is deprecated and will be removed in a future version.";
-        }
-        assertWarnings(warnings);
-    }
-
-    public void testLevelPrecisionConfiguration() throws IOException {
-        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
-
-        {
-            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                    .startObject("properties").startObject("location")
-                    .field("type", "geo_shape")
-                    .field("tree", "quadtree")
-                    .field("tree_levels", "6")
-                    .field("precision", "70m")
-                    .field("distance_error_pct", "0.5")
-                    .endObject().endObject()
-                    .endObject().endObject());
-
-
-            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
-            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-            assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-            LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
-            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy();
-
-            assertThat(strategy.getDistErrPct(), equalTo(0.5));
-            assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
-            // 70m is more precise so it wins
-            assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(70d)));
-        }
-
-        {
-            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                    .startObject("properties").startObject("location")
-                    .field("type", "geo_shape")
-                    .field("tree", "quadtree")
-                    .field("tree_levels", "26")
-                    .field("precision", "70m")
-                    .endObject().endObject()
-                    .endObject().endObject());
-
-
-            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
-            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-            assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-            LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
-            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy();
-
-            // distance_error_pct was not specified so we expect the mapper to take the highest precision between "precision" and
-            // "tree_levels" setting distErrPct to 0 to guarantee desired precision
-            assertThat(strategy.getDistErrPct(), equalTo(0.0));
-            assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class));
-            // 70m is less precise so it loses
-            assertThat(strategy.getGrid().getMaxLevels(), equalTo(26));
-        }
-
-        {
-            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                    .startObject("properties").startObject("location")
-                    .field("type", "geo_shape")
-                    .field("tree", "geohash")
-                    .field("tree_levels", "6")
-                    .field("precision", "70m")
-                    .field("distance_error_pct", "0.5")
-                    .endObject().endObject()
-                    .endObject().endObject());
-
-            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
-            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-            assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-            LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
-            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy();
-
-            assertThat(strategy.getDistErrPct(), equalTo(0.5));
-            assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
-            // 70m is more precise so it wins
-            assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(70d)));
-        }
-
-        {
-            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                    .startObject("properties").startObject("location")
-                    .field("type", "geo_shape")
-                    .field("tree", "geohash")
-                    .field("tree_levels", GeoUtils.geoHashLevelsForPrecision(70d)+1)
-                    .field("precision", "70m")
-                    .field("distance_error_pct", "0.5")
-                    .endObject().endObject()
-                    .endObject().endObject());
-
-            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
-            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-            assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-            LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper;
-            PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy();
-
-            assertThat(strategy.getDistErrPct(), equalTo(0.5));
-            assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class));
-            assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(70d)+1));
-        }
-
-        {
-            String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
-                    .startObject("properties").startObject("location")
-                    .field("type", "geo_shape")
-                    .field("tree", "quadtree")
-                    .field("tree_levels", GeoUtils.quadTreeLevelsForPrecision(70d)+1)
-                    .field("precision", "70m")
-                    .field("distance_error_pct", "0.5")
-                    .endObject().endObject()
-                    .endObject().endObject());
-
-            DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping));
-            Mapper fieldMapper = defaultMapper.mappers().getMapper("location");
-            assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class));
-
-            LegacyGeoShapeFieldMapper geoShapeFieldMapper
= (LegacyGeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.5)); - assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(70d)+1)); - } - assertFieldWarnings("tree", "tree_levels", "precision", "distance_error_pct"); - } - - public void testPointsOnlyOption() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "geohash") - .field("points_only", true) - .endObject().endObject() - .endObject().endObject()); - - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() - .parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); - - LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); - - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - assertThat(strategy.isPointsOnly(), equalTo(true)); - assertFieldWarnings("tree", "points_only"); - } - - public void testLevelDefaults() throws IOException { - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("distance_error_pct", "0.5") - .endObject().endObject() - .endObject().endObject()); - - - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); - - LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.5)); - assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); - /* 50m is default */ - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.quadTreeLevelsForPrecision(50d))); - } - - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "geohash") - .field("distance_error_pct", "0.5") - .endObject().endObject() - .endObject().endObject()); - - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); - - LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.5)); - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - /* 50m is default */ - assertThat(strategy.getGrid().getMaxLevels(), 
equalTo(GeoUtils.geoHashLevelsForPrecision(50d))); - } - assertFieldWarnings("tree", "distance_error_pct"); - } - - public void testGeoShapeMapperMerge() throws Exception { - String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("shape").field("type", "geo_shape").field("tree", "geohash") - .field("strategy", "recursive") - .field("precision", "1m").field("tree_levels", 8).field("distance_error_pct", 0.01) - .field("orientation", "ccw") - .endObject().endObject().endObject().endObject()); - MapperService mapperService = createIndex("test").mapperService(); - DocumentMapper docMapper = mapperService.merge("type", new CompressedXContent(stage1Mapping), - MapperService.MergeReason.MAPPING_UPDATE); - String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("shape").field("type", "geo_shape") - .field("tree", "quadtree") - .field("strategy", "term").field("precision", "1km") - .field("tree_levels", 26).field("distance_error_pct", 26) - .field("orientation", "cw").endObject().endObject().endObject().endObject()); - try { - mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("mapper [shape] has different [strategy]")); - assertThat(e.getMessage(), containsString("mapper [shape] has different [tree]")); - assertThat(e.getMessage(), containsString("mapper [shape] has different [tree_levels]")); - assertThat(e.getMessage(), containsString("mapper [shape] has different [precision]")); - } - - // verify nothing changed - Mapper fieldMapper = docMapper.mappers().getMapper("shape"); - assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); - - LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); - - assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class)); - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - assertThat(strategy.getDistErrPct(), equalTo(0.01)); - assertThat(strategy.getGrid().getMaxLevels(), equalTo(GeoUtils.geoHashLevelsForPrecision(1d))); - assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CCW)); - - // correct mapping - stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("shape").field("type", "geo_shape") - .field("tree", "geohash") - .field("strategy", "recursive") - .field("precision", "1m") - .field("tree_levels", 8).field("distance_error_pct", 0.001) - .field("orientation", "cw").endObject().endObject().endObject().endObject()); - docMapper = mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); - - fieldMapper = docMapper.mappers().getMapper("shape"); - assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); - - geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; - strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); - - assertThat(strategy, instanceOf(RecursivePrefixTreeStrategy.class)); - assertThat(strategy.getGrid(), instanceOf(GeohashPrefixTree.class)); - assertThat(strategy.getDistErrPct(), equalTo(0.001)); - assertThat(strategy.getGrid().getMaxLevels(), 
equalTo(GeoUtils.geoHashLevelsForPrecision(1d))); - assertThat(geoShapeFieldMapper.fieldType().orientation(), equalTo(ShapeBuilder.Orientation.CW)); - - assertFieldWarnings("tree", "strategy", "precision", "tree_levels", "distance_error_pct"); - } - - public void testEmptyName() throws Exception { - // after 5.x - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("") - .field("type", "geo_shape") - .field("tree", "quadtree") - .endObject().endObject() - .endObject().endObject()); - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type1", new CompressedXContent(mapping)) - ); - assertThat(e.getMessage(), containsString("name cannot be empty string")); - assertFieldWarnings("tree"); - } - - public void testSerializeDefaults() throws Exception { - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .endObject().endObject() - .endObject().endObject()); - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); - assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\"")); - assertTrue(serialized, serialized.contains("\"tree_levels\":21")); - } - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "geohash") - .endObject().endObject() - .endObject().endObject()); - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); - assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\"")); - assertTrue(serialized, serialized.contains("\"tree_levels\":9")); - } - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("tree_levels", "6") - .endObject().endObject() - .endObject().endObject()); - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); - assertFalse(serialized, serialized.contains("\"precision\":")); - assertTrue(serialized, serialized.contains("\"tree_levels\":6")); - } - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("precision", "6") - .endObject().endObject() - .endObject().endObject()); - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); - assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\"")); - 
assertTrue(serialized, serialized.contains("\"tree_levels\":10")); - } - { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("precision", "6m") - .field("tree_levels", "5") - .endObject().endObject() - .endObject().endObject()); - DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); - String serialized = toXContentString((LegacyGeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); - assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\"")); - assertTrue(serialized, serialized.contains("\"tree_levels\":5")); - } - assertFieldWarnings("tree", "tree_levels", "precision"); - } - - public void testPointsOnlyDefaultsWithTermStrategy() throws IOException { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("precision", "10m") - .field("strategy", "term") - .endObject().endObject() - .endObject().endObject()); - - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser() - .parse("type1", new CompressedXContent(mapping)); - Mapper fieldMapper = defaultMapper.mappers().getMapper("location"); - assertThat(fieldMapper, instanceOf(LegacyGeoShapeFieldMapper.class)); - - LegacyGeoShapeFieldMapper geoShapeFieldMapper = (LegacyGeoShapeFieldMapper) fieldMapper; - PrefixTreeStrategy strategy = geoShapeFieldMapper.fieldType().defaultPrefixTreeStrategy(); - - assertThat(strategy.getDistErrPct(), equalTo(0.0)); - assertThat(strategy.getGrid(), instanceOf(QuadPrefixTree.class)); - assertThat(strategy.getGrid().getMaxLevels(), equalTo(23)); - assertThat(strategy.isPointsOnly(), equalTo(true)); - // term strategy changes the default for points_only, check that we handle it correctly - assertThat(toXContentString(geoShapeFieldMapper, false), not(containsString("points_only"))); - assertFieldWarnings("tree", "precision", "strategy"); - } - - - public void testPointsOnlyFalseWithTermStrategy() throws Exception { - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("location") - .field("type", "geo_shape") - .field("tree", "quadtree") - .field("precision", "10m") - .field("strategy", "term") - .field("points_only", false) - .endObject().endObject() - .endObject().endObject()); - - DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, - () -> parser.parse("type1", new CompressedXContent(mapping)) - ); - assertThat(e.getMessage(), containsString("points_only cannot be set to false for term strategy")); - assertFieldWarnings("tree", "precision", "strategy", "points_only"); - } - - public String toXContentString(LegacyGeoShapeFieldMapper mapper, boolean includeDefaults) throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); - ToXContent.Params params; - if (includeDefaults) { - params = new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true")); - } else { - params = ToXContent.EMPTY_PARAMS; - } - mapper.doXContentBody(builder, includeDefaults, params); - return Strings.toString(builder.endObject()); - } - - public String 
toXContentString(LegacyGeoShapeFieldMapper mapper) throws IOException { - return toXContentString(mapper, true); - } - -} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldTypeTests.java deleted file mode 100644 index 2fcbed82e33..00000000000 --- a/server/src/test/java/org/elasticsearch/index/mapper/LegacyGeoShapeFieldTypeTests.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.common.geo.SpatialStrategy; -import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper.GeoShapeFieldType; -import org.junit.Before; - -import java.io.IOException; - -public class LegacyGeoShapeFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new GeoShapeFieldType(); - } - - @Before - public void setupProperties() { - addModifier(new Modifier("tree", false) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldType)ft).setTree("geohash"); - } - }); - addModifier(new Modifier("strategy", false) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldType)ft).setStrategy(SpatialStrategy.TERM); - } - }); - addModifier(new Modifier("tree_levels", false) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldType)ft).setTreeLevels(10); - } - }); - addModifier(new Modifier("precision", false) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldType)ft).setPrecisionInMeters(20); - } - }); - addModifier(new Modifier("distance_error_pct", true) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldType)ft).setDefaultDistanceErrorPct(0.5); - } - }); - addModifier(new Modifier("orientation", true) { - @Override - public void modify(MappedFieldType ft) { - ((GeoShapeFieldType)ft).setOrientation(ShapeBuilder.Orientation.LEFT); - } - }); - } - - /** - * Test for {@link LegacyGeoShapeFieldMapper.GeoShapeFieldType#setStrategy(SpatialStrategy)} that checks - * that {@link LegacyGeoShapeFieldMapper.GeoShapeFieldType#pointsOnly()} gets set as a side effect when using SpatialStrategy.TERM - */ - public void testSetStrategyName() throws IOException { - GeoShapeFieldType fieldType = new GeoShapeFieldType(); - assertFalse(fieldType.pointsOnly()); - fieldType.setStrategy(SpatialStrategy.RECURSIVE); - assertFalse(fieldType.pointsOnly()); - fieldType.setStrategy(SpatialStrategy.TERM); - assertTrue(fieldType.pointsOnly()); - } -} diff --git a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java 
b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index e2e4db1f9b7..bcd2b4ef144 100644 --- a/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. */ + package org.elasticsearch.index.query; import org.apache.lucene.search.BooleanQuery; @@ -28,6 +29,7 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -52,41 +54,29 @@ import static org.hamcrest.Matchers.equalTo; public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase { - protected static String indexedShapeId; - protected static String indexedShapeType; - protected static String indexedShapePath; - protected static String indexedShapeIndex; - protected static String indexedShapeRouting; - protected static ShapeBuilder indexedShapeToReturn; - - @Override - protected boolean enableWarningsCheck() { - return false; - } - - protected String fieldName() { - return GEO_SHAPE_FIELD_NAME; - } + private static String indexedShapeId; + private static String indexedShapeType; + private static String indexedShapePath; + private static String indexedShapeIndex; + private static String indexedShapeRouting; + private static ShapeBuilder indexedShapeToReturn; @Override protected GeoShapeQueryBuilder doCreateTestQueryBuilder() { return doCreateTestQueryBuilder(randomBoolean()); } - - protected GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) { - // LatLonShape does not support MultiPoint queries - RandomShapeGenerator.ShapeType shapeType = - randomFrom(ShapeType.POINT, ShapeType.LINESTRING, ShapeType.MULTILINESTRING, ShapeType.POLYGON); + private GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) { + ShapeType shapeType = ShapeType.randomType(random()); ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType); GeoShapeQueryBuilder builder; clearShapeFields(); if (indexedShape == false) { - builder = new GeoShapeQueryBuilder(fieldName(), shape); + builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape); } else { indexedShapeToReturn = shape; indexedShapeId = randomAlphaOfLengthBetween(3, 20); indexedShapeType = randomAlphaOfLengthBetween(3, 20); - builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId, indexedShapeType); + builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, indexedShapeId, indexedShapeType); if (randomBoolean()) { indexedShapeIndex = randomAlphaOfLengthBetween(3, 20); builder.indexedShapeIndex(indexedShapeIndex); @@ -101,11 +91,15 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase new GeoShapeQueryBuilder(fieldName(), null)); + expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, null)); } public void testNoIndexedShape() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new GeoShapeQueryBuilder(fieldName(), null, "type")); + () -> new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, null, "type")); 
         assertEquals("either shapeBytes or indexedShapeId and indexedShapeType are required", e.getMessage());
     }
 
     public void testNoIndexedShapeType() throws IOException {
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-            () -> new GeoShapeQueryBuilder(fieldName(), "id", null));
+            () -> new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, "id", null));
         assertEquals("indexedShapeType is required if indexedShapeId is specified", e.getMessage());
     }
 
     public void testNoRelation() throws IOException {
         ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
-        GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(fieldName(), shape);
+        GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.relation(null));
         assertEquals("No Shape Relation defined", e.getMessage());
     }
 
+    public void testInvalidRelation() throws IOException {
+        ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
+        GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
+        builder.strategy(SpatialStrategy.TERM);
+        expectThrows(IllegalArgumentException.class, () -> builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)));
+        GeoShapeQueryBuilder builder2 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
+        builder2.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN));
+        expectThrows(IllegalArgumentException.class, () -> builder2.strategy(SpatialStrategy.TERM));
+        GeoShapeQueryBuilder builder3 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
+        builder3.strategy(SpatialStrategy.TERM);
+        expectThrows(IllegalArgumentException.class, () -> builder3.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)));
+    }
+
     // see #3878
     public void testThatXContentSerializationInsideOfArrayWorks() throws Exception {
         EnvelopeBuilder envelopeBuilder = new EnvelopeBuilder(new Coordinate(0, 0), new Coordinate(10, 10));
@@ -198,7 +205,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase
query.toQuery(createShardContext()));
         assertEquals("query must be rewritten first", e.getMessage());
         QueryBuilder rewrite = rewriteAndFetch(query, createShardContext());
-        GeoShapeQueryBuilder geoShapeQueryBuilder = new GeoShapeQueryBuilder(fieldName(), indexedShapeToReturn);
+        GeoShapeQueryBuilder geoShapeQueryBuilder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, indexedShapeToReturn);
         geoShapeQueryBuilder.strategy(query.strategy());
         geoShapeQueryBuilder.relation(query.relation());
         assertEquals(geoShapeQueryBuilder, rewrite);
@@ -237,7 +244,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase
shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType);
-        GeoShapeQueryBuilder builder;
-        clearShapeFields();
-        if (indexedShape == false) {
-            builder = new GeoShapeQueryBuilder(fieldName(), shape);
-        } else {
-            indexedShapeToReturn = shape;
-            indexedShapeId = randomAlphaOfLengthBetween(3, 20);
-            indexedShapeType = randomAlphaOfLengthBetween(3, 20);
-            builder = new GeoShapeQueryBuilder(fieldName(), indexedShapeId, indexedShapeType);
-            if (randomBoolean()) {
-                indexedShapeIndex = randomAlphaOfLengthBetween(3, 20);
-                builder.indexedShapeIndex(indexedShapeIndex);
-            }
-            if (randomBoolean()) {
-                indexedShapePath = randomAlphaOfLengthBetween(3, 20);
-                builder.indexedShapePath(indexedShapePath);
-            }
-            if (randomBoolean()) {
-                indexedShapeRouting = randomAlphaOfLengthBetween(3, 20);
-                builder.indexedShapeRouting(indexedShapeRouting);
-            }
-        }
-        if (randomBoolean()) {
-            SpatialStrategy strategy = randomFrom(SpatialStrategy.values());
-            // ShapeType.MULTILINESTRING + SpatialStrategy.TERM can lead to large queries and will slow down tests, so
-            // we try to avoid that combination
-            while (shapeType == ShapeType.MULTILINESTRING && strategy == SpatialStrategy.TERM) {
-                strategy = randomFrom(SpatialStrategy.values());
-            }
-            builder.strategy(strategy);
-            if (strategy != SpatialStrategy.TERM) {
-                builder.relation(randomFrom(ShapeRelation.values()));
-            }
-        }
-
-        if (randomBoolean()) {
-            builder.ignoreUnmapped(randomBoolean());
-        }
-        return builder;
-    }
-
-    public void testInvalidRelation() throws IOException {
-        ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
-        GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
-        builder.strategy(SpatialStrategy.TERM);
-        expectThrows(IllegalArgumentException.class, () -> builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)));
-        GeoShapeQueryBuilder builder2 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
-        builder2.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN));
-        expectThrows(IllegalArgumentException.class, () -> builder2.strategy(SpatialStrategy.TERM));
-        GeoShapeQueryBuilder builder3 = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
-        builder3.strategy(SpatialStrategy.TERM);
-        expectThrows(IllegalArgumentException.class, () -> builder3.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)));
-    }
-}
diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java
index 184ee2759c1..1067ed62db4 100644
--- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java
@@ -62,7 +62,6 @@ import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.notNullValue;
 
 public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> {
-
     @Override
     protected MatchQueryBuilder doCreateTestQueryBuilder() {
         String fieldName = randomFrom(STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME, BOOLEAN_FIELD_NAME, INT_FIELD_NAME,
diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
index 1c34057457a..70f504516ec 100644
--- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
@@ -1048,12 +1048,6 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase
0);
-    }
-
-    /** tests querying a random geometry collection with a point */
-    public void testPointQuery() throws Exception {
-        // Create a random geometry collection to index.
-        GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(random());
-        double[] pt = new double[] {GeoTestUtil.nextLongitude(), GeoTestUtil.nextLatitude()};
-        PointBuilder pb = new PointBuilder(pt[0], pt[1]);
-        gcb.shape(pb);
-        if (randomBoolean()) {
-            client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape")
-                .execute().actionGet();
-        } else {
-            client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape,tree=quadtree")
-                .execute().actionGet();
-        }
-        XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("location"), null).endObject();
-        client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get();
-
-        GeoShapeQueryBuilder geoShapeQueryBuilder = QueryBuilders.geoShapeQuery("location", pb);
-        geoShapeQueryBuilder.relation(ShapeRelation.INTERSECTS);
-        SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(geoShapeQueryBuilder).get();
+        GeoShapeQueryBuilder filter = QueryBuilders.geoShapeQuery("location", filterShape);
+        filter.relation(ShapeRelation.INTERSECTS);
+        SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(QueryBuilders.matchAllQuery())
+            .setPostFilter(filter).get();
         assertSearchResponse(result);
         assertHitCount(result, 1);
     }
@@ -461,28 +375,6 @@ public class GeoShapeQueryTests extends ESSingleNodeTestCase {
         assertThat(response.getHits().getTotalHits().value, greaterThan(0L));
     }
 
-    public void testExistsQuery() throws Exception {
-        // Create a random geometry collection.
-        GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(random());
-        logger.info("Created Random GeometryCollection containing {} shapes", gcb.numShapes());
-
-        if (randomBoolean()) {
-            client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape")
-                .execute().actionGet();
-        } else {
-            client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape,tree=quadtree")
-                .execute().actionGet();
-        }
-
-        XContentBuilder docSource = gcb.toXContent(jsonBuilder().startObject().field("location"), null).endObject();
-        client().prepareIndex("test", "type", "1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get();
-
-        ExistsQueryBuilder eqb = QueryBuilders.existsQuery("location");
-        SearchResponse result = client().prepareSearch("test").setTypes("type").setQuery(eqb).get();
-        assertSearchResponse(result);
-        assertHitCount(result, 1);
-    }
-
     public void testShapeFilterWithDefinedGeoCollection() throws Exception {
         createIndex("shapes");
         client().admin().indices().prepareCreate("test").addMapping("type", "location", "type=geo_shape,tree=quadtree")
diff --git a/server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java
deleted file mode 100644
index 574bdd46bba..00000000000
--- a/server/src/test/java/org/elasticsearch/search/geo/LegacyGeoShapeIntegrationIT.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.search.geo;
-
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.cluster.ClusterState;
-import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.geo.builders.ShapeBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.IndexService;
-import org.elasticsearch.index.mapper.LegacyGeoShapeFieldMapper;
-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.indices.IndicesService;
-import org.elasticsearch.test.ESIntegTestCase;
-
-import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery;
-import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
-
-public class LegacyGeoShapeIntegrationIT extends ESIntegTestCase {
-
-    /**
-     * Test that orientation parameter correctly persists across cluster restart
-     */
-    public void testOrientationPersistence() throws Exception {
-        String idxName = "orientation";
-        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("shape")
-            .startObject("properties").startObject("location")
-            .field("type", "geo_shape")
-            .field("tree", "quadtree")
-            .field("orientation", "left")
-            .endObject().endObject()
-            .endObject().endObject());
-
-        // create index
-        assertAcked(prepareCreate(idxName).addMapping("shape", mapping, XContentType.JSON));
-
-        mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("shape")
-            .startObject("properties").startObject("location")
-            .field("type", "geo_shape")
-            .field("tree", "quadtree")
-            .field("orientation", "right")
-            .endObject().endObject()
-            .endObject().endObject());
-
-        assertAcked(prepareCreate(idxName+"2").addMapping("shape", mapping, XContentType.JSON));
-        ensureGreen(idxName, idxName+"2");
-
-        internalCluster().fullRestart();
-        ensureGreen(idxName, idxName+"2");
-
-        // left orientation test
-        IndicesService indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName));
-        IndexService indexService = indicesService.indexService(resolveIndex(idxName));
-        MappedFieldType fieldType = indexService.mapperService().fullName("location");
-        assertThat(fieldType, instanceOf(LegacyGeoShapeFieldMapper.GeoShapeFieldType.class));
-
-        LegacyGeoShapeFieldMapper.GeoShapeFieldType gsfm = (LegacyGeoShapeFieldMapper.GeoShapeFieldType)fieldType;
-        ShapeBuilder.Orientation orientation = gsfm.orientation();
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CLOCKWISE));
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.LEFT));
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CW));
-
-        // right orientation test
-        indicesService = internalCluster().getInstance(IndicesService.class, findNodeName(idxName+"2"));
-        indexService = indicesService.indexService(resolveIndex((idxName+"2")));
-        fieldType = indexService.mapperService().fullName("location");
-        assertThat(fieldType, instanceOf(LegacyGeoShapeFieldMapper.GeoShapeFieldType.class));
-
-        gsfm = (LegacyGeoShapeFieldMapper.GeoShapeFieldType)fieldType;
-        orientation = gsfm.orientation();
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.COUNTER_CLOCKWISE));
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.RIGHT));
-        assertThat(orientation, equalTo(ShapeBuilder.Orientation.CCW));
-    }
-
-    /**
-     * Test that ignore_malformed on GeoShapeFieldMapper does not fail the entire document
-     */
-    public void testIgnoreMalformed() throws Exception {
-        // create index
-        assertAcked(client().admin().indices().prepareCreate("test")
-            .addMapping("geometry", "shape", "type=geo_shape,tree=quadtree,ignore_malformed=true").get());
-        ensureGreen();
-
-        // test self crossing ccw poly not crossing dateline
-        String polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
-            .startArray("coordinates")
-            .startArray()
-            .startArray().value(176.0).value(15.0).endArray()
-            .startArray().value(-177.0).value(10.0).endArray()
-            .startArray().value(-177.0).value(-10.0).endArray()
-            .startArray().value(176.0).value(-15.0).endArray()
-            .startArray().value(-177.0).value(15.0).endArray()
-            .startArray().value(172.0).value(0.0).endArray()
-            .startArray().value(176.0).value(15.0).endArray()
-            .endArray()
-            .endArray()
-            .endObject());
-
-        indexRandom(true, client().prepareIndex("test", "geometry", "0").setSource("shape",
-            polygonGeoJson));
-        SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get();
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
-    }
-
-    /**
-     * Test that the indexed shape routing can be provided if it is required
-     */
-    public void testIndexShapeRouting() throws Exception {
-        String mapping = "{\n" +
-            "    \"_routing\": {\n" +
-            "        \"required\": true\n" +
-            "    },\n" +
-            "    \"properties\": {\n" +
-            "        \"shape\": {\n" +
-            "            \"type\": \"geo_shape\",\n" +
-            "            \"tree\" : \"quadtree\"\n" +
-            "        }\n" +
-            "    }\n" +
-            "  }";
-
-
-        // create index
-        assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", mapping, XContentType.JSON).get());
-        ensureGreen();
-
-        String source = "{\n" +
-            "    \"shape\" : {\n" +
-            "        \"type\" : \"bbox\",\n" +
-            "        \"coordinates\" : [[-45.0, 45.0], [45.0, -45.0]]\n" +
-            "    }\n" +
-            "}";
-
-        indexRandom(true, client().prepareIndex("test", "doc", "0").setSource(source, XContentType.JSON).setRouting("ABC"));
-
-        SearchResponse searchResponse = client().prepareSearch("test").setQuery(
-            geoShapeQuery("shape", "0", "doc").indexedShapeIndex("test").indexedShapeRouting("ABC")
-        ).get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
-    }
-
-    private String findNodeName(String index) {
-        ClusterState state = client().admin().cluster().prepareState().get().getState();
-        IndexShardRoutingTable shard = state.getRoutingTable().index(index).shard(0);
-        String nodeId = shard.assignedShards().get(0).currentNodeId();
-        return state.getNodes().get(nodeId).getName();
-    }
-}
diff --git a/server/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/server/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
index 0d964e8eb6f..76d18a59f9f 100644
--- a/server/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
+++ b/server/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java
@@ -32,7 +32,6 @@ import org.elasticsearch.common.geo.builders.MultiPointBuilder;
 import org.elasticsearch.common.geo.builders.PointBuilder;
 import org.elasticsearch.common.geo.builders.PolygonBuilder;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
-import org.elasticsearch.search.geo.GeoShapeQueryTests;
 import org.junit.Assert;
 import org.locationtech.spatial4j.context.jts.JtsSpatialContext;
 import org.locationtech.spatial4j.distance.DistanceUtils;
@@ -154,7 +153,6 @@ public class RandomShapeGenerator extends RandomGeoGenerator {
 
     /**
      * Creates a random shape useful for randomized testing, NOTE: exercise caution when using this to build random GeometryCollections
      * as creating a large random number of random shapes can result in massive resource consumption
-     * see: {@link GeoShapeQueryTests#testQueryRandomGeoCollection()}
      *
      * The following options are included
      * @param nearPoint Create a shape near a provided point
diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
index daf29e46b05..5eef0a249b6 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java
@@ -113,7 +113,6 @@ public abstract class AbstractBuilderTestCase extends ESTestCase {
     protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point";
     protected static final String GEO_POINT_ALIAS_FIELD_NAME = "mapped_geo_point_alias";
     protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape";
-    protected static final String LEGACY_GEO_SHAPE_FIELD_NAME = "mapped_legacy_geo_shape";
     protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, STRING_ALIAS_FIELD_NAME,
         INT_FIELD_NAME, INT_RANGE_FIELD_NAME, DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME,
         OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_POINT_ALIAS_FIELD_NAME,
@@ -218,28 +217,12 @@ public abstract class AbstractBuilderTestCase extends ESTestCase {
                 AbstractBuilderTestCase.this, false);
                 return null;
             });
-            if (enableWarningsCheck() == true) {
-                assertDeprecatedGeoWarnings();
-            }
         }
         serviceHolder.clientInvocationHandler.delegate = this;
         serviceHolderWithNoType.clientInvocationHandler.delegate = this;
     }
 
-    protected void assertDeprecatedGeoWarnings() {
-        String prefix = "Field parameter [";
-        String postfix = "] is deprecated and will be removed in a future version.";
-        String[] deprecationWarnings = new String[] {
-            prefix + "tree" + postfix,
-            prefix + "tree_levels" + postfix,
-            prefix + "precision" + postfix,
-            prefix + "strategy" + postfix,
-            prefix + "distance_error_pct" + postfix
-        };
-        assertWarnings(deprecationWarnings);
-    }
-
     protected static SearchContext getSearchContext(QueryShardContext context) {
         TestSearchContext testSearchContext = new TestSearchContext(context) {
             @Override
@@ -413,8 +396,7 @@ public abstract class AbstractBuilderTestCase extends ESTestCase {
                 OBJECT_FIELD_NAME, "type=object",
                 GEO_POINT_FIELD_NAME, "type=geo_point",
                 GEO_POINT_ALIAS_FIELD_NAME, "type=alias,path=" + GEO_POINT_FIELD_NAME,
-                GEO_SHAPE_FIELD_NAME, "type=geo_shape",
-                LEGACY_GEO_SHAPE_FIELD_NAME, "type=geo_shape,tree=quadtree"
+                GEO_SHAPE_FIELD_NAME, "type=geo_shape"
             ))), MapperService.MergeReason.MAPPING_UPDATE);
             // also add mappings for two inner field in the object field
             mapperService.merge("_doc", new CompressedXContent("{\"properties\":{\"" + OBJECT_FIELD_NAME + "\":{\"type\":\"object\","

From 7b9ca62174216097493e83d1f127c1098c4f6737 Mon Sep 17 00:00:00 2001
From: Ioannis Kakavas
Date: Tue, 18 Dec 2018 10:05:50 +0200
Subject: [PATCH 25/26] Enhance Invalidate Token API (#35388)

This change:

- Adds functionality to invalidate all (refresh+access) tokens for all users of a realm
- Adds functionality to invalidate all (refresh+access) tokens for a user in all realms
- Adds functionality to invalidate all (refresh+access) tokens for a user in a specific realm
- Changes the response format for the invalidate token API to contain information about
  the number of invalidated tokens and possible errors that were encountered.
- Updates the API Documentation

After back-porting to 6.x, the `created` field will be removed from master as a field in the response

Resolves: #35115
Relates: #34556
---
 .../SecurityDocumentationIT.java              |   1 +
 .../security/invalidate-token.asciidoc        |   2 +-
 .../security/invalidate-tokens.asciidoc       |  90 ++-
 .../action/token/InvalidateTokenAction.java   |   2 +-
 .../action/token/InvalidateTokenRequest.java  | 133 ++++-
 .../token/InvalidateTokenRequestBuilder.java  |  16 +
 .../action/token/InvalidateTokenResponse.java |  66 ++-
 .../support/TokensInvalidationResult.java     | 113 ++++
 .../core/security/client/SecurityClient.java  |   4 +
 .../action/token/CreateTokenRequestTests.java |   1 -
 .../token/InvalidateTokenRequestTests.java    |  82 +++
 .../token/InvalidateTokenResponseTests.java   | 141 +++++
 .../TransportSamlInvalidateSessionAction.java |  46 +-
 .../saml/TransportSamlLogoutAction.java       |   3 +-
 .../token/TransportInvalidateTokenAction.java |  11 +-
 .../xpack/security/authc/TokenService.java    | 516 ++++++++++++------
 .../oauth2/RestInvalidateTokenAction.java     |  66 +--
 ...sportSamlInvalidateSessionActionTests.java |  72 ++-
 .../saml/TransportSamlLogoutActionTests.java  |  49 +-
 .../security/authc/TokenAuthIntegTests.java   |  96 +++-
 .../security/authc/TokenServiceTests.java     |   3 +-
 .../TokensInvalidationResultTests.java        |  74 +++
 .../RestInvalidateTokenActionTests.java       |  61 +++
 .../rest-api-spec/test/token/10_basic.yml     |  90 ++-
 24 files changed, 1429 insertions(+), 309 deletions(-)
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java
 create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestTests.java
 create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java
 create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java
 create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenActionTests.java

diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java
index 8bd285cd31f..6cd56774086 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java
@@ -1317,6 +1317,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/pull/36362")
     public void testInvalidateToken() throws Exception {
         RestHighLevelClient client = highLevelClient();
 
diff --git a/docs/java-rest/high-level/security/invalidate-token.asciidoc b/docs/java-rest/high-level/security/invalidate-token.asciidoc
index ecb3fedb56f..65e0f15bd86 100644
--- a/docs/java-rest/high-level/security/invalidate-token.asciidoc
+++ b/docs/java-rest/high-level/security/invalidate-token.asciidoc
@@ -36,4 +36,4 @@ The returned +{response}+ contains a single property:
 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
 include-tagged::{doc-tests-file}[{api}-response]
---------------------------------------------------
+--------------------------------------------------
\ No newline at end of file
diff --git a/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc b/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc
index 540f5866825..18c88f7addd 100644
--- a/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc
+++ b/x-pack/docs/en/rest-api/security/invalidate-tokens.asciidoc
@@ -2,7 +2,7 @@
 [[security-api-invalidate-token]]
 === Invalidate token API
 
-Invalidates an access token or a refresh token.
+Invalidates one or more access tokens or refresh tokens.
 
 ==== Request
 
@@ -19,21 +19,31 @@ can no longer be used. That time period is defined by the
 The refresh tokens returned by the <> are
 only valid for 24 hours. They can also be used exactly once.
 
-If you want to invalidate an access or refresh token immediately, use this invalidate token API.
+If you want to invalidate one or more access or refresh tokens immediately, use this invalidate token API.
 
 ==== Request Body
 
 The following parameters can be specified in the body of a DELETE request and
-pertain to invalidating a token:
+pertain to invalidating tokens:
 
 `token` (optional)::
-(string) An access token. This parameter cannot be used when `refresh_token` is used.
+(string) An access token. This parameter cannot be used when any of `refresh_token`, `realm_name` or
+`username` are used.
 
`refresh_token` (optional)::
-(string) A refresh token. This parameter cannot be used when `token` is used.
+(string) A refresh token. This parameter cannot be used when any of `token`, `realm_name` or
+`username` are used.
 
-NOTE: One of `token` or `refresh_token` parameters is required.
+`realm_name` (optional)::
+(string) The name of an authentication realm. This parameter cannot be used with either `refresh_token` or `token`.
+
+`username` (optional)::
+(string) The username of a user. This parameter cannot be used with either `refresh_token` or `token`.
+
+NOTE: While each of these parameters is optional on its own, at least one of them is required. More specifically,
+either one of the `token` or `refresh_token` parameters is required. If neither of those two is specified, then
+`realm_name` and/or `username` need to be specified.
 
 ==== Examples
 
@@ -59,15 +69,75 @@ DELETE /_security/oauth2/token
 --------------------------------------------------
 // NOTCONSOLE
 
-A successful call returns a JSON structure that indicates whether the token
-has already been invalidated.
+The following example invalidates all access tokens and refresh tokens for the `saml1` realm immediately:
 
 [source,js]
 --------------------------------------------------
+DELETE /_security/oauth2/token
 {
+  "realm_name" : "saml1"
 }
 --------------------------------------------------
 // NOTCONSOLE
 
+The following example invalidates all access tokens and refresh tokens for the user `myuser` in all realms immediately:
+
+[source,js]
+--------------------------------------------------
+DELETE /_security/oauth2/token
+{
+  "username" : "myuser"
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+Finally, the following example invalidates all access tokens and refresh tokens for the user `myuser` in
+the `saml1` realm immediately:
+
+[source,js]
+--------------------------------------------------
+DELETE /_security/oauth2/token
+{
+  "username" : "myuser",
+  "realm_name" : "saml1"
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+A successful call returns a JSON structure that contains the number of tokens that were invalidated, the number
+of tokens that had already been invalidated, and potentially a list of errors encountered while invalidating
+specific tokens.
+
+[source,js]
+--------------------------------------------------
+{
+  "invalidated_tokens":9,                 <1>
+  "previously_invalidated_tokens":15,     <2>
+  "error_count":2,                        <3>
+  "error_details":[                       <4>
+    {
+      "type":"exception",
+      "reason":"Elasticsearch exception [type=exception, reason=foo]",
+      "caused_by":{
+        "type":"exception",
+        "reason":"Elasticsearch exception [type=illegal_argument_exception, reason=bar]"
+      }
+    },
+    {
+      "type":"exception",
+      "reason":"Elasticsearch exception [type=exception, reason=boo]",
+      "caused_by":{
+        "type":"exception",
+        "reason":"Elasticsearch exception [type=illegal_argument_exception, reason=far]"
+      }
+    }
+  ]
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+<1> The number of tokens that were invalidated as part of this request.
+<2> The number of tokens that were already invalidated.
+<3> The number of errors that were encountered when invalidating the tokens.
+<4> Details about these errors. This field is not present in the response when
+    `error_count` is 0.
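A minimal usage sketch of the widened request surface this patch introduces, written against the `InvalidateTokenRequest` constructor and `validate()` rules that appear in the diff below; the class wrapper, token string, realm and user names here are made-up placeholders, not part of the change:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest;

// Hypothetical demo class; only the InvalidateTokenRequest calls mirror the patch.
public class InvalidateTokenRequestSketch {
    public static void main(String[] args) {
        // Invalidating by realm (or username) needs no token string or token type:
        InvalidateTokenRequest byRealm = new InvalidateTokenRequest(null, null, "saml1", null);
        ActionRequestValidationException ok = byRealm.validate();
        assert ok == null;

        // Mixing a concrete token with a realm/username filter is rejected with
        // "token string must not be provided when realm name or username is specified":
        InvalidateTokenRequest mixed =
            new InvalidateTokenRequest("placeholder-token-string", "token", "saml1", "myuser");
        assert mixed.validate() != null;
    }
}
--------------------------------------------------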
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java
index 679ee0756f6..57bd5bd35dd 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java
@@ -8,7 +8,7 @@ package org.elasticsearch.xpack.core.security.action.token;
 import org.elasticsearch.action.Action;
 
 /**
- * Action for invalidating a given token
+ * Action for invalidating one or more tokens
  */
 public final class InvalidateTokenAction extends Action<InvalidateTokenResponse> {
 
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java
index 7a8372fe456..de3b73ec4af 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequest.java
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.core.security.action.token;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -22,31 +23,81 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
 public final class InvalidateTokenRequest extends ActionRequest {
 
     public enum Type {
-        ACCESS_TOKEN,
-        REFRESH_TOKEN
+        ACCESS_TOKEN("token"),
+        REFRESH_TOKEN("refresh_token");
+
+        private final String value;
+
+        Type(String value) {
+            this.value = value;
+        }
+
+        public String getValue() {
+            return value;
+        }
+
+        public static Type fromString(String tokenType) {
+            if (tokenType != null) {
+                for (Type type : values()) {
+                    if (type.getValue().equals(tokenType)) {
+                        return type;
+                    }
+                }
+            }
+            return null;
+        }
     }
 
     private String tokenString;
     private Type tokenType;
+    private String realmName;
+    private String userName;
 
     public InvalidateTokenRequest() {}
 
     /**
-     * @param tokenString the string representation of the token
+     * @param tokenString the string representation of the token to be invalidated
+     * @param tokenType   the type of the token to be invalidated
+     * @param realmName   the name of the realm for which all tokens will be invalidated
+     * @param userName    the principal of the user for which all tokens will be invalidated
      */
-    public InvalidateTokenRequest(String tokenString, Type type) {
+    public InvalidateTokenRequest(@Nullable String tokenString, @Nullable String tokenType,
+                                  @Nullable String realmName, @Nullable String userName) {
         this.tokenString = tokenString;
-        this.tokenType = type;
+        this.tokenType = Type.fromString(tokenType);
+        this.realmName = realmName;
+        this.userName = userName;
+    }
+
+    /**
+     * @param tokenString the string representation of the token to be invalidated
+     * @param tokenType   the type of the token to be invalidated
+     */
+    public InvalidateTokenRequest(String tokenString, String tokenType) {
+        this.tokenString = tokenString;
+        this.tokenType = Type.fromString(tokenType);
+        this.realmName = null;
+        this.userName = null;
     }
 
     @Override
     public ActionRequestValidationException validate() {
         ActionRequestValidationException validationException = null;
-        if (Strings.isNullOrEmpty(tokenString)) {
-            validationException = addValidationError("token string must be provided", null);
-        }
-        if (tokenType == null) {
-            validationException = addValidationError("token type must be provided", validationException);
+        if (Strings.hasText(realmName) || Strings.hasText(userName)) {
+            if (Strings.hasText(tokenString)) {
+                validationException =
+                    addValidationError("token string must not be provided when realm name or username is specified", null);
+            }
+            if (tokenType != null) {
+                validationException =
+                    addValidationError("token type must not be provided when realm name or username is specified", validationException);
+            }
+        } else if (Strings.isNullOrEmpty(tokenString)) {
+            validationException =
+                addValidationError("token string must be provided when not specifying a realm name or a username", null);
+        } else if (tokenType == null) {
+            validationException =
+                addValidationError("token type must be provided when a token string is specified", null);
         }
         return validationException;
     }
@@ -67,26 +118,76 @@ public final class InvalidateTokenRequest extends ActionRequest {
         this.tokenType = tokenType;
     }
 
+    public String getRealmName() {
+        return realmName;
+    }
+
+    public void setRealmName(String realmName) {
+        this.realmName = realmName;
+    }
+
+    public String getUserName() {
+        return userName;
+    }
+
+    public void setUserName(String userName) {
+        this.userName = userName;
+    }
+
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
-        out.writeString(tokenString);
+        if (out.getVersion().before(Version.V_7_0_0)) {
+            if (Strings.isNullOrEmpty(tokenString)) {
+                throw new IllegalArgumentException("token is required for versions < v6.6.0");
+            }
+            out.writeString(tokenString);
+        } else {
+            out.writeOptionalString(tokenString);
+        }
         if (out.getVersion().onOrAfter(Version.V_6_2_0)) {
-            out.writeVInt(tokenType.ordinal());
+            if (out.getVersion().before(Version.V_7_0_0)) {
+                if (tokenType == null) {
+                    throw new IllegalArgumentException("token type is not optional for versions > v6.2.0 and < v6.6.0");
+                }
+                out.writeVInt(tokenType.ordinal());
+            } else {
+                out.writeOptionalVInt(tokenType == null ? null : tokenType.ordinal());
+            }
         } else if (tokenType == Type.REFRESH_TOKEN) {
-            throw new IllegalArgumentException("refresh token invalidation cannot be serialized with version [" + out.getVersion() +
-                "]");
+            throw new IllegalArgumentException("refresh token invalidation cannot be serialized with version [" + out.getVersion() + "]");
+        }
+        if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
+            out.writeOptionalString(realmName);
+            out.writeOptionalString(userName);
+        } else if (realmName != null || userName != null) {
+            throw new IllegalArgumentException(
+                "realm or user token invalidation cannot be serialized with version [" + out.getVersion() + "]");
         }
     }
 
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
-        tokenString = in.readString();
+        if (in.getVersion().before(Version.V_7_0_0)) {
+            tokenString = in.readString();
+        } else {
+            tokenString = in.readOptionalString();
+        }
         if (in.getVersion().onOrAfter(Version.V_6_2_0)) {
-            tokenType = Type.values()[in.readVInt()];
+            if (in.getVersion().before(Version.V_7_0_0)) {
+                int type = in.readVInt();
+                tokenType = Type.values()[type];
+            } else {
+                Integer type = in.readOptionalVInt();
+                tokenType = type == null ? null : Type.values()[type];
+            }
         } else {
             tokenType = Type.ACCESS_TOKEN;
         }
+        if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
+            realmName = in.readOptionalString();
+            userName = in.readOptionalString();
+        }
     }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java
index f77f6c65332..0b454905cfa 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestBuilder.java
@@ -34,4 +34,20 @@ public final class InvalidateTokenRequestBuilder
         request.setTokenType(type);
         return this;
     }
+
+    /**
+     * Sets the name of the realm for which all tokens should be invalidated
+     */
+    public InvalidateTokenRequestBuilder setRealmName(String realmName) {
+        request.setRealmName(realmName);
+        return this;
+    }
+
+    /**
+     * Sets the username for which all tokens should be invalidated
+     */
+    public InvalidateTokenRequestBuilder setUserName(String username) {
+        request.setUserName(username);
+        return this;
+    }
 }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java
index cebb005b272..886caeac370 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponse.java
@@ -5,41 +5,83 @@
  */
 package org.elasticsearch.xpack.core.security.action.token;
 
+import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Objects;
 
 /**
- * Response for a invalidation of a token.
+ * Response for an invalidation of one or multiple tokens.
*/ -public final class InvalidateTokenResponse extends ActionResponse { +public final class InvalidateTokenResponse extends ActionResponse implements ToXContent { - private boolean created; + private TokensInvalidationResult result; public InvalidateTokenResponse() {} - public InvalidateTokenResponse(boolean created) { - this.created = created; + public InvalidateTokenResponse(TokensInvalidationResult result) { + this.result = result; } - /** - * If the token is already invalidated then created will be false - */ - public boolean isCreated() { - return created; + public TokensInvalidationResult getResult() { + return result; + } + + private boolean isCreated() { + return result.getInvalidatedTokens().size() > 0 + && result.getPreviouslyInvalidatedTokens().isEmpty() + && result.getErrors().isEmpty(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeBoolean(created); + if (out.getVersion().before(Version.V_7_0_0)) { + out.writeBoolean(isCreated()); + } else { + result.writeTo(out); + } } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - created = in.readBoolean(); + if (in.getVersion().before(Version.V_7_0_0)) { + final boolean created = in.readBoolean(); + if (created) { + result = new TokensInvalidationResult(Arrays.asList(""), Collections.emptyList(), Collections.emptyList(), 0); + } else { + result = new TokensInvalidationResult(Collections.emptyList(), Arrays.asList(""), Collections.emptyList(), 0); + } + } else { + result = new TokensInvalidationResult(in); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + result.toXContent(builder, params); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + InvalidateTokenResponse that = (InvalidateTokenResponse) o; + return Objects.equals(result, that.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java new file mode 100644 index 00000000000..cfa83b63ed5 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/TokensInvalidationResult.java @@ -0,0 +1,113 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.core.security.authc.support; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * The result of attempting to invalidate one or multiple tokens. The result contains information about: + *

+ * <ul>
+ * <li>how many of the tokens were actually invalidated</li>
+ * <li>how many tokens were not invalidated by this request because they had already been invalidated</li>
+ * <li>how many errors were encountered while invalidating tokens, and the error details</li>
+ * </ul>
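+ *
+ * As a rough usage sketch (the consuming code below is hypothetical and not part of
+ * this change), a caller could inspect the result as follows:
+ *
+ * <pre>{@code
+ * TokensInvalidationResult result = response.getResult(); // e.g. from an InvalidateTokenResponse
+ * if (result.getErrors().isEmpty() == false) {
+ *     // some tokens could not be invalidated, even after the recorded attempt count
+ * }
+ * int newlyInvalidated = result.getInvalidatedTokens().size();
+ * int alreadyInvalidated = result.getPreviouslyInvalidatedTokens().size();
+ * }</pre>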
+ */ +public class TokensInvalidationResult implements ToXContentObject, Writeable { + + private final List invalidatedTokens; + private final List previouslyInvalidatedTokens; + private final List errors; + private final int attemptCount; + + public TokensInvalidationResult(List invalidatedTokens, List previouslyInvalidatedTokens, + @Nullable List errors, int attemptCount) { + Objects.requireNonNull(invalidatedTokens, "invalidated_tokens must be provided"); + this.invalidatedTokens = invalidatedTokens; + Objects.requireNonNull(previouslyInvalidatedTokens, "previously_invalidated_tokens must be provided"); + this.previouslyInvalidatedTokens = previouslyInvalidatedTokens; + if (null != errors) { + this.errors = errors; + } else { + this.errors = Collections.emptyList(); + } + this.attemptCount = attemptCount; + } + + public TokensInvalidationResult(StreamInput in) throws IOException { + this.invalidatedTokens = in.readList(StreamInput::readString); + this.previouslyInvalidatedTokens = in.readList(StreamInput::readString); + this.errors = in.readList(StreamInput::readException); + this.attemptCount = in.readVInt(); + } + + public static TokensInvalidationResult emptyResult() { + return new TokensInvalidationResult(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), 0); + } + + + public List getInvalidatedTokens() { + return invalidatedTokens; + } + + public List getPreviouslyInvalidatedTokens() { + return previouslyInvalidatedTokens; + } + + public List getErrors() { + return errors; + } + + public int getAttemptCount() { + return attemptCount; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject() + //Remove created after PR is backported to 6.x + .field("created", isCreated()) + .field("invalidated_tokens", invalidatedTokens.size()) + .field("previously_invalidated_tokens", previouslyInvalidatedTokens.size()) + .field("error_count", errors.size()); + if (errors.isEmpty() == false) { + builder.field("error_details"); + builder.startArray(); + for (ElasticsearchException e : errors) { + builder.startObject(); + ElasticsearchException.generateThrowableXContent(builder, params, e); + builder.endObject(); + } + builder.endArray(); + } + return builder.endObject(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeStringList(invalidatedTokens); + out.writeStringList(previouslyInvalidatedTokens); + out.writeCollection(errors, StreamOutput::writeException); + out.writeVInt(attemptCount); + } + + private boolean isCreated() { + return this.getInvalidatedTokens().size() > 0 + && this.getPreviouslyInvalidatedTokens().isEmpty() + && this.getErrors().isEmpty(); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java index ef59f870c68..a7faf4d2231 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/client/SecurityClient.java @@ -326,6 +326,10 @@ public class SecurityClient { return new InvalidateTokenRequestBuilder(client).setTokenString(token); } + public InvalidateTokenRequestBuilder prepareInvalidateToken() { + return new InvalidateTokenRequestBuilder(client); + } + public void invalidateToken(InvalidateTokenRequest request, ActionListener listener) { 
client.execute(InvalidateTokenAction.INSTANCE, request, listener); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java index bd23198e8ea..2d8782f0111 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.core.security.action.token; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasItem; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestTests.java new file mode 100644 index 00000000000..3fd7eb7da46 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenRequestTests.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.security.action.token; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; + +public class InvalidateTokenRequestTests extends ESTestCase { + + public void testValidation() { + InvalidateTokenRequest request = new InvalidateTokenRequest(); + ActionRequestValidationException ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), containsString("token string must be provided when not specifying a realm")); + + request = new InvalidateTokenRequest(randomAlphaOfLength(12), randomFrom("", null)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), containsString("token type must be provided when a token string is specified")); + + request = new InvalidateTokenRequest(randomFrom("", null), "access_token"); + ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), containsString("token string must be provided when not specifying a realm")); + + request = new InvalidateTokenRequest(randomFrom("", null), randomFrom("", null), randomAlphaOfLength(4), randomAlphaOfLength(8)); + ve = request.validate(); + assertNull(ve); + + request = + new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("", null), randomAlphaOfLength(4), randomAlphaOfLength(8)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), + containsString("token string must not be provided when realm name or username is specified")); + + request = new 
InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("token", "refresh_token"), + randomAlphaOfLength(4), randomAlphaOfLength(8)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(2, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), + containsString("token string must not be provided when realm name or username is specified")); + assertThat(ve.validationErrors().get(1), + containsString("token type must not be provided when realm name or username is specified")); + + request = + new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("", null), randomAlphaOfLength(4), randomAlphaOfLength(8)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), + containsString("token string must not be provided when realm name or username is specified")); + + request = + new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("token", "refresh_token"), randomFrom("", null), + randomAlphaOfLength(8)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(2, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), + containsString("token string must not be provided when realm name or username is specified")); + assertThat(ve.validationErrors().get(1), + containsString("token type must not be provided when realm name or username is specified")); + + request = new InvalidateTokenRequest(randomAlphaOfLength(4), randomFrom("", null), randomFrom("", null), randomAlphaOfLength(8)); + ve = request.validate(); + assertNotNull(ve); + assertEquals(1, ve.validationErrors().size()); + assertThat(ve.validationErrors().get(0), + containsString("token string must not be provided when realm name or username is specified")); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java new file mode 100644 index 00000000000..1a59971ff9c --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenResponseTests.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.security.action.token; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class InvalidateTokenResponseTests extends ESTestCase { + + public void testSerialization() throws IOException { + TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false)), + Arrays.asList(generateRandomStringArray(20, 15, false)), + Arrays.asList(new ElasticsearchException("foo", new IllegalArgumentException("this is an error message")), + new ElasticsearchException("bar", new IllegalArgumentException("this is an error message2"))), + randomIntBetween(0, 5)); + InvalidateTokenResponse response = new InvalidateTokenResponse(result); + try (BytesStreamOutput output = new BytesStreamOutput()) { + response.writeTo(output); + try (StreamInput input = output.bytes().streamInput()) { + InvalidateTokenResponse serialized = new InvalidateTokenResponse(); + serialized.readFrom(input); + assertThat(serialized.getResult().getInvalidatedTokens(), equalTo(response.getResult().getInvalidatedTokens())); + assertThat(serialized.getResult().getPreviouslyInvalidatedTokens(), + equalTo(response.getResult().getPreviouslyInvalidatedTokens())); + assertThat(serialized.getResult().getErrors().size(), equalTo(response.getResult().getErrors().size())); + assertThat(serialized.getResult().getErrors().get(0).toString(), containsString("this is an error message")); + assertThat(serialized.getResult().getErrors().get(1).toString(), containsString("this is an error message2")); + } + } + + result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false)), + Arrays.asList(generateRandomStringArray(20, 15, false)), + Collections.emptyList(), randomIntBetween(0, 5)); + response = new InvalidateTokenResponse(result); + try (BytesStreamOutput output = new BytesStreamOutput()) { + response.writeTo(output); + try (StreamInput input = output.bytes().streamInput()) { + InvalidateTokenResponse serialized = new InvalidateTokenResponse(); + serialized.readFrom(input); + assertThat(serialized.getResult().getInvalidatedTokens(), equalTo(response.getResult().getInvalidatedTokens())); + assertThat(serialized.getResult().getPreviouslyInvalidatedTokens(), + equalTo(response.getResult().getPreviouslyInvalidatedTokens())); + assertThat(serialized.getResult().getErrors().size(), equalTo(response.getResult().getErrors().size())); + } + } + } + + public void testSerializationToPre66Version() throws IOException{ + final Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_2_0, Version.V_6_5_1); + TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)), + Arrays.asList(generateRandomStringArray(20, 
15, false, false)), + Arrays.asList(new ElasticsearchException("foo", new IllegalArgumentException("this is an error message")), + new ElasticsearchException("bar", new IllegalArgumentException("this is an error message2"))), + randomIntBetween(0, 5)); + InvalidateTokenResponse response = new InvalidateTokenResponse(result); + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(version); + response.writeTo(output); + try (StreamInput input = output.bytes().streamInput()) { + // False as we have errors and previously invalidated tokens + assertThat(input.readBoolean(), equalTo(false)); + } + } + + result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)), + Arrays.asList(generateRandomStringArray(20, 15, false, false)), + Collections.emptyList(), randomIntBetween(0, 5)); + response = new InvalidateTokenResponse(result); + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(version); + response.writeTo(output); + try (StreamInput input = output.bytes().streamInput()) { + // False as we have previously invalidated tokens + assertThat(input.readBoolean(), equalTo(false)); + } + } + + result = new TokensInvalidationResult(Arrays.asList(generateRandomStringArray(20, 15, false, false)), + Collections.emptyList(), Collections.emptyList(), randomIntBetween(0, 5)); + response = new InvalidateTokenResponse(result); + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.setVersion(version); + response.writeTo(output); + try (StreamInput input = output.bytes().streamInput()) { + assertThat(input.readBoolean(), equalTo(true)); + } + } + } + + public void testToXContent() throws IOException { + List invalidatedTokens = Arrays.asList(generateRandomStringArray(20, 15, false)); + List previouslyInvalidatedTokens = Arrays.asList(generateRandomStringArray(20, 15, false)); + TokensInvalidationResult result = new TokensInvalidationResult(invalidatedTokens, previouslyInvalidatedTokens, + Arrays.asList(new ElasticsearchException("foo", new IllegalArgumentException("this is an error message")), + new ElasticsearchException("bar", new IllegalArgumentException("this is an error message2"))), + randomIntBetween(0, 5)); + InvalidateTokenResponse response = new InvalidateTokenResponse(result); + XContentBuilder builder = XContentFactory.jsonBuilder(); + response.toXContent(builder, ToXContent.EMPTY_PARAMS); + assertThat(Strings.toString(builder), + equalTo("{\"created\":false," + + "\"invalidated_tokens\":" + invalidatedTokens.size() + "," + + "\"previously_invalidated_tokens\":" + previouslyInvalidatedTokens.size() + "," + + "\"error_count\":2," + + "\"error_details\":[" + + "{\"type\":\"exception\"," + + "\"reason\":\"foo\"," + + "\"caused_by\":{" + + "\"type\":\"illegal_argument_exception\"," + + "\"reason\":\"this is an error message\"}" + + "}," + + "{\"type\":\"exception\"," + + "\"reason\":\"bar\"," + + "\"caused_by\":" + + "{\"type\":\"illegal_argument_exception\"," + + "\"reason\":\"this is an error message2\"}" + + "}" + + "]" + + "}")); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java index f0e6bf2c990..8c35df01ed9 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionAction; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionRequest; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionResponse; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; import org.elasticsearch.xpack.security.authc.UserToken; @@ -27,12 +28,11 @@ import org.elasticsearch.xpack.security.authc.saml.SamlRedirect; import org.elasticsearch.xpack.security.authc.saml.SamlUtils; import org.opensaml.saml.saml2.core.LogoutResponse; -import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.stream.Collectors; +import java.util.function.Predicate; import static org.elasticsearch.xpack.security.authc.saml.SamlRealm.findSamlRealms; @@ -85,7 +85,7 @@ public final class TransportSamlInvalidateSessionAction private void findAndInvalidateTokens(SamlRealm realm, SamlLogoutRequestHandler.Result result, ActionListener listener) { final Map tokenMetadata = realm.createTokenMetadata(result.getNameId(), result.getSession()); - if (Strings.hasText((String) tokenMetadata.get(SamlRealm.TOKEN_METADATA_NAMEID_VALUE)) == false) { + if (Strings.isNullOrEmpty((String) tokenMetadata.get(SamlRealm.TOKEN_METADATA_NAMEID_VALUE))) { // If we don't have a valid name-id to match against, don't do anything logger.debug("Logout request [{}] has no NameID value, so cannot invalidate any sessions", result); listener.onResponse(0); @@ -93,22 +93,21 @@ public final class TransportSamlInvalidateSessionAction } tokenService.findActiveTokensForRealm(realm.name(), ActionListener.wrap(tokens -> { - List> sessionTokens = filterTokens(tokens, tokenMetadata); - logger.debug("Found [{}] token pairs to invalidate for SAML metadata [{}]", sessionTokens.size(), tokenMetadata); - if (sessionTokens.isEmpty()) { - listener.onResponse(0); - } else { - GroupedActionListener groupedListener = new GroupedActionListener<>( - ActionListener.wrap(collection -> listener.onResponse(collection.size()), listener::onFailure), - sessionTokens.size(), Collections.emptyList() - ); - sessionTokens.forEach(tuple -> invalidateTokenPair(tuple, groupedListener)); - } - }, e -> listener.onFailure(e) - )); + logger.debug("Found [{}] token pairs to invalidate for SAML metadata [{}]", tokens.size(), tokenMetadata); + if (tokens.isEmpty()) { + listener.onResponse(0); + } else { + GroupedActionListener groupedListener = new GroupedActionListener<>( + ActionListener.wrap(collection -> listener.onResponse(collection.size()), listener::onFailure), + tokens.size(), Collections.emptyList() + ); + tokens.forEach(tuple -> invalidateTokenPair(tuple, groupedListener)); + } + }, listener::onFailure + ), containsMetadata(tokenMetadata)); } - private void invalidateTokenPair(Tuple tokenPair, ActionListener listener) { + private void invalidateTokenPair(Tuple tokenPair, ActionListener listener) { // Invalidate the refresh token first, so the client doesn't trigger a refresh once the access token is invalidated tokenService.invalidateRefreshToken(tokenPair.v2(), ActionListener.wrap(ignore -> 
tokenService.invalidateAccessToken( tokenPair.v1(), @@ -118,13 +117,12 @@ public final class TransportSamlInvalidateSessionAction })), listener::onFailure)); } - private List> filterTokens(Collection> tokens, Map requiredMetadata) { - return tokens.stream() - .filter(tup -> { - Map actualMetadata = tup.v1().getMetadata(); - return requiredMetadata.entrySet().stream().allMatch(e -> Objects.equals(actualMetadata.get(e.getKey()), e.getValue())); - }) - .collect(Collectors.toList()); + + private Predicate> containsMetadata(Map requiredMetadata) { + return source -> { + Map actualMetadata = (Map) source.get("metadata"); + return requiredMetadata.entrySet().stream().allMatch(e -> Objects.equals(actualMetadata.get(e.getKey()), e.getValue())); + }; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java index b62702ead78..28e9f911cd5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutRequest; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutResponse; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Realm; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.authc.Realms; import org.elasticsearch.xpack.security.authc.TokenService; @@ -79,7 +80,7 @@ public final class TransportSamlLogoutAction }, listener::onFailure)); } - private void invalidateRefreshToken(String refreshToken, ActionListener listener) { + private void invalidateRefreshToken(String refreshToken, ActionListener listener) { if (refreshToken == null) { listener.onResponse(null); } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java index 70f614435fc..9f0443a86f7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java @@ -8,12 +8,14 @@ package org.elasticsearch.xpack.security.action.token; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenResponse; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.security.authc.TokenService; /** @@ -31,9 +33,12 @@ public final class 
TransportInvalidateTokenAction extends HandledTransportAction @Override protected void doExecute(Task task, InvalidateTokenRequest request, ActionListener listener) { - final ActionListener invalidateListener = - ActionListener.wrap(created -> listener.onResponse(new InvalidateTokenResponse(created)), listener::onFailure); - if (request.getTokenType() == InvalidateTokenRequest.Type.ACCESS_TOKEN) { + final ActionListener invalidateListener = + ActionListener.wrap(tokensInvalidationResult -> + listener.onResponse(new InvalidateTokenResponse(tokensInvalidationResult)), listener::onFailure); + if (Strings.hasText(request.getUserName()) || Strings.hasText(request.getRealmName())) { + tokenService.invalidateActiveTokensForRealmAndUser(request.getRealmName(), request.getUserName(), invalidateListener); + } else if (request.getTokenType() == InvalidateTokenRequest.Type.ACCESS_TOKEN) { tokenService.invalidateAccessToken(request.getTokenString(), invalidateListener); } else { assert request.getTokenType() == InvalidateTokenRequest.Type.REFRESH_TOKEN; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index be5b11aa666..15d3e758426 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -17,6 +17,11 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest.OpType; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetItemResponse; @@ -24,7 +29,6 @@ import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -39,6 +43,7 @@ import org.elasticsearch.cluster.ack.AckedRequest; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; @@ -61,7 +66,6 @@ import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.core.internal.io.IOUtils; -import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -74,6 +78,7 @@ import org.elasticsearch.xpack.core.security.ScrollHelper; 
import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.KeyAndTimestamp; import org.elasticsearch.xpack.core.security.authc.TokenMetaData; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.security.support.SecurityIndexManager; import javax.crypto.Cipher; @@ -90,6 +95,7 @@ import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; +import java.io.UncheckedIOException; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.security.GeneralSecurityException; @@ -116,6 +122,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; +import java.util.function.Predicate; +import java.util.stream.Collectors; import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; @@ -221,9 +229,9 @@ public final class TokenService { boolean includeRefreshToken) throws IOException { ensureEnabled(); if (authentication == null) { - listener.onFailure(traceLog("create token", null, new IllegalArgumentException("authentication must be provided"))); + listener.onFailure(traceLog("create token", new IllegalArgumentException("authentication must be provided"))); } else if (originatingClientAuth == null) { - listener.onFailure(traceLog("create token", null, + listener.onFailure(traceLog("create token", new IllegalArgumentException("originating client authentication must be provided"))); } else { final Instant created = clock.instant(); @@ -471,7 +479,7 @@ public final class TokenService { * have been created on versions on or after 6.2; this step involves performing an update to * the token document and setting the invalidated field to true */ - public void invalidateAccessToken(String tokenString, ActionListener listener) { + public void invalidateAccessToken(String tokenString, ActionListener listener) { ensureEnabled(); if (Strings.isNullOrEmpty(tokenString)) { logger.trace("No token-string provided"); @@ -484,7 +492,8 @@ public final class TokenService { listener.onFailure(traceLog("invalidate token", tokenString, malformedTokenException())); } else { final long expirationEpochMilli = getExpirationTime().toEpochMilli(); - indexBwcInvalidation(userToken, listener, new AtomicInteger(0), expirationEpochMilli); + indexBwcInvalidation(Collections.singleton(userToken.getId()), listener, new AtomicInteger(0), + expirationEpochMilli, null); } }, listener::onFailure)); } catch (IOException e) { @@ -499,7 +508,7 @@ public final class TokenService { * * @see #invalidateAccessToken(String, ActionListener) */ - public void invalidateAccessToken(UserToken userToken, ActionListener listener) { + public void invalidateAccessToken(UserToken userToken, ActionListener listener) { ensureEnabled(); if (userToken == null) { logger.trace("No access token provided"); @@ -507,11 +516,17 @@ public final class TokenService { } else { maybeStartTokenRemover(); final long expirationEpochMilli = getExpirationTime().toEpochMilli(); - indexBwcInvalidation(userToken, listener, new AtomicInteger(0), expirationEpochMilli); + indexBwcInvalidation(Collections.singleton(userToken.getId()), listener, new AtomicInteger(0), expirationEpochMilli, null); } } - public void 
invalidateRefreshToken(String refreshToken, ActionListener listener) { + /** + * This method performs the steps necessary to invalidate a refresh token so that it may no longer be used. + * + * @param refreshToken The string representation of the refresh token + * @param listener the listener to notify upon completion + */ + public void invalidateRefreshToken(String refreshToken, ActionListener listener) { ensureEnabled(); if (Strings.isNullOrEmpty(refreshToken)) { logger.trace("No refresh token provided"); @@ -520,152 +535,222 @@ public final class TokenService { maybeStartTokenRemover(); findTokenFromRefreshToken(refreshToken, ActionListener.wrap(tuple -> { - final String docId = tuple.v1().getHits().getAt(0).getId(); - final long docVersion = tuple.v1().getHits().getAt(0).getVersion(); - indexInvalidation(docId, Version.CURRENT, listener, tuple.v2(), "refresh_token", docVersion); + final String docId = getTokenIdFromDocumentId(tuple.v1().getHits().getAt(0).getId()); + indexInvalidation(Collections.singletonList(docId), listener, tuple.v2(), "refresh_token", null); }, listener::onFailure), new AtomicInteger(0)); } } /** - * Performs the actual bwc invalidation of a token and then kicks off the new invalidation method + * Invalidate all access tokens and all refresh tokens of a given {@code realmName} and/or of a given + * {@code username} so that they may no longer be used * - * @param userToken the token to invalidate - * @param listener the listener to notify upon completion - * @param attemptCount the number of attempts to invalidate that have already been tried - * @param expirationEpochMilli the expiration time as milliseconds since the epoch + * @param realmName the realm of which the tokens should be invalidated + * @param username the username for which the tokens should be invalidated + * @param listener the listener to notify upon completion */ - private void indexBwcInvalidation(UserToken userToken, ActionListener listener, AtomicInteger attemptCount, - long expirationEpochMilli) { - if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { - logger.warn("Failed to invalidate token [{}] after [{}] attempts", userToken.getId(), attemptCount.get()); - listener.onFailure(invalidGrantException("failed to invalidate token")); + public void invalidateActiveTokensForRealmAndUser(@Nullable String realmName, @Nullable String username, + ActionListener listener) { + ensureEnabled(); + if (Strings.isNullOrEmpty(realmName) && Strings.isNullOrEmpty(username)) { + logger.trace("No realm name or username provided"); + listener.onFailure(new IllegalArgumentException("realm name or username must be provided")); } else { - final String invalidatedTokenId = getInvalidatedTokenDocumentId(userToken); - IndexRequest indexRequest = client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, invalidatedTokenId) - .setOpType(OpType.CREATE) - .setSource("doc_type", INVALIDATED_TOKEN_DOC_TYPE, "expiration_time", expirationEpochMilli) - .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) - .request(); - final String tokenDocId = getTokenDocumentId(userToken); - final Version version = userToken.getVersion(); - securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", tokenDocId, ex)), - () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, indexRequest, - ActionListener.wrap(indexResponse -> { - ActionListener wrappedListener = - ActionListener.wrap(ignore -> listener.onResponse(true), listener::onFailure); - 
indexInvalidation(tokenDocId, version, wrappedListener, attemptCount, "access_token", 1L); - }, e -> { - Throwable cause = ExceptionsHelper.unwrapCause(e); - traceLog("(bwc) invalidate token", tokenDocId, cause); - if (cause instanceof VersionConflictEngineException) { - // expected since something else could have invalidated - ActionListener wrappedListener = - ActionListener.wrap(ignore -> listener.onResponse(false), listener::onFailure); - indexInvalidation(tokenDocId, version, wrappedListener, attemptCount, "access_token", 1L); - } else if (isShardNotAvailableException(e)) { - attemptCount.incrementAndGet(); - indexBwcInvalidation(userToken, listener, attemptCount, expirationEpochMilli); - } else { - listener.onFailure(e); - } - }), client::index)); + if (Strings.isNullOrEmpty(realmName)) { + findActiveTokensForUser(username, ActionListener.wrap(tokenTuples -> { + if (tokenTuples.isEmpty()) { + logger.warn("No tokens to invalidate for realm [{}] and username [{}]", realmName, username); + listener.onResponse(TokensInvalidationResult.emptyResult()); + } else { + invalidateAllTokens(tokenTuples.stream().map(t -> t.v1().getId()).collect(Collectors.toList()), listener); + } + }, listener::onFailure)); + } else { + Predicate filter = null; + if (Strings.hasText(username)) { + filter = isOfUser(username); + } + findActiveTokensForRealm(realmName, ActionListener.wrap(tokenTuples -> { + if (tokenTuples.isEmpty()) { + logger.warn("No tokens to invalidate for realm [{}] and username [{}]", realmName, username); + listener.onResponse(TokensInvalidationResult.emptyResult()); + } else { + invalidateAllTokens(tokenTuples.stream().map(t -> t.v1().getId()).collect(Collectors.toList()), listener); + } + }, listener::onFailure), filter); + } } } /** - * Performs the actual invalidation of a token + * Invalidates a collection of access_token and refresh_token that were retrieved by + * {@link TokenService#invalidateActiveTokensForRealmAndUser} * - * @param tokenDocId the id of the token doc to invalidate + * @param accessTokenIds The ids of the access tokens which should be invalidated (along with the respective refresh_token) + * @param listener the listener to notify upon completion + */ + private void invalidateAllTokens(Collection accessTokenIds, ActionListener listener) { + maybeStartTokenRemover(); + final long expirationEpochMilli = getExpirationTime().toEpochMilli(); + // Invalidate the refresh tokens first so that they cannot be used to get new + // access tokens while we invalidate the access tokens we currently know about + indexInvalidation(accessTokenIds, ActionListener.wrap(result -> + indexBwcInvalidation(accessTokenIds, listener, new AtomicInteger(result.getAttemptCount()), + expirationEpochMilli, result), + listener::onFailure), new AtomicInteger(0), "refresh_token", null); + } + + /** + * Performs the actual bwc invalidation of a collection of tokens and then kicks off the new invalidation method. + * + * @param tokenIds the collection of token ids or token document ids that should be invalidated + * @param listener the listener to notify upon completion + * @param attemptCount the number of attempts to invalidate that have already been tried + * @param expirationEpochMilli the expiration time as milliseconds since the epoch + * @param previousResult if this not the initial attempt for invalidation, it contains the result of invalidating + * tokens up to the point of the retry. 
This result is added to the result of the current attempt + */ + private void indexBwcInvalidation(Collection tokenIds, ActionListener listener, + AtomicInteger attemptCount, long expirationEpochMilli, + @Nullable TokensInvalidationResult previousResult) { + + if (tokenIds.isEmpty()) { + logger.warn("No tokens provided for invalidation"); + listener.onFailure(invalidGrantException("No tokens provided for invalidation")); + } else if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { + logger.warn("Failed to invalidate [{}] tokens after [{}] attempts", tokenIds.size(), + attemptCount.get()); + listener.onFailure(invalidGrantException("failed to invalidate tokens")); + } else { + BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + for (String tokenId : tokenIds) { + final String invalidatedTokenId = getInvalidatedTokenDocumentId(tokenId); + IndexRequest indexRequest = client.prepareIndex(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, invalidatedTokenId) + .setOpType(OpType.CREATE) + .setSource("doc_type", INVALIDATED_TOKEN_DOC_TYPE, "expiration_time", expirationEpochMilli) + .request(); + bulkRequestBuilder.add(indexRequest); + } + bulkRequestBuilder.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); + final BulkRequest bulkRequest = bulkRequestBuilder.request(); + securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", ex)), + () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, bulkRequest, + ActionListener.wrap(bulkResponse -> { + List retryTokenIds = new ArrayList<>(); + for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) { + if (bulkItemResponse.isFailed()) { + Throwable cause = bulkItemResponse.getFailure().getCause(); + logger.error(cause.getMessage()); + traceLog("(bwc) invalidate tokens", cause); + if (isShardNotAvailableException(cause)) { + retryTokenIds.add(getTokenIdFromInvalidatedTokenDocumentId(bulkItemResponse.getFailure().getId())); + } else if ((cause instanceof VersionConflictEngineException) == false){ + // We don't handle VersionConflictEngineException, the ticket has been invalidated + listener.onFailure(bulkItemResponse.getFailure().getCause()); + } + } + } + if (retryTokenIds.isEmpty() == false) { + attemptCount.incrementAndGet(); + indexBwcInvalidation(retryTokenIds, listener, attemptCount, expirationEpochMilli, previousResult); + } + indexInvalidation(tokenIds, listener, attemptCount, "access_token", previousResult); + }, e -> { + Throwable cause = ExceptionsHelper.unwrapCause(e); + traceLog("(bwc) invalidate tokens", cause); + if (isShardNotAvailableException(cause)) { + attemptCount.incrementAndGet(); + indexBwcInvalidation(tokenIds, listener, attemptCount, expirationEpochMilli, previousResult); + } else { + listener.onFailure(e); + } + }), + client::bulk)); + } + } + + /** + * Performs the actual invalidation of a collection of tokens + * + * @param tokenIds the tokens to invalidate * @param listener the listener to notify upon completion * @param attemptCount the number of attempts to invalidate that have already been tried - * @param srcPrefix the prefix to use when constructing the doc to update - * @param documentVersion the expected version of the document we will update + * @param srcPrefix the prefix to use when constructing the doc to update, either refresh_token or access_token depending on + * what type of tokens should be invalidated + * @param previousResult if this not the initial attempt for invalidation, it contains the result of invalidating + * tokens 
up to the point of the retry. This result is added to the result of the current attempt */ - private void indexInvalidation(String tokenDocId, Version version, ActionListener listener, AtomicInteger attemptCount, - String srcPrefix, long documentVersion) { - if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { - logger.warn("Failed to invalidate token [{}] after [{}] attempts", tokenDocId, attemptCount.get()); - listener.onFailure(invalidGrantException("failed to invalidate token")); + private void indexInvalidation(Collection tokenIds, ActionListener listener, + AtomicInteger attemptCount, String srcPrefix, @Nullable TokensInvalidationResult previousResult) { + if (tokenIds.isEmpty()) { + logger.warn("No [{}] tokens provided for invalidation", srcPrefix); + listener.onFailure(invalidGrantException("No tokens provided for invalidation")); + } else if (attemptCount.get() > MAX_RETRY_ATTEMPTS) { + logger.warn("Failed to invalidate [{}] tokens after [{}] attempts", tokenIds.size(), + attemptCount.get()); + listener.onFailure(invalidGrantException("failed to invalidate tokens")); } else { - UpdateRequest request = client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId) + BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); + for (String tokenId : tokenIds) { + UpdateRequest request = client.prepareUpdate(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, getTokenDocumentId(tokenId)) .setDoc(srcPrefix, Collections.singletonMap("invalidated", true)) - .setVersion(documentVersion) - .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL) + .setFetchSource(srcPrefix, null) .request(); - securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", tokenDocId, ex)), - () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, request, - ActionListener.wrap(updateResponse -> { - logger.debug("Invalidated [{}] for doc [{}]", srcPrefix, tokenDocId); - if (updateResponse.getGetResult() != null - && updateResponse.getGetResult().sourceAsMap().containsKey(srcPrefix) - && ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix)) - .containsKey("invalidated")) { - final boolean prevInvalidated = (boolean) - ((Map) updateResponse.getGetResult().sourceAsMap().get(srcPrefix)) - .get("invalidated"); - listener.onResponse(prevInvalidated == false); - } else { - listener.onResponse(true); + bulkRequestBuilder.add(request); + } + bulkRequestBuilder.setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); + securityIndex.prepareIndexIfNeededThenExecute(ex -> listener.onFailure(traceLog("prepare security index", ex)), + () -> executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, bulkRequestBuilder.request(), + ActionListener.wrap(bulkResponse -> { + ArrayList retryTokenDocIds = new ArrayList<>(); + ArrayList failedRequestResponses = new ArrayList<>(); + ArrayList previouslyInvalidated = new ArrayList<>(); + ArrayList invalidated = new ArrayList<>(); + if (null != previousResult) { + failedRequestResponses.addAll((previousResult.getErrors())); + previouslyInvalidated.addAll(previousResult.getPreviouslyInvalidatedTokens()); + invalidated.addAll(previousResult.getInvalidatedTokens()); } + for (BulkItemResponse bulkItemResponse : bulkResponse.getItems()) { + if (bulkItemResponse.isFailed()) { + Throwable cause = bulkItemResponse.getFailure().getCause(); + final String failedTokenDocId = getTokenIdFromDocumentId(bulkItemResponse.getFailure().getId()); + if (isShardNotAvailableException(cause)) { + 
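+                                // a shard-not-available failure is transient: remember the token doc id
+                                // and retry only the failed subset of this bulk request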
retryTokenDocIds.add(failedTokenDocId); + } + else { + traceLog("invalidate access token", failedTokenDocId, cause); + failedRequestResponses.add(new ElasticsearchException("Error invalidating " + srcPrefix + ": ", cause)); + } + } else { + UpdateResponse updateResponse = bulkItemResponse.getResponse(); + if (updateResponse.getResult() == DocWriteResponse.Result.UPDATED) { + logger.debug("Invalidated [{}] for doc [{}]", srcPrefix, updateResponse.getGetResult().getId()); + invalidated.add(updateResponse.getGetResult().getId()); + } else if (updateResponse.getResult() == DocWriteResponse.Result.NOOP) { + previouslyInvalidated.add(updateResponse.getGetResult().getId()); + } + } + } + if (retryTokenDocIds.isEmpty() == false) { + TokensInvalidationResult incompleteResult = new TokensInvalidationResult(invalidated, previouslyInvalidated, + failedRequestResponses, attemptCount.get()); + attemptCount.incrementAndGet(); + indexInvalidation(retryTokenDocIds, listener, attemptCount, srcPrefix, incompleteResult); + } + TokensInvalidationResult result = new TokensInvalidationResult(invalidated, previouslyInvalidated, + failedRequestResponses, attemptCount.get()); + listener.onResponse(result); }, e -> { Throwable cause = ExceptionsHelper.unwrapCause(e); - traceLog("invalidate token", tokenDocId, cause); - if (cause instanceof DocumentMissingException) { - if (version.onOrAfter(Version.V_6_2_0)) { - // the document should always be there! - listener.onFailure(e); - } else { - listener.onResponse(false); - } - } else if (cause instanceof VersionConflictEngineException - || isShardNotAvailableException(cause)) { + traceLog("invalidate tokens", cause); + if (isShardNotAvailableException(cause)) { attemptCount.incrementAndGet(); - executeAsyncWithOrigin(client.threadPool().getThreadContext(), SECURITY_ORIGIN, - client.prepareGet(SecurityIndexManager.SECURITY_INDEX_NAME, TYPE, tokenDocId).request(), - ActionListener.wrap(getResult -> { - if (getResult.isExists()) { - Map source = getResult.getSource(); - Map accessTokenSource = (Map) source.get("access_token"); - Consumer onFailure = ex -> listener.onFailure(traceLog("get token", tokenDocId, ex)); - if (accessTokenSource == null) { - onFailure.accept(new IllegalArgumentException( - "token document is missing access_token field")); - } else { - Boolean invalidated = (Boolean) accessTokenSource.get("invalidated"); - if (invalidated == null) { - onFailure.accept(new IllegalStateException( - "token document missing invalidated value")); - } else if (invalidated) { - logger.trace("Token [{}] is already invalidated", tokenDocId); - listener.onResponse(false); - } else { - indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix, - getResult.getVersion()); - } - } - } else if (version.onOrAfter(Version.V_6_2_0)) { - logger.warn("could not find token document [{}] but there should " + - "be one as token has version [{}]", tokenDocId, version); - listener.onFailure(invalidGrantException("could not invalidate the token")); - } else { - listener.onResponse(false); - } - }, - e1 -> { - traceLog("get token", tokenDocId, e1); - if (isShardNotAvailableException(e1)) { - // don't increment count; call again - indexInvalidation(tokenDocId, version, listener, attemptCount, srcPrefix, - documentVersion); - } else { - listener.onFailure(e1); - } - }), client::get); + indexInvalidation(tokenIds, listener, attemptCount, srcPrefix, previousResult); } else { listener.onFailure(e); } - }), client::update)); + }), client::bulk)); } } @@ -676,12 +761,12 @@ public final 
class TokenService { public void refreshToken(String refreshToken, ActionListener<Tuple<String, String>> listener) { ensureEnabled(); findTokenFromRefreshToken(refreshToken, - ActionListener.wrap(tuple -> { - final Authentication userAuth = Authentication.readFromContext(client.threadPool().getThreadContext()); - final String tokenDocId = tuple.v1().getHits().getHits()[0].getId(); - innerRefresh(tokenDocId, userAuth, listener, tuple.v2()); - }, listener::onFailure), - new AtomicInteger(0)); + ActionListener.wrap(tuple -> { + final Authentication userAuth = Authentication.readFromContext(client.threadPool().getThreadContext()); + final String tokenDocId = tuple.v1().getHits().getHits()[0].getId(); + innerRefresh(tokenDocId, userAuth, listener, tuple.v2()); + }, listener::onFailure), + new AtomicInteger(0)); } private void findTokenFromRefreshToken(String refreshToken, ActionListener<Tuple<SearchResponse, AtomicInteger>> listener, @@ -691,11 +776,11 @@ public final class TokenService { listener.onFailure(invalidGrantException("could not refresh the requested token")); } else { SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) - .setQuery(QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery("doc_type", "token")) - .filter(QueryBuilders.termQuery("refresh_token.token", refreshToken))) - .setVersion(true) - .request(); + .setQuery(QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("doc_type", "token")) + .filter(QueryBuilders.termQuery("refresh_token.token", refreshToken))) + .setVersion(true) + .request(); final SecurityIndexManager frozenSecurityIndex = securityIndex.freeze(); if (frozenSecurityIndex.indexExists() == false) { @@ -860,12 +945,16 @@ public final class TokenService { } /** - * Find all stored refresh and access tokens that have not been invalidated or expired, and were issued against + * Find stored refresh and access tokens that have not been invalidated or expired, and were issued against * the specified realm.
+ * + * @param realmName The name of the realm for which to get the tokens + * @param listener The listener to notify upon completion + * @param filter an optional Predicate to test the source of the found documents against */ - public void findActiveTokensForRealm(String realmName, ActionListener<Collection<Tuple<UserToken, String>>> listener) { + public void findActiveTokensForRealm(String realmName, ActionListener<Collection<Tuple<UserToken, String>>> listener, + @Nullable Predicate<Map<String, Object>> filter) { ensureEnabled(); - final SecurityIndexManager frozenSecurityIndex = securityIndex.freeze(); if (Strings.isNullOrEmpty(realmName)) { listener.onFailure(new IllegalArgumentException("Realm name is required")); @@ -883,7 +972,10 @@ public final class TokenService { .must(QueryBuilders.termQuery("access_token.invalidated", false)) .must(QueryBuilders.rangeQuery("access_token.user_token.expiration_time").gte(now.toEpochMilli())) ) - .should(QueryBuilders.termQuery("refresh_token.invalidated", false)) + .should(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("refresh_token.invalidated", false)) + .must(QueryBuilders.rangeQuery("creation_time").gte(now.toEpochMilli() - TimeValue.timeValueHours(24).millis())) + ) ); final SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) @@ -893,33 +985,102 @@ public final class TokenService { .setSize(1000) .setFetchSource(true) .request(); - securityIndex.checkIndexVersionThenExecute(listener::onFailure, - () -> ScrollHelper.fetchAllByEntity(client, request, listener, this::parseHit)); + securityIndex.checkIndexVersionThenExecute(listener::onFailure, + () -> ScrollHelper.fetchAllByEntity(client, request, listener, (SearchHit hit) -> filterAndParseHit(hit, filter))); } } - private Tuple<UserToken, String> parseHit(SearchHit hit) { + /** + * Find stored refresh and access tokens that have not been invalidated or expired, and were issued for + * the specified user.
+ * + * @param username The user for which to get the tokens + * @param listener The listener to notify upon completion + */ + public void findActiveTokensForUser(String username, ActionListener<Collection<Tuple<UserToken, String>>> listener) { + ensureEnabled(); + + final SecurityIndexManager frozenSecurityIndex = securityIndex.freeze(); + if (Strings.isNullOrEmpty(username)) { + listener.onFailure(new IllegalArgumentException("username is required")); + } else if (frozenSecurityIndex.indexExists() == false) { + listener.onResponse(Collections.emptyList()); + } else if (frozenSecurityIndex.isAvailable() == false) { + listener.onFailure(frozenSecurityIndex.getUnavailableReason()); + } else { + final Instant now = clock.instant(); + final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("doc_type", "token")) + .filter(QueryBuilders.boolQuery() + .should(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("access_token.invalidated", false)) + .must(QueryBuilders.rangeQuery("access_token.user_token.expiration_time").gte(now.toEpochMilli())) + ) + .should(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("refresh_token.invalidated", false)) + .must(QueryBuilders.rangeQuery("creation_time").gte(now.toEpochMilli() - TimeValue.timeValueHours(24).millis())) + ) + ); + + final SearchRequest request = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) + .setScroll(DEFAULT_KEEPALIVE_SETTING.get(settings)) + .setQuery(boolQuery) + .setVersion(false) + .setSize(1000) + .setFetchSource(true) + .request(); + securityIndex.checkIndexVersionThenExecute(listener::onFailure, + () -> ScrollHelper.fetchAllByEntity(client, request, listener, + (SearchHit hit) -> filterAndParseHit(hit, isOfUser(username)))); + } + } + + private static Predicate<Map<String, Object>> isOfUser(String username) { + return source -> { + String auth = (String) source.get("authentication"); + Integer version = (Integer) source.get("version"); + Version authVersion = Version.fromId(version); + try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode(auth))) { + in.setVersion(authVersion); + Authentication authentication = new Authentication(in); + return authentication.getUser().principal().equals(username); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + }; + } + + + private Tuple<UserToken, String> filterAndParseHit(SearchHit hit, @Nullable Predicate<Map<String, Object>> filter) { final Map<String, Object> source = hit.getSourceAsMap(); if (source == null) { throw new IllegalStateException("token document did not have source but source should have been fetched"); } - try { - return parseTokensFromDocument(source); + try { + return parseTokensFromDocument(source, filter); } catch (IOException e) { throw invalidGrantException("cannot read token from document"); } } /** - * @return A {@link Tuple} of access-token and refresh-token-id + * + * Parses a token document into a Tuple of a {@link UserToken} and a String representing the corresponding refresh_token + * + * @param source The token document source as retrieved + * @param filter an optional Predicate to test the source of the UserToken against + * @return A {@link Tuple} of access-token and refresh-token-id or null if a Predicate is defined and the userToken source doesn't + * satisfy it */ - private Tuple<UserToken, String> parseTokensFromDocument(Map<String, Object> source) throws IOException { - final String refreshToken = (String) ((Map<String, Object>) source.get("refresh_token")).get("token"); + private Tuple<UserToken, String> parseTokensFromDocument(Map<String, Object> source, @Nullable Predicate<Map<String, Object>> filter) + throws IOException { + final String refreshToken = (String) ((Map<String, Object>)
source.get("refresh_token")).get("token"); final Map<String, Object> userTokenSource = (Map<String, Object>) - ((Map<String, Object>) source.get("access_token")).get("user_token"); + ((Map<String, Object>) source.get("access_token")).get("user_token"); + if (null != filter && filter.test(userTokenSource) == false) { + return null; + } final String id = (String) userTokenSource.get("id"); final Integer version = (Integer) userTokenSource.get("version"); final String authString = (String) userTokenSource.get("authentication"); @@ -951,6 +1112,23 @@ public final class TokenService { return "token_" + id; } + private static String getTokenIdFromDocumentId(String docId) { + if (docId.startsWith("token_") == false) { + throw new IllegalStateException("TokenDocument ID [" + docId + "] has unexpected value"); + } else { + return docId.substring("token_".length()); + } + } + + private static String getTokenIdFromInvalidatedTokenDocumentId(String docId) { + final String invalidatedTokenDocPrefix = INVALIDATED_TOKEN_DOC_TYPE + "_"; + if (docId.startsWith(invalidatedTokenDocPrefix) == false) { + throw new IllegalStateException("InvalidatedTokenDocument ID [" + docId + "] has unexpected value"); + } else { + return docId.substring(invalidatedTokenDocPrefix.length()); + } + } + private void ensureEnabled() { if (enabled == false) { throw new IllegalStateException("tokens are not enabled"); @@ -1149,7 +1327,7 @@ public final class TokenService { } /** - * Creates an {@link ElasticsearchSecurityException} that indicates the token was expired. It + * Creates an {@link ElasticsearchSecurityException} that indicates the token was malformed. It * is up to the client to re-authenticate and obtain a new token. The format for this response * is defined in <a href="https://tools.ietf.org/html/rfc6750#section-3.1">RFC 6750</a> */ @@ -1171,7 +1349,7 @@ public final class TokenService { } /** - * Logs an exception at TRACE level (if enabled) + * Logs an exception concerning a specific Token at TRACE level (if enabled) */ private <E extends Throwable> E traceLog(String action, String identifier, E exception) { if (logger.isTraceEnabled()) { @@ -1179,12 +1357,34 @@ public final class TokenService { final ElasticsearchException esEx = (ElasticsearchException) exception; final Object detail = esEx.getHeader("error_description"); if (detail != null) { - logger.trace("Failure in [{}] for id [{}] - [{}] [{}]", action, identifier, detail, esEx.getDetailedMessage()); + logger.trace(() -> new ParameterizedMessage("Failure in [{}] for id [{}] - [{}]", action, identifier, detail), + esEx); } else { - logger.trace("Failure in [{}] for id [{}] - [{}]", action, identifier, esEx.getDetailedMessage()); + logger.trace(() -> new ParameterizedMessage("Failure in [{}] for id [{}]", action, identifier), + esEx); } } else { - logger.trace("Failure in [{}] for id [{}] - [{}]", action, identifier, exception.toString()); + logger.trace(() -> new ParameterizedMessage("Failure in [{}] for id [{}]", action, identifier), exception); + } + } + return exception; + } + + /** + * Logs an exception at TRACE level (if enabled) + */ + private <E extends Throwable> E traceLog(String action, E exception) { + if (logger.isTraceEnabled()) { + if (exception instanceof ElasticsearchException) { + final ElasticsearchException esEx = (ElasticsearchException) exception; + final Object detail = esEx.getHeader("error_description"); + if (detail != null) { + logger.trace(() -> new ParameterizedMessage("Failure in [{}] - [{}]", action, detail), esEx); + } else { + logger.trace(() -> new ParameterizedMessage("Failure in [{}]", action), esEx); + } + } else { + logger.trace(() -> new ParameterizedMessage("Failure in [{}]", action), exception);
} } return exception; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java index 52228d2823a..9801f3c93c8 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenAction.java @@ -9,7 +9,6 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -37,11 +36,32 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; public final class RestInvalidateTokenAction extends SecurityBaseRestHandler { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestInvalidateTokenAction.class)); - static final ConstructingObjectParser<Tuple<String, String>, Void> PARSER = - new ConstructingObjectParser<>("invalidate_token", a -> new Tuple<>((String) a[0], (String) a[1])); + static final ConstructingObjectParser<InvalidateTokenRequest, Void> PARSER = + new ConstructingObjectParser<>("invalidate_token", a -> { + final String token = (String) a[0]; + final String refreshToken = (String) a[1]; + final String tokenString; + final String tokenType; + if (Strings.hasLength(token) && Strings.hasLength(refreshToken)) { + throw new IllegalArgumentException("only one of [token, refresh_token] may be sent per request"); + } else if (Strings.hasLength(token)) { + tokenString = token; + tokenType = InvalidateTokenRequest.Type.ACCESS_TOKEN.getValue(); + } else if (Strings.hasLength(refreshToken)) { + tokenString = refreshToken; + tokenType = InvalidateTokenRequest.Type.REFRESH_TOKEN.getValue(); + } else { + tokenString = null; + tokenType = null; + } + return new InvalidateTokenRequest(tokenString, tokenType, (String) a[2], (String) a[3]); + }); + static { PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("token")); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("refresh_token")); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("realm_name")); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), new ParseField("username")); } public RestInvalidateTokenAction(Settings settings, RestController controller, XPackLicenseState xPackLicenseState) { @@ -60,36 +80,16 @@ public final class RestInvalidateTokenAction extends SecurityBaseRestHandler { @Override protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { try (XContentParser parser = request.contentParser()) { - final Tuple<String, String> tuple = PARSER.parse(parser, null); - final String token = tuple.v1(); - final String refreshToken = tuple.v2(); - - final String tokenString; - final InvalidateTokenRequest.Type type; - if (Strings.hasLength(token) && Strings.hasLength(refreshToken)) { - throw new IllegalArgumentException("only one of [token, refresh_token] may be sent per request"); - } else if (Strings.hasLength(token)) { - tokenString = token; - type =
InvalidateTokenRequest.Type.ACCESS_TOKEN; - } else if (Strings.hasLength(refreshToken)) { - tokenString = refreshToken; - type = InvalidateTokenRequest.Type.REFRESH_TOKEN; - } else { - tokenString = null; - type = null; - } - - final InvalidateTokenRequest tokenRequest = new InvalidateTokenRequest(tokenString, type); - return channel -> client.execute(InvalidateTokenAction.INSTANCE, tokenRequest, - new RestBuilderListener<InvalidateTokenResponse>(channel) { - @Override - public RestResponse buildResponse(InvalidateTokenResponse invalidateResp, - XContentBuilder builder) throws Exception { - return new BytesRestResponse(RestStatus.OK, builder.startObject() - .field("created", invalidateResp.isCreated()) - .endObject()); - } - }); + final InvalidateTokenRequest invalidateTokenRequest = PARSER.parse(parser, null); + return channel -> client.execute(InvalidateTokenAction.INSTANCE, invalidateTokenRequest, + new RestBuilderListener<InvalidateTokenResponse>(channel) { + @Override + public RestResponse buildResponse(InvalidateTokenResponse invalidateResp, + XContentBuilder builder) throws Exception { + invalidateResp.toXContent(builder, channel.request()); + return new BytesRestResponse(RestStatus.OK, builder); + } + }); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index ba1d1762f06..5a4c8f3bde8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -11,6 +11,10 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; @@ -21,11 +25,11 @@ import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; +import org.elasticsearch.action.search.SearchScrollAction; +import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; @@ -106,11 +110,12 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { private SamlRealm samlRealm; private TokenService tokenService; private List<IndexRequest> indexRequests; - private List<UpdateRequest> updateRequests; + private List<BulkRequest> bulkRequests; private List<SearchRequest> searchRequests; private TransportSamlInvalidateSessionAction action; private SamlLogoutRequestHandler.Result logoutRequest; private Function<SearchRequest, SearchHit[]> searchFunction =
ignore -> new SearchHit[0]; + private Function<SearchScrollRequest, SearchHit[]> searchScrollFunction = ignore -> new SearchHit[0]; @Before public void setup() throws Exception { @@ -132,8 +137,8 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { new Authentication(new User("kibana"), new RealmRef("realm", "type", "node"), null).writeToContext(threadContext); indexRequests = new ArrayList<>(); - updateRequests = new ArrayList<>(); searchRequests = new ArrayList<>(); + bulkRequests = new ArrayList<>(); final Client client = new NoOpClient(threadPool) { @Override protected @@ -143,20 +148,29 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { IndexRequest indexRequest = (IndexRequest) request; indexRequests.add(indexRequest); final IndexResponse response = new IndexResponse( - indexRequest.shardId(), indexRequest.type(), indexRequest.id(), 1, 1, 1, true); + indexRequest.shardId(), indexRequest.type(), indexRequest.id(), 1, 1, 1, true); + listener.onResponse((Response) response); + } else if (BulkAction.NAME.equals(action.name())) { + assertThat(request, instanceOf(BulkRequest.class)); + bulkRequests.add((BulkRequest) request); + final BulkResponse response = new BulkResponse(new BulkItemResponse[0], 1); listener.onResponse((Response) response); - } else if (UpdateAction.NAME.equals(action.name())) { - assertThat(request, instanceOf(UpdateRequest.class)); - updateRequests.add((UpdateRequest) request); - listener.onResponse((Response) new UpdateResponse()); } else if (SearchAction.NAME.equals(action.name())) { assertThat(request, instanceOf(SearchRequest.class)); SearchRequest searchRequest = (SearchRequest) request; searchRequests.add(searchRequest); final SearchHit[] hits = searchFunction.apply(searchRequest); final SearchResponse response = new SearchResponse( - new SearchResponseSections(new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), - null, null, false, false, null, 1), "_scrollId1", 1, 1, 0, 1, null, null); + new SearchResponseSections(new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, null, false, false, null, 1), "_scrollId1", 1, 1, 0, 1, null, null); + listener.onResponse((Response) response); + } else if (SearchScrollAction.NAME.equals(action.name())) { + assertThat(request, instanceOf(SearchScrollRequest.class)); + SearchScrollRequest searchScrollRequest = (SearchScrollRequest) request; + final SearchHit[] hits = searchScrollFunction.apply(searchScrollRequest); + final SearchResponse response = new SearchResponse( + new SearchResponseSections(new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, null, false, false, null, 1), "_scrollId1", 1, 1, 0, 1, null, null); listener.onResponse((Response) response); } else if (ClearScrollAction.NAME.equals(action.name())) { assertThat(request, instanceOf(ClearScrollRequest.class)); @@ -296,15 +310,33 @@ public class TransportSamlInvalidateSessionActionTests extends SamlTestCase { assertThat(((TermQueryBuilder) filter1.get(1)).fieldName(), equalTo("refresh_token.token")); assertThat(((TermQueryBuilder) filter1.get(1)).value(), equalTo(tokenToInvalidate1.v2())); - assertThat(updateRequests.size(), equalTo(4)); // (refresh-token + access-token) * 2 - assertThat(updateRequests.get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); - assertThat(updateRequests.get(1).id(), equalTo(updateRequests.get(0).id())); - assertThat(updateRequests.get(2).id(), equalTo("token_" + tokenToInvalidate2.v1().getId()));
- assertThat(updateRequests.get(3).id(), equalTo(updateRequests.get(2).id())); - - assertThat(indexRequests.size(), equalTo(2)); // bwc-invalidate * 2 - assertThat(indexRequests.get(0).id(), startsWith("invalidated-token_")); - assertThat(indexRequests.get(1).id(), startsWith("invalidated-token_")); + assertThat(bulkRequests.size(), equalTo(6)); // 4 updates (refresh-token + access-token) plus 2 indexes (bwc-invalidate * 2) + // Invalidate refresh token 1 + assertThat(bulkRequests.get(0).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(0).requests().get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); + UpdateRequest updateRequest1 = (UpdateRequest) bulkRequests.get(0).requests().get(0); + assertThat(updateRequest1.toString().contains("refresh_token"), equalTo(true)); + // BWC invalidate access token 1 + assertThat(bulkRequests.get(1).requests().get(0), instanceOf(IndexRequest.class)); + assertThat(bulkRequests.get(1).requests().get(0).id(), equalTo("invalidated-token_" + tokenToInvalidate1.v1().getId())); + // Invalidate access token 1 + assertThat(bulkRequests.get(2).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(2).requests().get(0).id(), equalTo("token_" + tokenToInvalidate1.v1().getId())); + UpdateRequest updateRequest2 = (UpdateRequest) bulkRequests.get(2).requests().get(0); + assertThat(updateRequest2.toString().contains("access_token"), equalTo(true)); + // Invalidate refresh token 2 + assertThat(bulkRequests.get(3).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(3).requests().get(0).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); + UpdateRequest updateRequest3 = (UpdateRequest) bulkRequests.get(3).requests().get(0); + assertThat(updateRequest3.toString().contains("refresh_token"), equalTo(true)); + // BWC invalidate access token 2 + assertThat(bulkRequests.get(4).requests().get(0), instanceOf(IndexRequest.class)); + assertThat(bulkRequests.get(4).requests().get(0).id(), equalTo("invalidated-token_" + tokenToInvalidate2.v1().getId())); + // Invalidate access token 2 + assertThat(bulkRequests.get(5).requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequests.get(5).requests().get(0).id(), equalTo("token_" + tokenToInvalidate2.v1().getId())); + UpdateRequest updateRequest4 = (UpdateRequest) bulkRequests.get(5).requests().get(0); + assertThat(updateRequest4.toString().contains("access_token"), equalTo(true)); } private Function<SearchRequest, SearchHit[]> findTokenByRefreshToken(SearchHit[] searchHits) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 66d3233b07a..7dec105e1ee 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -6,7 +6,11 @@ package org.elasticsearch.xpack.security.action.saml; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetAction; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; @@ -24,7 +28,6 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.update.UpdateAction; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateRequestBuilder; -import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.MapBuilder; @@ -72,6 +75,9 @@ import java.util.function.Consumer; import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; import static org.elasticsearch.xpack.security.authc.TokenServiceTests.mockGetTokenFromId; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; import static org.mockito.Matchers.any; @@ -89,7 +95,7 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { private SamlRealm samlRealm; private TokenService tokenService; private List indexRequests; - private List updateRequests; + private List bulkRequests; private TransportSamlLogoutAction action; private Client client; @@ -112,7 +118,7 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { new Authentication(new User("kibana"), new Authentication.RealmRef("realm", "type", "node"), null).writeToContext(threadContext); indexRequests = new ArrayList<>(); - updateRequests = new ArrayList<>(); + bulkRequests = new ArrayList<>(); client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); when(client.settings()).thenReturn(settings); @@ -137,6 +143,10 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { .setId((String) invocationOnMock.getArguments()[2]); return builder; }).when(client).prepareUpdate(anyString(), anyString(), anyString()); + doAnswer(invocationOnMock -> { + BulkRequestBuilder builder = new BulkRequestBuilder(client, BulkAction.INSTANCE); + return builder; + }).when(client).prepareBulk(); when(client.prepareMultiGet()).thenReturn(new MultiGetRequestBuilder(client, MultiGetAction.INSTANCE)); doAnswer(invocationOnMock -> { ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; @@ -154,15 +164,6 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { listener.onResponse(response); return Void.TYPE; }).when(client).multiGet(any(MultiGetRequest.class), any(ActionListener.class)); - doAnswer(invocationOnMock -> { - UpdateRequest updateRequest = (UpdateRequest) invocationOnMock.getArguments()[0]; - ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; - updateRequests.add(updateRequest); - final UpdateResponse response = new UpdateResponse( - updateRequest.getShardId(), updateRequest.type(), updateRequest.id(), 1, DocWriteResponse.Result.UPDATED); - listener.onResponse(response); - return Void.TYPE; - }).when(client).update(any(UpdateRequest.class), any(ActionListener.class)); doAnswer(invocationOnMock -> { IndexRequest indexRequest = (IndexRequest) invocationOnMock.getArguments()[0]; ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; @@ -181,6 +182,14 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { listener.onResponse(response); return 
Void.TYPE; }).when(client).execute(eq(IndexAction.INSTANCE), any(IndexRequest.class), any(ActionListener.class)); + doAnswer(invocationOnMock -> { + BulkRequest bulkRequest = (BulkRequest) invocationOnMock.getArguments()[0]; + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + bulkRequests.add(bulkRequest); + final BulkResponse response = new BulkResponse(new BulkItemResponse[0], 1); + listener.onResponse(response); + return Void.TYPE; + }).when(client).bulk(any(BulkRequest.class), any(ActionListener.class)); final SecurityIndexManager securityIndex = mock(SecurityIndexManager.class); doAnswer(inv -> { @@ -247,9 +256,17 @@ public class TransportSamlLogoutActionTests extends SamlTestCase { assertThat(indexRequest1, notNullValue()); assertThat(indexRequest1.id(), startsWith("token")); - final IndexRequest indexRequest2 = indexRequests.get(1); - assertThat(indexRequest2, notNullValue()); - assertThat(indexRequest2.id(), startsWith("invalidated-token")); + assertThat(bulkRequests.size(), equalTo(2)); + final BulkRequest bulkRequest1 = bulkRequests.get(0); + assertThat(bulkRequest1.requests().size(), equalTo(1)); + assertThat(bulkRequest1.requests().get(0), instanceOf(IndexRequest.class)); + assertThat(bulkRequest1.requests().get(0).id(), startsWith("invalidated-token_")); + + final BulkRequest bulkRequest2 = bulkRequests.get(1); + assertThat(bulkRequest2.requests().size(), equalTo(1)); + assertThat(bulkRequest2.requests().get(0), instanceOf(UpdateRequest.class)); + assertThat(bulkRequest2.requests().get(0).id(), startsWith("token_")); + assertThat(bulkRequest2.requests().get(0).toString(), containsString("\"access_token\":{\"invalidated\":true")); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index c4efdc16e10..968c17f556b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java @@ -144,7 +144,9 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { .prepareInvalidateToken(response.getTokenString()) .setType(InvalidateTokenRequest.Type.ACCESS_TOKEN) .get(); - assertTrue(invalidateResponse.isCreated()); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(1)); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); AtomicReference docId = new AtomicReference<>(); assertBusy(() -> { SearchResponse searchResponse = client.prepareSearch(SecurityIndexManager.SECURITY_INDEX_NAME) @@ -189,6 +191,72 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { }, 30, TimeUnit.SECONDS); } + public void testInvalidateAllTokensForUser() throws Exception{ + final int numOfRequests = randomIntBetween(5, 10); + for (int i = 0; i < numOfRequests; i++) { + securityClient().prepareCreateToken() + .setGrantType("password") + .setUsername(SecuritySettingsSource.TEST_USER_NAME) + .setPassword(new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())) + .get(); + } + Client client = client().filterWithHeader(Collections.singletonMap("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, + 
SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING))); + SecurityClient securityClientSuperuser = new SecurityClient(client); + InvalidateTokenResponse invalidateResponse = securityClientSuperuser + .prepareInvalidateToken() + .setUserName(SecuritySettingsSource.TEST_USER_NAME) + .get(); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(2 * (numOfRequests))); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); + } + + public void testInvalidateAllTokensForRealm() throws Exception{ + final int numOfRequests = randomIntBetween(5, 10); + for (int i = 0; i < numOfRequests; i++) { + securityClient().prepareCreateToken() + .setGrantType("password") + .setUsername(SecuritySettingsSource.TEST_USER_NAME) + .setPassword(new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())) + .get(); + } + Client client = client().filterWithHeader(Collections.singletonMap("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING))); + SecurityClient securityClientSuperuser = new SecurityClient(client); + InvalidateTokenResponse invalidateResponse = securityClientSuperuser + .prepareInvalidateToken() + .setRealmName("file") + .get(); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(2 * (numOfRequests))); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); + } + + public void testInvalidateAllTokensForRealmThatHasNone() { + final int numOfRequests = randomIntBetween(2, 4); + for (int i = 0; i < numOfRequests; i++) { + securityClient().prepareCreateToken() + .setGrantType("password") + .setUsername(SecuritySettingsSource.TEST_USER_NAME) + .setPassword(new SecureString(SecuritySettingsSourceField.TEST_PASSWORD.toCharArray())) + .get(); + } + Client client = client().filterWithHeader(Collections.singletonMap("Authorization", + UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING))); + SecurityClient securityClientSuperuser = new SecurityClient(client); + InvalidateTokenResponse invalidateResponse = securityClientSuperuser + .prepareInvalidateToken() + .setRealmName("saml") + .get(); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); + } + public void testExpireMultipleTimes() { CreateTokenResponse response = securityClient().prepareCreateToken() .setGrantType("password") @@ -200,12 +268,16 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { .prepareInvalidateToken(response.getTokenString()) .setType(InvalidateTokenRequest.Type.ACCESS_TOKEN) .get(); - assertTrue(invalidateResponse.isCreated()); - assertFalse(securityClient() - .prepareInvalidateToken(response.getTokenString()) - .setType(InvalidateTokenRequest.Type.ACCESS_TOKEN) - .get() - .isCreated()); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(1)); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + 
assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); + InvalidateTokenResponse invalidateAgainResponse = securityClient() + .prepareInvalidateToken(response.getTokenString()) + .setType(InvalidateTokenRequest.Type.ACCESS_TOKEN) + .get(); + assertThat(invalidateAgainResponse.getResult().getInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateAgainResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(1)); + assertThat(invalidateAgainResponse.getResult().getErrors().size(), equalTo(0)); } public void testRefreshingToken() { @@ -248,7 +320,9 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { .prepareInvalidateToken(createTokenResponse.getRefreshToken()) .setType(InvalidateTokenRequest.Type.REFRESH_TOKEN) .get(); - assertTrue(invalidateResponse.isCreated()); + assertThat(invalidateResponse.getResult().getInvalidatedTokens().size(), equalTo(1)); + assertThat(invalidateResponse.getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponse.getResult().getErrors().size(), equalTo(0)); ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> securityClient.prepareRefreshToken(createTokenResponse.getRefreshToken()).get()); @@ -362,9 +436,11 @@ public class TokenAuthIntegTests extends SecurityIntegTestCase { // invalidate PlainActionFuture invalidateResponseFuture = new PlainActionFuture<>(); InvalidateTokenRequest invalidateTokenRequest = - new InvalidateTokenRequest(createTokenResponse.getTokenString(), InvalidateTokenRequest.Type.ACCESS_TOKEN); + new InvalidateTokenRequest(createTokenResponse.getTokenString(), InvalidateTokenRequest.Type.ACCESS_TOKEN.getValue()); securityClient.invalidateToken(invalidateTokenRequest, invalidateResponseFuture); - assertTrue(invalidateResponseFuture.get().isCreated()); + assertThat(invalidateResponseFuture.get().getResult().getInvalidatedTokens().size(), equalTo(1)); + assertThat(invalidateResponseFuture.get().getResult().getPreviouslyInvalidatedTokens().size(), equalTo(0)); + assertThat(invalidateResponseFuture.get().getResult().getErrors().size(), equalTo(0)); ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () -> { PlainActionFuture responseFuture = new PlainActionFuture<>(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 7926b44a38c..286f07667ec 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -48,6 +48,7 @@ import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.TokenMetaData; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -523,7 +524,7 @@ public class TokenServiceTests extends ESTestCase { assertNull(future.get()); e = expectThrows(IllegalStateException.class, () -> { - PlainActionFuture invalidateFuture = new PlainActionFuture<>(); + 
PlainActionFuture invalidateFuture = new PlainActionFuture<>(); tokenService.invalidateAccessToken((String) null, invalidateFuture); invalidateFuture.actionGet(); }); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java new file mode 100644 index 00000000000..06c9411d0bc --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/TokensInvalidationResultTests.java @@ -0,0 +1,74 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.security.authc.support; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult; + +import java.util.Arrays; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; + +public class TokensInvalidationResultTests extends ESTestCase { + + public void testToXcontent() throws Exception{ + TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList("token1", "token2"), + Arrays.asList("token3", "token4"), + Arrays.asList(new ElasticsearchException("foo", new IllegalStateException("bar")), + new ElasticsearchException("boo", new IllegalStateException("far"))), + randomIntBetween(0, 5)); + + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + result.toXContent(builder, ToXContent.EMPTY_PARAMS); + assertThat(Strings.toString(builder), + equalTo( + "{\"created\":false," + + "\"invalidated_tokens\":2," + + "\"previously_invalidated_tokens\":2," + + "\"error_count\":2," + + "\"error_details\":[" + + "{\"type\":\"exception\"," + + "\"reason\":\"foo\"," + + "\"caused_by\":{" + + "\"type\":\"illegal_state_exception\"," + + "\"reason\":\"bar\"" + + "}" + + "}," + + "{\"type\":\"exception\"," + + "\"reason\":\"boo\"," + + "\"caused_by\":{" + + "\"type\":\"illegal_state_exception\"," + + "\"reason\":\"far\"" + + "}" + + "}" + + "]" + + "}")); + } + } + + public void testToXcontentWithNoErrors() throws Exception{ + TokensInvalidationResult result = new TokensInvalidationResult(Arrays.asList("token1", "token2"), + Collections.emptyList(), + Collections.emptyList(), randomIntBetween(0, 5)); + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + result.toXContent(builder, ToXContent.EMPTY_PARAMS); + assertThat(Strings.toString(builder), + equalTo( + "{\"created\":true," + + "\"invalidated_tokens\":2," + + "\"previously_invalidated_tokens\":0," + + "\"error_count\":0" + + "}")); + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenActionTests.java new file mode 100644 index 00000000000..00850ba6e5a --- /dev/null +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestInvalidateTokenActionTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.security.rest.action.oauth2; + +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest; + +import static org.hamcrest.Matchers.containsString; + +public class RestInvalidateTokenActionTests extends ESTestCase { + + public void testParserForUserAndRealm() throws Exception { + final String request = "{" + + "\"username\": \"user1\"," + + "\"realm_name\": \"realm1\"" + + "}"; + try (XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, request)) { + InvalidateTokenRequest invalidateTokenRequest = RestInvalidateTokenAction.PARSER.parse(parser, null); + assertEquals("user1", invalidateTokenRequest.getUserName()); + assertEquals("realm1", invalidateTokenRequest.getRealmName()); + assertNull(invalidateTokenRequest.getTokenString()); + assertNull(invalidateTokenRequest.getTokenType()); + } + } + + public void testParserForToken() throws Exception { + final String request = "{" + + "\"refresh_token\": \"refresh_token_string\"" + + "}"; + try (XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, request)) { + InvalidateTokenRequest invalidateTokenRequest = RestInvalidateTokenAction.PARSER.parse(parser, null); + assertEquals("refresh_token_string", invalidateTokenRequest.getTokenString()); + assertEquals("refresh_token", invalidateTokenRequest.getTokenType().getValue()); + assertNull(invalidateTokenRequest.getRealmName()); + assertNull(invalidateTokenRequest.getUserName()); + } + } + + public void testParserForIncorrectInput() throws Exception { + final String request = "{" + + "\"refresh_token\": \"refresh_token_string\"," + + "\"token\": \"access_token_string\"" + + "}"; + try (XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, request)) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestInvalidateTokenAction.PARSER.parse(parser, + null)); + assertThat(e.getCause().getMessage(), containsString("only one of [token, refresh_token] may be sent per request")); + + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml index 43f25a11db0..81389ac8524 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/token/10_basic.yml @@ -5,7 +5,7 @@ setup: - do: cluster.health: - wait_for_status: yellow + wait_for_status: yellow - do: security.put_user: @@ -79,7 +79,93 @@ teardown: body: token: $token - - match: { created: true } + - match: { created: true} + - match: { invalidated_tokens: 
1 } + - match: { previously_invalidated_tokens: 0 } + - match: { error_count: 0 } + + - do: + catch: unauthorized + headers: + Authorization: Bearer ${token} + security.authenticate: {} + +--- +"Test invalidate user's tokens": + + - do: + security.get_token: + body: + grant_type: "password" + username: "token_user" + password: "x-pack-test-password" + + - match: { type: "Bearer" } + - is_true: access_token + - set: { access_token: token } + - match: { expires_in: 1200 } + - is_false: scope + + - do: + headers: + Authorization: Bearer ${token} + security.authenticate: {} + + - match: { username: "token_user" } + - match: { roles.0: "superuser" } + - match: { full_name: "Token User" } + + - do: + security.invalidate_token: + body: + username: "token_user" + + - match: { created: true } + - match: { invalidated_tokens: 2 } + - match: { previously_invalidated_tokens: 0 } + - match: { error_count: 0 } + + - do: + catch: unauthorized + headers: + Authorization: Bearer ${token} + security.authenticate: {} + + +--- +"Test invalidate realm's tokens": + + - do: + security.get_token: + body: + grant_type: "password" + username: "token_user" + password: "x-pack-test-password" + + - match: { type: "Bearer" } + - is_true: access_token + - set: { access_token: token } + - match: { expires_in: 1200 } + - is_false: scope + + - do: + headers: + Authorization: Bearer ${token} + security.authenticate: {} + + - match: { username: "token_user" } + - match: { roles.0: "superuser" } + - match: { full_name: "Token User" } + + - do: + security.invalidate_token: + body: + realm_name: "default_native" + + - match: { created: true } + - match: { invalidated_tokens: 2 } + - match: { previously_invalidated_tokens: 0 } + - match: { error_count: 0 } - do: catch: unauthorized From b57e12aa44cfe6aa8d1f708824de5933d85b4cad Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 18 Dec 2018 09:20:51 +0100 Subject: [PATCH 26/26] Add raw sort values to SearchSortValues transport serialization (#36617) In order for CCS alternate execution mode (see #32125) to be able to do the final reduction step on the CCS coordinating node, we need to serialize additional info in the transport layer as part of each `SearchHit`. Sort values are already present but they are formatted according to the provided `DocValueFormat`. The CCS node needs to be able to reconstruct the Lucene `FieldDoc` to include in the `TopFieldDocs` and `CollapseTopFieldDocs` which will feed the `mergeTopDocs` method used to reduce multiple search responses (one per cluster) into one. This commit adds such information to the `SearchSortValues` and exposes it through a new getter method added to `SearchHit` for retrieval. This info is only serialized at transport and never printed out at REST.
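To make the intended use concrete, here is a minimal sketch of the reduction step this enables. The helper class and its rebuild method are hypothetical (only the new SearchHit raw-sort-values getter comes from this patch), and it assumes the caller supplies the doc id and shard index from the hit's metadata on the coordinating node:

import org.apache.lucene.search.FieldDoc;
import org.elasticsearch.search.SearchHit;

final class FieldDocRebuilder {
    // Rebuild the Lucene FieldDoc that mergeTopDocs expects, using the raw
    // (unformatted) sort values now carried over the transport layer. The raw
    // values keep their original types (e.g. BytesRef, Long), unlike the
    // formatted ones, which are meant for REST output only.
    static FieldDoc rebuild(SearchHit hit, int docId, int shardIndex) {
        FieldDoc fieldDoc = new FieldDoc(docId, hit.getScore(), hit.getRawSortValues());
        fieldDoc.shardIndex = shardIndex; // lets the merge attribute the doc to its shard
        return fieldDoc;
    }
}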
--- .../org/elasticsearch/search/SearchHit.java | 31 ++-- .../search/SearchSortValues.java | 134 +++++++----------- .../common/lucene/LuceneTests.java | 20 +-- .../search/SearchSortValuesTests.java | 58 +++++--- .../test/AbstractWireSerializingTestCase.java | 2 +- 5 files changed, 125 insertions(+), 120 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 3d8ea384546..7fd68852ce2 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -19,16 +19,6 @@ package org.elasticsearch.search; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; - import org.apache.lucene.search.Explanation; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.OriginalIndices; @@ -61,6 +51,16 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.lookup.SourceLookup; import org.elasticsearch.transport.RemoteClusterAware; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; + import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static java.util.Collections.unmodifiableMap; @@ -311,10 +311,17 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable 0) { - sortValues = new Object[size]; - for (int i = 0; i < sortValues.length; i++) { - byte type = in.readByte(); - if (type == 0) { - sortValues[i] = null; - } else if (type == 1) { - sortValues[i] = in.readString(); - } else if (type == 2) { - sortValues[i] = in.readInt(); - } else if (type == 3) { - sortValues[i] = in.readLong(); - } else if (type == 4) { - sortValues[i] = in.readFloat(); - } else if (type == 5) { - sortValues[i] = in.readDouble(); - } else if (type == 6) { - sortValues[i] = in.readByte(); - } else if (type == 7) { - sortValues[i] = in.readShort(); - } else if (type == 8) { - sortValues[i] = in.readBoolean(); - } else { - throw new IOException("Can't match type [" + type + "]"); - } - } + SearchSortValues(StreamInput in) throws IOException { + this.formattedSortValues = in.readArray(Lucene::readSortValue, Object[]::new); + if (in.getVersion().onOrAfter(Version.V_7_0_0)) { + this.rawSortValues = in.readArray(Lucene::readSortValue, Object[]::new); } else { - sortValues = new Object[0]; + this.rawSortValues = EMPTY_ARRAY; } } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(sortValues.length); - for (Object sortValue : sortValues) { - if (sortValue == null) { - out.writeByte((byte) 0); - } else { - Class type = sortValue.getClass(); - if (type == String.class) { - out.writeByte((byte) 1); - out.writeString((String) sortValue); - } else if (type == Integer.class) { - out.writeByte((byte) 2); - out.writeInt((Integer) sortValue); - } else if (type == Long.class) { - out.writeByte((byte) 3); - out.writeLong((Long) sortValue); - } else if (type == Float.class) { - out.writeByte((byte) 4); - out.writeFloat((Float) sortValue); - } else if (type == Double.class) { - out.writeByte((byte) 5); - 
out.writeDouble((Double) sortValue); - } else if (type == Byte.class) { - out.writeByte((byte) 6); - out.writeByte((Byte) sortValue); - } else if (type == Short.class) { - out.writeByte((byte) 7); - out.writeShort((Short) sortValue); - } else if (type == Boolean.class) { - out.writeByte((byte) 8); - out.writeBoolean((Boolean) sortValue); - } else { - throw new IOException("Can't handle sort field value of type [" + type + "]"); - } - } + out.writeArray(Lucene::writeSortValue, this.formattedSortValues); + if (out.getVersion().onOrAfter(Version.V_7_0_0)) { + out.writeArray(Lucene::writeSortValue, this.rawSortValues); } } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (sortValues.length > 0) { + if (formattedSortValues.length > 0) { builder.startArray(Fields.SORT); - for (Object sortValue : sortValues) { + for (Object sortValue : formattedSortValues) { builder.value(sortValue); } builder.endArray(); @@ -142,24 +99,37 @@ public class SearchSortValues implements ToXContentFragment, Writeable { return new SearchSortValues(parser.list().toArray()); } - public Object[] sortValues() { - return sortValues; + /** + * Returns the formatted version of the values that sorting was performed against + */ + public Object[] getFormattedSortValues() { + return formattedSortValues; + } + + /** + * Returns the raw version of the values that sorting was performed against + */ + public Object[] getRawSortValues() { + return rawSortValues; } @Override - public boolean equals(Object obj) { - if (this == obj) { + public boolean equals(Object o) { + if (this == o) { return true; } - if (obj == null || getClass() != obj.getClass()) { + if (o == null || getClass() != o.getClass()) { return false; } - SearchSortValues other = (SearchSortValues) obj; - return Arrays.equals(sortValues, other.sortValues); + SearchSortValues that = (SearchSortValues) o; + return Arrays.equals(formattedSortValues, that.formattedSortValues) && + Arrays.equals(rawSortValues, that.rawSortValues); } @Override public int hashCode() { - return Arrays.hashCode(sortValues); + int result = Arrays.hashCode(formattedSortValues); + result = 31 * result + Arrays.hashCode(rawSortValues); + return result; } } diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index ea894a2edd0..1891be362b8 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -531,24 +531,26 @@ public class LuceneTests extends ESTestCase { } public static Object randomSortValue() { - switch(randomIntBetween(0, 8)) { + switch(randomIntBetween(0, 9)) { case 0: - return randomAlphaOfLengthBetween(3, 10); + return null; case 1: - return randomInt(); + return randomAlphaOfLengthBetween(3, 10); case 2: - return randomLong(); + return randomInt(); case 3: - return randomFloat(); + return randomLong(); case 4: - return randomDouble(); + return randomFloat(); case 5: - return randomByte(); + return randomDouble(); case 6: - return randomShort(); + return randomByte(); case 7: - return randomBoolean(); + return randomShort(); case 8: + return randomBoolean(); + case 9: return new BytesRef(randomAlphaOfLengthBetween(3, 10)); default: throw new UnsupportedOperationException(); diff --git a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java 
b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java index f6b8dc828f4..797b5dd888e 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.search; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.lucene.LuceneTests; import org.elasticsearch.common.xcontent.ToXContent; @@ -31,23 +32,36 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.test.RandomObjects; +import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.Arrays; +import java.util.Base64; public class SearchSortValuesTests extends AbstractSerializingTestCase { public static SearchSortValues createTestItem(XContentType xContentType, boolean transportSerialization) { int size = randomIntBetween(1, 20); Object[] values = new Object[size]; - DocValueFormat[] sortValueFormats = new DocValueFormat[size]; - for (int i = 0; i < size; i++) { - Object sortValue = randomSortValue(xContentType, transportSerialization); - values[i] = sortValue; - //make sure that for BytesRef, we provide a specific doc value format that overrides format(BytesRef) - sortValueFormats[i] = sortValue instanceof BytesRef ? DocValueFormat.RAW : randomDocValueFormat(); + if (transportSerialization) { + DocValueFormat[] sortValueFormats = new DocValueFormat[size]; + for (int i = 0; i < size; i++) { + Object sortValue = randomSortValue(xContentType, transportSerialization); + values[i] = sortValue; + //make sure that for BytesRef, we provide a specific doc value format that overrides format(BytesRef) + sortValueFormats[i] = sortValue instanceof BytesRef ? DocValueFormat.RAW : randomDocValueFormat(); + } + return new SearchSortValues(values, sortValueFormats); + } else { + //xcontent serialization doesn't write/parse the raw sort values, only the formatted ones + for (int i = 0; i < size; i++) { + Object sortValue = randomSortValue(xContentType, transportSerialization); + //make sure that BytesRef are not provided as formatted values + sortValue = sortValue instanceof BytesRef ? DocValueFormat.RAW.format((BytesRef)sortValue) : sortValue; + values[i] = sortValue; + } + return new SearchSortValues(values); } - return new SearchSortValues(values, sortValueFormats); } private static Object randomSortValue(XContentType xContentType, boolean transportSerialization) { @@ -79,7 +93,7 @@ public class SearchSortValuesTests extends AbstractSerializingTestCase exten @Override protected T copyInstance(T instance, Version version) throws IOException { - return copyWriteable(instance, getNamedWriteableRegistry(), instanceReader()); + return copyWriteable(instance, getNamedWriteableRegistry(), instanceReader(), version); } }
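The Version.V_7_0_0 checks in the SearchSortValues stream constructor and writeTo above follow the usual wire-compatibility idiom: a new field is only (de)serialized when the node on the other side of the connection understands it. A minimal standalone sketch of the pattern (GatedValue is a made-up class, not part of this patch):

import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

public class GatedValue implements Writeable {
    private final String existingField;
    private final String newField; // only known to 7.0.0+ nodes

    public GatedValue(String existingField, String newField) {
        this.existingField = existingField;
        this.newField = newField;
    }

    public GatedValue(StreamInput in) throws IOException {
        existingField = in.readString();
        if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
            newField = in.readOptionalString();
        } else {
            newField = null; // fall back to a default when reading from an older node
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(existingField);
        if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
            out.writeOptionalString(newField); // never sent to nodes that cannot parse it
        }
    }
}

Writers omit the field for older peers and readers substitute a default, so mixed-version clusters keep working during a rolling upgrade; this mirrors how SearchSortValues falls back to EMPTY_ARRAY for rawSortValues when reading from a pre-7.0 node.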