Merge remote-tracking branch 'es/7.x' into enrich-7.x

commit df9f06213d
@@ -19,16 +19,20 @@

 package org.elasticsearch.client.dataframe.transforms;

+import org.elasticsearch.Version;
 import org.elasticsearch.client.dataframe.transforms.pivot.PivotConfig;
+import org.elasticsearch.client.dataframe.transforms.util.TimeUtil;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;

 import java.io.IOException;
+import java.time.Instant;
 import java.util.Objects;

 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
@@ -40,6 +44,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
     public static final ParseField SOURCE = new ParseField("source");
     public static final ParseField DEST = new ParseField("dest");
     public static final ParseField DESCRIPTION = new ParseField("description");
+    public static final ParseField VERSION = new ParseField("version");
+    public static final ParseField CREATE_TIME = new ParseField("create_time");
     // types of transforms
     public static final ParseField PIVOT_TRANSFORM = new ParseField("pivot");

@@ -48,6 +54,8 @@ public class DataFrameTransformConfig implements ToXContentObject {
     private final DestConfig dest;
     private final PivotConfig pivotConfig;
     private final String description;
+    private final Version transformVersion;
+    private final Instant createTime;

     public static final ConstructingObjectParser<DataFrameTransformConfig, Void> PARSER =
         new ConstructingObjectParser<>("data_frame_transform", true,
@@ -57,7 +65,9 @@ public class DataFrameTransformConfig implements ToXContentObject {
                 DestConfig dest = (DestConfig) args[2];
                 PivotConfig pivotConfig = (PivotConfig) args[3];
                 String description = (String)args[4];
-                return new DataFrameTransformConfig(id, source, dest, pivotConfig, description);
+                Instant createTime = (Instant)args[5];
+                String transformVersion = (String)args[6];
+                return new DataFrameTransformConfig(id, source, dest, pivotConfig, description, createTime, transformVersion);
             });

     static {
@@ -66,6 +76,9 @@ public class DataFrameTransformConfig implements ToXContentObject {
         PARSER.declareObject(constructorArg(), (p, c) -> DestConfig.PARSER.apply(p, null), DEST);
         PARSER.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p), PIVOT_TRANSFORM);
         PARSER.declareString(optionalConstructorArg(), DESCRIPTION);
+        PARSER.declareField(optionalConstructorArg(),
+            p -> TimeUtil.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()), CREATE_TIME, ObjectParser.ValueType.VALUE);
+        PARSER.declareString(optionalConstructorArg(), VERSION);
     }

     public static DataFrameTransformConfig fromXContent(final XContentParser parser) {
@@ -84,19 +97,23 @@ public class DataFrameTransformConfig implements ToXContentObject {
      * @return A DataFrameTransformConfig to preview, NOTE it will have a {@code null} id, destination and index.
      */
     public static DataFrameTransformConfig forPreview(final SourceConfig source, final PivotConfig pivotConfig) {
-        return new DataFrameTransformConfig(null, source, null, pivotConfig, null);
+        return new DataFrameTransformConfig(null, source, null, pivotConfig, null, null, null);
     }

     DataFrameTransformConfig(final String id,
                              final SourceConfig source,
                              final DestConfig dest,
                              final PivotConfig pivotConfig,
-                             final String description) {
+                             final String description,
+                             final Instant createTime,
+                             final String version) {
         this.id = id;
         this.source = source;
         this.dest = dest;
         this.pivotConfig = pivotConfig;
         this.description = description;
+        this.createTime = createTime == null ? null : Instant.ofEpochMilli(createTime.toEpochMilli());
+        this.transformVersion = version == null ? null : Version.fromString(version);
     }

     public String getId() {
@@ -115,6 +132,14 @@ public class DataFrameTransformConfig implements ToXContentObject {
         return pivotConfig;
     }

+    public Version getVersion() {
+        return transformVersion;
+    }
+
+    public Instant getCreateTime() {
+        return createTime;
+    }
+
     @Nullable
     public String getDescription() {
         return description;
@@ -138,6 +163,12 @@ public class DataFrameTransformConfig implements ToXContentObject {
         if (description != null) {
             builder.field(DESCRIPTION.getPreferredName(), description);
         }
+        if (createTime != null) {
+            builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli());
+        }
+        if (transformVersion != null) {
+            builder.field(VERSION.getPreferredName(), transformVersion);
+        }
         builder.endObject();
         return builder;
     }
@@ -155,15 +186,17 @@ public class DataFrameTransformConfig implements ToXContentObject {
         final DataFrameTransformConfig that = (DataFrameTransformConfig) other;

         return Objects.equals(this.id, that.id)
             && Objects.equals(this.source, that.source)
             && Objects.equals(this.dest, that.dest)
             && Objects.equals(this.description, that.description)
-            && Objects.equals(this.pivotConfig, that.pivotConfig);
+            && Objects.equals(this.transformVersion, that.transformVersion)
+            && Objects.equals(this.createTime, that.createTime)
+            && Objects.equals(this.pivotConfig, that.pivotConfig);
     }

     @Override
     public int hashCode() {
-        return Objects.hash(id, source, dest, pivotConfig, description);
+        return Objects.hash(id, source, dest, pivotConfig, description, createTime, transformVersion);
     }

     @Override
@@ -209,7 +242,7 @@ public class DataFrameTransformConfig implements ToXContentObject {
         }

         public DataFrameTransformConfig build() {
-            return new DataFrameTransformConfig(id, source, dest, pivotConfig, description);
+            return new DataFrameTransformConfig(id, source, dest, pivotConfig, description, null, null);
         }
     }
 }
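The `timeField` call added in `toXContent` above follows the usual {es} convention of pairing the machine-readable epoch-millis field with a human-readable `*_string` twin when the builder is in human-readable mode. A minimal sketch of that behavior (not part of the diff; assumes only `XContentFactory` and `Strings` from the same packages):

[source,java]
----
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class TimeFieldSketch {
    public static void main(String[] args) throws Exception {
        XContentBuilder builder = XContentFactory.jsonBuilder().humanReadable(true);
        builder.startObject();
        // Same call shape as in DataFrameTransformConfig.toXContent above.
        builder.timeField("create_time", "create_time_string", 1560000000000L);
        builder.endObject();
        // With humanReadable(true) both fields are emitted, roughly:
        //   {"create_time_string":"2019-06-08T13:20:00.000Z","create_time":1560000000000}
        // With humanReadable(false) only the millisecond field is written.
        System.out.println(Strings.toString(builder));
    }
}
----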
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.dataframe.transforms.util;
+
+import org.elasticsearch.common.time.DateFormatters;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.time.Instant;
+import java.time.format.DateTimeFormatter;
+import java.util.Date;
+
+public final class TimeUtil {
+
+    /**
+     * Parse out a Date object given the current parser and field name.
+     *
+     * @param parser current XContentParser
+     * @param fieldName the field's preferred name (utilized in exception)
+     * @return parsed Date object
+     * @throws IOException from XContentParser
+     */
+    public static Date parseTimeField(XContentParser parser, String fieldName) throws IOException {
+        if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
+            return new Date(parser.longValue());
+        } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
+            return new Date(DateFormatters.from(DateTimeFormatter.ISO_INSTANT.parse(parser.text())).toInstant().toEpochMilli());
+        }
+        throw new IllegalArgumentException(
+            "unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]");
+    }
+
+    public static Instant parseTimeFieldToInstant(XContentParser parser, String fieldName) throws IOException {
+        if (parser.currentToken() == XContentParser.Token.VALUE_NUMBER) {
+            return Instant.ofEpochMilli(parser.longValue());
+        } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) {
+            return DateFormatters.from(DateTimeFormatter.ISO_INSTANT.parse(parser.text())).toInstant();
+        }
+        throw new IllegalArgumentException(
+            "unexpected token [" + parser.currentToken() + "] for [" + fieldName + "]");
+    }
+
+}
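A quick sketch of how the new helper behaves on the two token types it accepts; any other token throws an `IllegalArgumentException` naming the field. Not part of the diff; the JSON literals and scaffolding are illustrative:

[source,java]
----
import org.elasticsearch.client.dataframe.transforms.util.TimeUtil;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.time.Instant;

public class TimeUtilSketch {
    public static void main(String[] args) throws Exception {
        // A numeric token is read as epoch milliseconds ...
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                "1560000000000")) {
            parser.nextToken(); // position on VALUE_NUMBER
            Instant fromMillis = TimeUtil.parseTimeFieldToInstant(parser, "create_time");
            System.out.println(fromMillis); // 2019-06-08T13:20:00Z
        }
        // ... while a string token must be an ISO-8601 instant.
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                "\"2019-06-08T13:20:00Z\"")) {
            parser.nextToken(); // position on VALUE_STRING
            Instant fromString = TimeUtil.parseTimeFieldToInstant(parser, "create_time");
            System.out.println(fromString); // 2019-06-08T13:20:00Z
        }
    }
}
----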
@@ -195,7 +195,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
             client::getDataFrameTransformAsync);
         assertNull(getResponse.getInvalidTransforms());
         assertThat(getResponse.getTransformConfigurations(), hasSize(1));
-        assertEquals(transform, getResponse.getTransformConfigurations().get(0));
+        assertEquals(transform.getId(), getResponse.getTransformConfigurations().get(0).getId());
     }

     public void testGetAllAndPageTransforms() throws IOException {

@@ -219,7 +219,7 @@ public class DataFrameTransformIT extends ESRestHighLevelClientTestCase {
             client::getDataFrameTransformAsync);
         assertNull(getResponse.getInvalidTransforms());
         assertThat(getResponse.getTransformConfigurations(), hasSize(2));
-        assertEquals(transform, getResponse.getTransformConfigurations().get(1));
+        assertEquals(transform.getId(), getResponse.getTransformConfigurations().get(1).getId());

         getRequest.setPageParams(new PageParams(0,1));
         getResponse = execute(getRequest, client::getDataFrameTransform,
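The loosened assertions above follow from the config changes: the server now stamps `create_time` and `version` on stored configs, and `equals()` compares those fields too, so full-object equality against the locally built `transform` no longer holds. A sketch of the distinction (illustrative only, not from the diff):

[source,java]
----
import org.elasticsearch.client.dataframe.transforms.DataFrameTransformConfig;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

public class IdComparisonSketch {
    // `local` is the config the test built and PUT; `stored` is the copy the
    // server returns, now stamped with create_time and version.
    static void assertSameTransform(DataFrameTransformConfig local, DataFrameTransformConfig stored) {
        assertNotEquals(local, stored);              // equals() now covers the stamped fields
        assertEquals(local.getId(), stored.getId()); // so the id is the stable key to compare
    }
}
----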
@@ -19,6 +19,7 @@

 package org.elasticsearch.client.dataframe.transforms;

+import org.elasticsearch.Version;
 import org.elasticsearch.client.dataframe.transforms.pivot.PivotConfigTests;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;

@@ -27,6 +28,7 @@ import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.test.AbstractXContentTestCase;

 import java.io.IOException;
+import java.time.Instant;
 import java.util.Collections;
 import java.util.function.Predicate;

@@ -36,8 +38,13 @@ import static org.elasticsearch.client.dataframe.transforms.SourceConfigTests.ra
 public class DataFrameTransformConfigTests extends AbstractXContentTestCase<DataFrameTransformConfig> {

     public static DataFrameTransformConfig randomDataFrameTransformConfig() {
-        return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10), randomSourceConfig(),
-            randomDestConfig(), PivotConfigTests.randomPivotConfig(), randomBoolean() ? null : randomAlphaOfLengthBetween(1, 100));
+        return new DataFrameTransformConfig(randomAlphaOfLengthBetween(1, 10),
+            randomSourceConfig(),
+            randomDestConfig(),
+            PivotConfigTests.randomPivotConfig(),
+            randomBoolean() ? null : randomAlphaOfLengthBetween(1, 100),
+            randomBoolean() ? null : Instant.now(),
+            randomBoolean() ? null : Version.CURRENT.toString());
     }

     @Override
@@ -478,7 +478,6 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest

         RestHighLevelClient client = highLevelClient();

-        QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
         GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
             TermsGroupSource.builder().setField("user_id").build()).build();
         AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();

@@ -564,7 +563,6 @@ public class DataFrameTransformDocumentationIT extends ESRestHighLevelClientTest
     public void testGetDataFrameTransform() throws IOException, InterruptedException {
         createIndex("source-data");

-        QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
         GroupConfig groupConfig = GroupConfig.builder().groupBy("reviewer",
             TermsGroupSource.builder().setField("user_id").build()).build();
         AggregatorFactories.Builder aggBuilder = new AggregatorFactories.Builder();
@@ -28,7 +28,7 @@ This page lists all the available search queries with their corresponding `Query
 | {ref}/query-dsl-simple-query-string-query.html[Simple Query String] | {query-ref}/SimpleQueryStringBuilder.html[SimpleQueryStringBuilder] | {query-ref}/QueryBuilders.html#simpleQueryStringQuery-java.lang.String-[QueryBuilders.simpleQueryStringQuery()]
 |======

-==== Term level queries
+==== Term-level queries
 [options="header"]
 |======
 | Search Query | QueryBuilder Class | Method in QueryBuilders
@@ -100,7 +100,7 @@ PUT _data_frame/transforms/ecommerce_transform
 }
 --------------------------------------------------
 // CONSOLE
-// TEST[setup:kibana_sample_data_ecommerce]
+// TEST[skip: https://github.com/elastic/elasticsearch/issues/43271]

 When the transform is created, you receive the following results:
 [source,js]
@@ -3,6 +3,7 @@

 [partintro]
 --
+TIP: The fastest way to get started with {es} is to https://www.elastic.co/cloud/elasticsearch-service/signup[start a free 14-day trial of Elasticsearch Service] in the cloud.

 Elasticsearch is a highly scalable open-source full-text search and analytics engine. It allows you to store, search, and analyze big volumes of data quickly and in near real time. It is generally used as the underlying engine/technology that powers applications that have complex search features and requirements.

@@ -118,10 +119,11 @@ NOTE: Elasticsearch includes a bundled version of http://openjdk.java.net[OpenJD
 from the JDK maintainers (GPLv2+CE). To use your own version of Java,
 see the <<jvm-version, JVM version requirements>>

-The binaries are available from http://www.elastic.co/downloads[`www.elastic.co/downloads`]
-along with all the releases that have been made in the past. For each release, platform
-dependent archive versions are available for Windows, Linux and MacOS, as well as `DEB` and `RPM`
-packages for Linux, and `MSI` installation packages for Windows.
+The binaries are available from http://www.elastic.co/downloads[`www.elastic.co/downloads`].
+Platform dependent archives are available for Windows, Linux and macOS. In addition,
+`DEB` and `RPM` packages are available for Linux, and an `MSI` installation package
+is available for Windows. You can also use the Elastic Homebrew tap to <<brew,install
+using the brew package manager>> on macOS.

 [float]
 === Installation example on Linux
@@ -26,7 +26,26 @@ index.search.slowlog.threshold.fetch.trace: 200ms
 index.search.slowlog.level: info
 --------------------------------------------------

-All of the above settings are _dynamic_ and are set per-index.
+All of the above settings are _dynamic_ and can be set for each index using the
+<<indices-update-settings, update indices settings>> API. For example:
+
+[source,js]
+--------------------------------------------------
+PUT /twitter/_settings
+{
+  "index.search.slowlog.threshold.query.warn": "10s",
+  "index.search.slowlog.threshold.query.info": "5s",
+  "index.search.slowlog.threshold.query.debug": "2s",
+  "index.search.slowlog.threshold.query.trace": "500ms",
+  "index.search.slowlog.threshold.fetch.warn": "1s",
+  "index.search.slowlog.threshold.fetch.info": "800ms",
+  "index.search.slowlog.threshold.fetch.debug": "500ms",
+  "index.search.slowlog.threshold.fetch.trace": "200ms",
+  "index.search.slowlog.level": "info"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]

 By default, none are enabled (set to `-1`). Levels (`warn`, `info`,
 `debug`, `trace`) allow to control under which logging level the log
@@ -83,7 +102,23 @@ index.indexing.slowlog.level: info
 index.indexing.slowlog.source: 1000
 --------------------------------------------------

-All of the above settings are _dynamic_ and are set per-index.
+All of the above settings are _dynamic_ and can be set for each index using the
+<<indices-update-settings, update indices settings>> API. For example:
+
+[source,js]
+--------------------------------------------------
+PUT /twitter/_settings
+{
+  "index.indexing.slowlog.threshold.index.warn": "10s",
+  "index.indexing.slowlog.threshold.index.info": "5s",
+  "index.indexing.slowlog.threshold.index.debug": "2s",
+  "index.indexing.slowlog.threshold.index.trace": "500ms",
+  "index.indexing.slowlog.level": "info",
+  "index.indexing.slowlog.source": "1000"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:twitter]

 By default Elasticsearch will log the first 1000 characters of the _source in
 the slowlog. You can change that with `index.indexing.slowlog.source`. Setting
@@ -249,7 +249,7 @@ Response:
 }
 --------------------------------------------------
 // TESTRESPONSE[s/"source" : \{[^}]*\}/"source" : $body.$_path/]
-// TESTRESPONSE[s/"details" : \[[^\]]*\]//]
+// TESTRESPONSE[s/"details" : \[[^\]]*\]/"details" : $body.$_path/]
 // TESTRESPONSE[s/: (\-)?[0-9]+/: $body.$_path/]
 // TESTRESPONSE[s/: "[^"]*"/: $body.$_path/]
 ////
@@ -47,8 +47,7 @@ PUT my_index/_doc/2
 <1> The `number_one` field will contain the integer `10`.
 <2> This document will be rejected because coercion is disabled.

-TIP: The `coerce` setting is allowed to have different settings for fields of
-the same name in the same index. Its value can be updated on existing fields
+TIP: The `coerce` setting value can be updated on existing fields
 using the <<indices-put-mapping,PUT mapping API>>.

 [[coerce-setting]]
@@ -46,8 +46,7 @@ PUT my_index/_doc/2
 <1> This document will have the `text` field indexed, but not the `number_one` field.
 <2> This document will be rejected because `number_two` does not allow malformed values.

-TIP: The `ignore_malformed` setting is allowed to have different settings for
-fields of the same name in the same index. Its value can be updated on
+TIP: The `ignore_malformed` setting value can be updated on
 existing fields using the <<indices-put-mapping,PUT mapping API>>.

@@ -60,8 +60,7 @@ GET my_index/_search

 NOTE: Multi-fields do not change the original `_source` field.

-TIP: The `fields` setting is allowed to have different settings for fields of
-the same name in the same index. New multi-fields can be added to existing
+TIP: New multi-fields can be added to existing
 fields using the <<indices-put-mapping,PUT mapping API>>.

 ==== Multi-fields with multiple analyzers
@@ -7,7 +7,7 @@ produces a single token.

 The `normalizer` is applied prior to indexing the keyword, as well as at
 search-time when the `keyword` field is searched via a query parser such as
-the <<query-dsl-match-query,`match`>> query or via a term level query
+the <<query-dsl-match-query,`match`>> query or via a term-level query
 such as the <<query-dsl-term-query,`term`>> query.

 [source,js]
@@ -11,11 +11,10 @@ don't need scoring on a specific field, you should disable norms on that
 field. In particular, this is the case for fields that are used solely for
 filtering or aggregations.

-TIP: The `norms` setting must have the same setting for fields of the
-same name in the same index. Norms can be disabled on existing fields using
+TIP: Norms can be disabled on existing fields using
 the <<indices-put-mapping,PUT mapping API>>.

-Norms can be disabled (but not reenabled) after the fact, using the
+Norms can be disabled (but not reenabled after the fact), using the
 <<indices-put-mapping,PUT mapping API>> like so:

 [source,js]
@@ -108,6 +108,8 @@ geo-points containing any more than latitude and longitude (two dimensions) valu
 and reject the whole document.
 | `true`

+|`coerce` |If `true` unclosed linear rings in polygons will be automatically closed.
+| `false`

 |=======================================================================

@@ -0,0 +1,187 @@
+[role="xpack"]
+[testenv="basic"]
+[[configuring-filebeat]]
+=== Collecting {es} log data with {filebeat}
+
+[subs="attributes"]
+++++
+<titleabbrev>Collecting log data with {filebeat}</titleabbrev>
+++++
+
+You can use {filebeat} to monitor the {es} log files, collect log events, and
+ship them to the monitoring cluster. Your recent logs are visible on the
+*Monitoring* page in {kib}.
+
+//NOTE: The tagged regions are re-used in the Stack Overview.
+
+. Verify that {es} is running and that the monitoring cluster is ready to
+receive data from {filebeat}.
++
+--
+TIP: In production environments, we strongly recommend using a separate cluster
+(referred to as the _monitoring cluster_) to store the data. Using a separate
+monitoring cluster prevents production cluster outages from impacting your
+ability to access your monitoring data. It also prevents monitoring activities
+from impacting the performance of your production cluster. See
+{stack-ov}/monitoring-production.html[Monitoring in a production environment].
+
+--
+
+. Enable the collection of monitoring data on your cluster.
++
+--
+include::configuring-metricbeat.asciidoc[tag=enable-collection]
+
+For more information, see <<monitoring-settings>> and <<cluster-update-settings>>.
+--
+
+. Identify which logs you want to monitor.
++
+--
+The {filebeat} {es} module can handle
+{stack-ov}/audit-log-output.html[audit logs],
+{ref}/logging.html#deprecation-logging[deprecation logs],
+{ref}/gc-logging.html[gc logs], {ref}/logging.html[server logs], and
+{ref}/index-modules-slowlog.html[slow logs].
+For more information about the location of your {es} logs, see the
+{ref}/path-settings.html[path.logs] setting.
+
+IMPORTANT: If there are both structured (`*.json`) and unstructured (plain text)
+versions of the logs, you must use the structured logs. Otherwise, they might
+not appear in the appropriate context in {kib}.
+
+--
+
+. {filebeat-ref}/filebeat-installation.html[Install {filebeat}] on the {es}
+nodes that contain logs that you want to monitor.
+
+. Identify where to send the log data.
++
+--
+// tag::output-elasticsearch[]
+For example, specify {es} output information for your monitoring cluster in
+the {filebeat} configuration file (`filebeat.yml`):
+
+[source,yaml]
+----------------------------------
+output.elasticsearch:
+  # Array of hosts to connect to.
+  hosts: ["http://es-mon-1:9200", "http://es-mon2:9200"] <1>
+
+  # Optional protocol and basic auth credentials.
+  #protocol: "https"
+  #username: "elastic"
+  #password: "changeme"
+----------------------------------
+<1> In this example, the data is stored on a monitoring cluster with nodes
+`es-mon-1` and `es-mon-2`.
+
+If you configured the monitoring cluster to use encrypted communications, you
+must access it via HTTPS. For example, use a `hosts` setting like
+`https://es-mon-1:9200`.
+
+IMPORTANT: The {es} {monitor-features} use ingest pipelines, therefore the
+cluster that stores the monitoring data must have at least one
+<<ingest,ingest node>>.
+
+If {es} {security-features} are enabled on the monitoring cluster, you must
+provide a valid user ID and password so that {filebeat} can send metrics
+successfully.
+
+For more information about these configuration options, see
+{filebeat-ref}/elasticsearch-output.html[Configure the {es} output].
+// end::output-elasticsearch[]
+--
+
+. Optional: Identify where to visualize the data.
++
+--
+// tag::setup-kibana[]
+{filebeat} provides example {kib} dashboards, visualizations and searches. To
+load the dashboards into the appropriate {kib} instance, specify the
+`setup.kibana` information in the {filebeat} configuration file
+(`filebeat.yml`) on each node:
+
+[source,yaml]
+----------------------------------
+setup.kibana:
+  host: "localhost:5601"
+  #username: "my_kibana_user"
+  #password: "YOUR_PASSWORD"
+----------------------------------
+
+TIP: In production environments, we strongly recommend using a dedicated {kib}
+instance for your monitoring cluster.
+
+If {security-features} are enabled, you must provide a valid user ID and
+password so that {filebeat} can connect to {kib}:
+
+.. Create a user on the monitoring cluster that has the
+{stack-ov}/built-in-roles.html[`kibana_user` built-in role] or equivalent
+privileges.
+
+.. Add the `username` and `password` settings to the {es} output information in
+the {filebeat} configuration file. The example shows a hard-coded password, but
+you should store sensitive values in the
+{filebeat-ref}/keystore.html[secrets keystore].
+
+See {filebeat-ref}/setup-kibana-endpoint.html[Configure the {kib} endpoint].
+// end::setup-kibana[]
+--
+
+. Enable the {es} module and set up the initial {filebeat} environment on each
+node.
++
+--
+// tag::enable-es-module[]
+For example:
+
+["source","sh",subs="attributes,callouts"]
+----------------------------------------------------------------------
+filebeat modules enable elasticsearch
+filebeat setup -e
+----------------------------------------------------------------------
+
+For more information, see
+{filebeat-ref}/filebeat-module-elasticsearch.html[{es} module].
+// end::enable-es-module[]
+--
+
+. Configure the {es} module in {filebeat} on each node.
++
+--
+// tag::configure-es-module[]
+If the logs that you want to monitor aren't in the default location, set the
+appropriate path variables in the `modules.d/elasticsearch.yml` file. See
+{filebeat-ref}/filebeat-module-elasticsearch.html#configuring-elasticsearch-module[Configure the {es} module].
+
+IMPORTANT: If there are JSON logs, configure the `var.paths` settings to point
+to them instead of the plain text logs.
+// end::configure-es-module[]
+--
+
+. {filebeat-ref}/filebeat-starting.html[Start {filebeat}] on each node.
++
+--
+NOTE: Depending on how you’ve installed {filebeat}, you might see errors related
+to file ownership or permissions when you try to run {filebeat} modules. See
+{beats-ref}/config-file-permissions.html[Config file ownership and permissions].
+--
+
+. Check whether the appropriate indices exist on the monitoring cluster.
++
+--
+For example, use the {ref}/cat-indices.html[cat indices] command to verify
+that there are new `filebeat-*` indices.
+
+TIP: If you want to use the *Monitoring* UI in {kib}, there must also be
+`.monitoring-*` indices. Those indices are generated when you collect metrics
+about {stack} products. For example, see <<configuring-metricbeat>>.
+--
+
+. {kibana-ref}/monitoring-data.html[View the monitoring data in {kib}].
@@ -12,9 +12,12 @@ methods to collect metrics about {es}:
 * <<collecting-monitoring-data>>
 * <<configuring-metricbeat>>

+You can also <<configuring-filebeat,use {filebeat} to collect {es} logs>>.
+
 To learn about monitoring in general, see
 {stack-ov}/xpack-monitoring.html[Monitoring the {stack}].

 include::collecting-monitoring-data.asciidoc[]
 include::configuring-metricbeat.asciidoc[]
+include::configuring-filebeat.asciidoc[]
 include::indices.asciidoc[]
@@ -1,12 +1,12 @@
 [[query-dsl-constant-score-query]]
 === Constant Score Query

-A query that wraps another query and simply returns a
-constant score equal to the query boost for every document in the
-filter. Maps to Lucene `ConstantScoreQuery`.
+Wraps a <<query-dsl-bool-query, filter query>> and returns every matching
+document with a <<query-filter-context, relevance score>> equal to the `boost`
+parameter value.

 [source,js]
---------------------------------------------------
+----
 GET /_search
 {
     "query": {

@@ -18,8 +18,22 @@ GET /_search
         }
     }
 }
---------------------------------------------------
+----
 // CONSOLE

-Filter clauses are executed in <<query-filter-context,filter context>>,
-meaning that scoring is ignored and clauses are considered for caching.
+[[constant-score-top-level-params]]
+==== Top-level parameters for `constant_score`
+`filter`::
++
+--
+<<query-dsl-bool-query, Filter query>> you wish to run. Any returned documents
+must match this query. Required.
+
+Filter queries do not calculate <<query-filter-context, relevance scores>>. To
+speed up performance, {es} automatically caches frequently used filter queries.
+--
+
+`boost`::
+Floating point number used as the constant <<query-filter-context, relevance
+score>> for every document matching the `filter` query. Default is `1.0`.
+Optional.
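For Java clients the same query shape can be built with `QueryBuilders`; a sketch (not part of the diff, the field and value are illustrative):

[source,java]
----
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

public class ConstantScoreSketch {
    public static void main(String[] args) {
        ConstantScoreQueryBuilder query = QueryBuilders
                .constantScoreQuery(QueryBuilders.termQuery("user", "kimchy")) // the required `filter`
                .boost(1.2f); // constant relevance score for every match; defaults to 1.0
        System.out.println(query); // prints the equivalent JSON query
    }
}
----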
@@ -39,7 +39,7 @@ Then the following simple query can be executed with a

 [source,js]
 --------------------------------------------------
-GET /_search
+GET my_locations/_search
 {
     "query": {
         "bool" : {

@@ -94,7 +94,7 @@ representations of the geo point, the filter can accept it as well:

 [source,js]
 --------------------------------------------------
-GET /_search
+GET my_locations/_search
 {
     "query": {
         "bool" : {

@@ -129,7 +129,7 @@ conform with http://geojson.org/[GeoJSON].

 [source,js]
 --------------------------------------------------
-GET /_search
+GET my_locations/_search
 {
     "query": {
         "bool" : {

@@ -157,7 +157,7 @@ Format in `lat,lon`.

 [source,js]
 --------------------------------------------------
-GET /_search
+GET my_locations/_search
 {
     "query": {
         "bool" : {

@@ -183,7 +183,7 @@ GET /_search

 [source,js]
 --------------------------------------------------
-GET /_search
+GET my_locations/_search
 {
     "query": {
         "bool" : {

@@ -208,7 +208,7 @@ GET /_search

 [source,js]
 --------------------------------------------------
-GET /_search
+GET my_locations/_search
 {
     "query": {
         "bool" : {

@@ -243,7 +243,7 @@ geohash the geohash can be specified in both `top_left` and

 [source,js]
 --------------------------------------------------
-GET /_search
+GET my_locations/_search
 {
     "query": {
         "geo_bounding_box" : {

@@ -273,7 +273,7 @@ values separately.

 [source,js]
 --------------------------------------------------
-GET /_search
+GET my_locations/_search
 {
     "query": {
         "bool" : {

@@ -323,7 +323,7 @@ are not supported. Here is an example:

 [source,js]
 --------------------------------------------------
-GET /_search
+GET my_locations/_search
 {
     "query": {
         "bool" : {
|
@ -40,7 +40,6 @@ POST _search
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"boost" : 2.0,
|
|
||||||
"_name" : "favourite_food"
|
"_name" : "favourite_food"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -298,4 +297,4 @@ POST _search
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
|
|
|
@@ -1,72 +1,63 @@
 [[term-level-queries]]
-== Term level queries
+== Term-level queries

-While the <<full-text-queries,full text queries>> will analyze the query
-string before executing, the _term-level queries_ operate on the exact terms
-that are stored in the inverted index, and will normalize terms before executing
-only for <<keyword,`keyword`>> fields with <<normalizer,`normalizer`>> property.
+You can use **term-level queries** to find documents based on precise values in
+structured data. Examples of structured data include date ranges, IP addresses,
+prices, or product IDs.

-These queries are usually used for structured data like numbers, dates, and
-enums, rather than full text fields. Alternatively, they allow you to craft
-low-level queries, foregoing the analysis process.
+Unlike <<full-text-queries, full-text queries>>, term-level queries do not
+analyze search terms. Instead, term-level queries match the exact terms stored
+in a field.

-The queries in this group are:
+[NOTE]
+====
+Term-level queries still normalize search terms for `keyword` fields with the
+`normalizer` property. For more details, see <<normalizer, `normalizer`>>.
+====
+
+[float]
+[[term-level-query-types]]
+=== Types of term-level queries

 <<query-dsl-term-query,`term` query>>::
-Find documents which contain the exact term specified in the field
-specified.
+Returns documents that contain an exact term in a provided field.

 <<query-dsl-terms-query,`terms` query>>::
-Find documents which contain any of the exact terms specified in the field
-specified.
+Returns documents that contain one or more exact terms in a provided field.

 <<query-dsl-terms-set-query,`terms_set` query>>::
-Find documents which match with one or more of the specified terms. The
-number of terms that must match depend on the specified minimum should
-match field or script.
+Returns documents that contain a minimum number of exact terms in a provided
+field. You can define the minimum number of matching terms using a field or
+script.

 <<query-dsl-range-query,`range` query>>::
-Find documents where the field specified contains values (dates, numbers,
-or strings) in the range specified.
+Returns documents that contain terms within a provided range.

 <<query-dsl-exists-query,`exists` query>>::
-Find documents where the field specified contains any non-null value.
+Returns documents that contain any indexed value for a field.

 <<query-dsl-prefix-query,`prefix` query>>::
-Find documents where the field specified contains terms which begin with
-the exact prefix specified.
+Returns documents that contain a specific prefix in a provided field.

 <<query-dsl-wildcard-query,`wildcard` query>>::
-Find documents where the field specified contains terms which match the
-pattern specified, where the pattern supports single character wildcards
-(`?`) and multi-character wildcards (`*`)
+Returns documents that contain terms matching a wildcard pattern.

 <<query-dsl-regexp-query,`regexp` query>>::
-Find documents where the field specified contains terms which match the
-<<regexp-syntax,regular expression>> specified.
+Returns documents that contain terms matching a
+https://en.wikipedia.org/wiki/Regular_expression[regular expression].

 <<query-dsl-fuzzy-query,`fuzzy` query>>::
-Find documents where the field specified contains terms which are fuzzily
-similar to the specified term. Fuzziness is measured as a
-http://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance[Levenshtein edit distance]
-of 1 or 2.
+Returns documents that contain terms similar to the search term. {es} measures
+similarity, or fuzziness, using a
+http://en.wikipedia.org/wiki/Levenshtein_distance[Levenshtein edit distance].

 <<query-dsl-type-query,`type` query>>::
-Find documents of the specified type.
+Returns documents of the specified type.

 <<query-dsl-ids-query,`ids` query>>::
-Find documents with the specified type and IDs.
+Returns documents based on their <<mapping-id-field, document IDs>>.


 include::term-query.asciidoc[]
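A compact sketch of several of the query types listed above via the Java client's `QueryBuilders` (not part of the diff; field names and values are illustrative):

[source,java]
----
import org.elasticsearch.index.query.QueryBuilders;

public class TermLevelSketch {
    public static void main(String[] args) {
        // One builder per term-level query type; printing shows the JSON form.
        System.out.println(QueryBuilders.termQuery("status", "published"));          // term
        System.out.println(QueryBuilders.termsQuery("tags", "search", "analytics")); // terms
        System.out.println(QueryBuilders.rangeQuery("price").gte(10).lte(20));       // range
        System.out.println(QueryBuilders.existsQuery("user"));                       // exists
        System.out.println(QueryBuilders.prefixQuery("user", "ki"));                 // prefix
        System.out.println(QueryBuilders.wildcardQuery("user", "ki*y"));             // wildcard
        System.out.println(QueryBuilders.regexpQuery("user", "k.*y"));               // regexp
        System.out.println(QueryBuilders.fuzzyQuery("user", "kimzhy"));              // fuzzy
    }
}
----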
@@ -224,7 +224,7 @@ The response contains suggestions scored by the most likely spell correction fir

 [source,js]
 --------------------------------------------------
-POST _search
+POST test/_search
 {
     "suggest": {
         "text" : "noble prize",

@@ -293,7 +293,7 @@ properties that can be configured.

 [source,js]
 --------------------------------------------------
-POST _search
+POST test/_search
 {
     "suggest": {
         "text" : "obel prize",

@@ -414,7 +414,7 @@ accept ordinary analyzer names.

 [source,js]
 --------------------------------------------------
-POST _search
+POST test/_search
 {
     "suggest": {
         "text" : "obel prize",
@@ -59,6 +59,13 @@ downloaded from the Elastic Docker Registry.
 +
 {ref}/docker.html[Install {es} with Docker]

+`brew`::
+
+Formulae are available from the Elastic Homebrew tap for installing
+{es} on macOS with the Homebrew package manager.
++
+{ref}/brew.html[Install {es} on macOS with Homebrew]
+
 [float]
 [[config-mgmt-tools]]
 === Configuration Management Tools

@@ -84,3 +91,4 @@ include::install/windows.asciidoc[]

 include::install/docker.asciidoc[]

+include::install/brew.asciidoc[]
@@ -0,0 +1,69 @@
+[[brew]]
+=== Install {es} on macOS with Homebrew
+
+Elastic publishes Homebrew formulae so you can install {es} with the
+https://brew.sh/[Homebrew] package manager.
+
+To install with Homebrew, you first need to tap the
+Elastic Homebrew repository:
+
+[source,sh]
+-------------------------
+brew tap elastic/tap
+-------------------------
+
+Once you've tapped the Elastic Homebrew repo, you can use `brew install` to
+install the default distribution of {es}:
+
+[source,sh]
+-------------------------
+brew install elastic/tap/elasticsearch-full
+-------------------------
+
+This installs the most recently released default distribution of {es}.
+To install the OSS distribution, specify `elastic/tap/elasticsearch-oss`.
+
+[[brew-layout]]
+==== Directory layout for Homebrew installs
+
+When you install {es} with `brew install` the config files, logs,
+and data directory are stored in the following locations.
+
+[cols="<h,<,<m,<m",options="header",]
+|=======================================================================
+| Type | Description | Default Location | Setting
+| home
+  | Elasticsearch home directory or `$ES_HOME`
+  | /usr/local/var/homebrew/linked/elasticsearch-full
+ d|
+
+| bin
+  | Binary scripts including `elasticsearch` to start a node
+    and `elasticsearch-plugin` to install plugins
+  | /usr/local/var/homebrew/linked/elasticsearch-full/bin
+ d|
+
+| conf
+  | Configuration files including `elasticsearch.yml`
+  | /usr/local/etc/elasticsearch
+  | <<config-files-location,ES_PATH_CONF>>
+
+| data
+  | The location of the data files of each index / shard allocated
+    on the node. Can hold multiple locations.
+  | /usr/local/var/lib/elasticsearch
+  | path.data
+
+| logs
+  | Log files location.
+  | /usr/local/var/log/elasticsearch
+  | path.logs
+
+| plugins
+  | Plugin files location. Each plugin will be contained in a subdirectory.
+  | /usr/local/var/homebrew/linked/elasticsearch/plugins
+ d|
+
+|=======================================================================
+
+include::next-steps.asciidoc[]
@@ -7,8 +7,12 @@ keystore and the `elasticsearch-keystore` tool to manage the settings in the key

 NOTE: All commands here should be run as the user which will run Elasticsearch.

-NOTE: Only some settings are designed to be read from the keystore. See
-documentation for each setting to see if it is supported as part of the keystore.
+IMPORTANT: Only some settings are designed to be read from the keystore. However,
+the keystore has no validation to block unsupported settings.
+Adding unsupported settings to the keystore will cause {es}
+Additional unsupported settings being added to the keystore will cause Elasticsearch
+to fail to start. See documentation for each setting to see if it is supported
+as part of the keystore.

 NOTE: All the modifications to the keystore take affect only after restarting
 Elasticsearch.
|
@@ -289,7 +289,7 @@ public abstract class AbstractXContentParser implements XContentParser {
         return readListOrderedMap(this);
     }

-    interface MapFactory {
+    public interface MapFactory {
         Map<String, Object> newMap();
     }

@@ -391,7 +391,7 @@ public abstract class AbstractXContentParser implements XContentParser {
         return list;
     }

-    static Object readValue(XContentParser parser, MapFactory mapFactory, XContentParser.Token token) throws IOException {
+    public static Object readValue(XContentParser parser, MapFactory mapFactory, XContentParser.Token token) throws IOException {
         if (token == XContentParser.Token.VALUE_NULL) {
             return null;
         } else if (token == XContentParser.Token.VALUE_STRING) {
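The two hunks above widen `MapFactory` and `readValue` from package-private to
public. A minimal sketch of what that enables for code outside the package (the
caller class is hypothetical; it assumes `AbstractXContentParser` lives at
`org.elasticsearch.common.xcontent.support` and that `MapFactory`'s single
`newMap()` method makes it usable as a functional interface, per the signatures
shown):

[source,java]
-------------------------
import java.io.IOException;
import java.util.LinkedHashMap;

import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.AbstractXContentParser;

// Hypothetical external helper: reads whatever value the parser is
// positioned on, backing parsed objects with order-preserving maps.
public final class OrderedValueReader {
    private static final AbstractXContentParser.MapFactory ORDERED = LinkedHashMap::new;

    public static Object read(XContentParser parser) throws IOException {
        return AbstractXContentParser.readValue(parser, ORDERED, parser.currentToken());
    }

    private OrderedValueReader() {}
}
-------------------------
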
@@ -1,451 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.painless;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.painless.lookup.PainlessClass;
import org.elasticsearch.painless.lookup.PainlessConstructor;
import org.elasticsearch.painless.lookup.PainlessField;
import org.elasticsearch.painless.lookup.PainlessLookup;
import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
import org.elasticsearch.painless.lookup.PainlessLookupUtility;
import org.elasticsearch.painless.lookup.PainlessMethod;
import org.elasticsearch.painless.lookup.def;
import org.elasticsearch.painless.spi.Whitelist;

import java.io.IOException;
import java.io.PrintStream;
import java.lang.reflect.Modifier;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.function.Consumer;
import java.util.stream.Collectors;

import static java.util.Comparator.comparing;

/**
 * Generates an API reference from the method and type whitelists in {@link PainlessLookup}.
 */
public class PainlessDocGenerator {

    private static final PainlessLookup PAINLESS_LOOKUP = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS);
    private static final Logger logger = LogManager.getLogger(PainlessDocGenerator.class);
    private static final Comparator<PainlessField> FIELD_NAME = comparing(f -> f.javaField.getName());
    private static final Comparator<PainlessMethod> METHOD_NAME = comparing(m -> m.javaMethod.getName());
    private static final Comparator<PainlessMethod> METHOD_NUMBER_OF_PARAMS = comparing(m -> m.typeParameters.size());
    private static final Comparator<PainlessConstructor> CONSTRUCTOR_NUMBER_OF_PARAMS = comparing(m -> m.typeParameters.size());

    public static void main(String[] args) throws IOException {
        Path apiRootPath = PathUtils.get(args[0]);

        // Blow away the last execution and recreate it from scratch
        IOUtils.rm(apiRootPath);
        Files.createDirectories(apiRootPath);

        Path indexPath = apiRootPath.resolve("index.asciidoc");
        logger.info("Starting to write [index.asciidoc]");
        try (PrintStream indexStream = new PrintStream(
                Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE),
                false, StandardCharsets.UTF_8.name())) {
            emitGeneratedWarning(indexStream);
            List<Class<?>> classes = PAINLESS_LOOKUP.getClasses().stream().sorted(
                Comparator.comparing(Class::getCanonicalName)).collect(Collectors.toList());
            for (Class<?> clazz : classes) {
                PainlessClass struct = PAINLESS_LOOKUP.lookupPainlessClass(clazz);
                String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(clazz);

                if (clazz.isPrimitive()) {
                    // Primitives don't have methods to reference
                    continue;
                }
                if (clazz == def.class) {
                    // def is special but doesn't have any methods all of its own.
                    continue;
                }
                indexStream.print("include::");
                indexStream.print(canonicalClassName);
                indexStream.println(".asciidoc[]");

                Path typePath = apiRootPath.resolve(canonicalClassName + ".asciidoc");
                logger.info("Writing [{}.asciidoc]", canonicalClassName);
                try (PrintStream typeStream = new PrintStream(
                        Files.newOutputStream(typePath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE),
                        false, StandardCharsets.UTF_8.name())) {
                    emitGeneratedWarning(typeStream);
                    typeStream.print("[[");
                    emitAnchor(typeStream, clazz);
                    typeStream.print("]]++");
                    typeStream.print(canonicalClassName);
                    typeStream.println("++::");

                    Consumer<PainlessField> documentField = field -> PainlessDocGenerator.documentField(typeStream, field);
                    Consumer<PainlessMethod> documentMethod = method -> PainlessDocGenerator.documentMethod(typeStream, method);
                    Consumer<PainlessConstructor> documentConstructor =
                        constructor -> PainlessDocGenerator.documentConstructor(typeStream, constructor);
                    struct.staticFields.values().stream().sorted(FIELD_NAME).forEach(documentField);
                    struct.fields.values().stream().sorted(FIELD_NAME).forEach(documentField);
                    struct.staticMethods.values().stream().sorted(
                        METHOD_NAME.thenComparing(METHOD_NUMBER_OF_PARAMS)).forEach(documentMethod);
                    struct.constructors.values().stream().sorted(CONSTRUCTOR_NUMBER_OF_PARAMS).forEach(documentConstructor);
                    Map<String, Class<?>> inherited = new TreeMap<>();
                    struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(METHOD_NUMBER_OF_PARAMS)).forEach(method -> {
                        if (method.targetClass == clazz) {
                            documentMethod(typeStream, method);
                        } else {
                            inherited.put(canonicalClassName, method.targetClass);
                        }
                    });

                    if (false == inherited.isEmpty()) {
                        typeStream.print("* Inherits methods from ");
                        boolean first = true;
                        for (Class<?> inheritsFrom : inherited.values()) {
                            if (first) {
                                first = false;
                            } else {
                                typeStream.print(", ");
                            }
                            typeStream.print("++");
                            emitStruct(typeStream, inheritsFrom);
                            typeStream.print("++");
                        }
                        typeStream.println();
                    }
                }
            }
        }
        logger.info("Done writing [index.asciidoc]");
    }

    private static void documentField(PrintStream stream, PainlessField field) {
        stream.print("** [[");
        emitAnchor(stream, field);
        stream.print("]]");

        if (Modifier.isStatic(field.javaField.getModifiers())) {
            stream.print("static ");
        }

        emitType(stream, field.typeParameter);
        stream.print(' ');

        String javadocRoot = javadocRoot(field);
        emitJavadocLink(stream, javadocRoot, field);
        stream.print('[');
        stream.print(field.javaField.getName());
        stream.print(']');

        if (javadocRoot.equals("java8")) {
            stream.print(" (");
            emitJavadocLink(stream, "java9", field);
            stream.print("[java 9])");
        }

        stream.println();
    }

    /**
     * Document a constructor.
     */
    private static void documentConstructor(PrintStream stream, PainlessConstructor constructor) {
        stream.print("* ++[[");
        emitAnchor(stream, constructor);
        stream.print("]]");

        String javadocRoot = javadocRoot(constructor.javaConstructor.getDeclaringClass());
        emitJavadocLink(stream, javadocRoot, constructor);
        stream.print('[');

        stream.print(constructorName(constructor));

        stream.print("](");
        boolean first = true;
        for (Class<?> arg : constructor.typeParameters) {
            if (first) {
                first = false;
            } else {
                stream.print(", ");
            }
            emitType(stream, arg);
        }
        stream.print(")++");

        if (javadocRoot.equals("java8")) {
            stream.print(" (");
            emitJavadocLink(stream, "java9", constructor);
            stream.print("[java 9])");
        }

        stream.println();
    }

    /**
     * Document a method.
     */
    private static void documentMethod(PrintStream stream, PainlessMethod method) {
        stream.print("* ++[[");
        emitAnchor(stream, method);
        stream.print("]]");

        if (method.targetClass == method.javaMethod.getDeclaringClass() && Modifier.isStatic(method.javaMethod.getModifiers())) {
            stream.print("static ");
        }

        emitType(stream, method.returnType);
        stream.print(' ');

        String javadocRoot = javadocRoot(method);
        emitJavadocLink(stream, javadocRoot, method);
        stream.print('[');

        stream.print(methodName(method));

        stream.print("](");
        boolean first = true;
        for (Class<?> arg : method.typeParameters) {
            if (first) {
                first = false;
            } else {
                stream.print(", ");
            }
            emitType(stream, arg);
        }
        stream.print(")++");

        if (javadocRoot.equals("java8")) {
            stream.print(" (");
            emitJavadocLink(stream, "java9", method);
            stream.print("[java 9])");
        }

        stream.println();
    }

    /**
     * Anchor text for a {@link PainlessClass}.
     */
    private static void emitAnchor(PrintStream stream, Class<?> clazz) {
        stream.print("painless-api-reference-");
        stream.print(PainlessLookupUtility.typeToCanonicalTypeName(clazz).replace('.', '-'));
    }

    /**
     * Anchor text for a {@link PainlessConstructor}.
     */
    private static void emitAnchor(PrintStream stream, PainlessConstructor constructor) {
        emitAnchor(stream, constructor.javaConstructor.getDeclaringClass());
        stream.print('-');
        stream.print(constructorName(constructor));
        stream.print('-');
        stream.print(constructor.typeParameters.size());
    }

    /**
     * Anchor text for a {@link PainlessMethod}.
     */
    private static void emitAnchor(PrintStream stream, PainlessMethod method) {
        emitAnchor(stream, method.targetClass);
        stream.print('-');
        stream.print(methodName(method));
        stream.print('-');
        stream.print(method.typeParameters.size());
    }

    /**
     * Anchor text for a {@link PainlessField}.
     */
    private static void emitAnchor(PrintStream stream, PainlessField field) {
        emitAnchor(stream, field.javaField.getDeclaringClass());
        stream.print('-');
        stream.print(field.javaField.getName());
    }

    private static String constructorName(PainlessConstructor constructor) {
        return PainlessLookupUtility.typeToCanonicalTypeName(constructor.javaConstructor.getDeclaringClass());
    }

    private static String methodName(PainlessMethod method) {
        return PainlessLookupUtility.typeToCanonicalTypeName(method.targetClass);
    }

    /**
     * Emit a {@link Class}. If the type is primitive or an array of primitives this just emits the name of the type. Otherwise this emits
     * an internal link with the text.
     */
    private static void emitType(PrintStream stream, Class<?> clazz) {
        emitStruct(stream, clazz);
        while ((clazz = clazz.getComponentType()) != null) {
            stream.print("[]");
        }
    }

    /**
     * Emit a {@link PainlessClass}. If the {@linkplain PainlessClass} is primitive or def this just emits the name of the struct.
     * Otherwise this emits an internal link with the name.
     */
    private static void emitStruct(PrintStream stream, Class<?> clazz) {
        String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(clazz);

        if (false == clazz.isPrimitive() && clazz != def.class) {
            stream.print("<<");
            emitAnchor(stream, clazz);
            stream.print(',');
            stream.print(canonicalClassName);
            stream.print(">>");
        } else {
            stream.print(canonicalClassName);
        }
    }

    /**
     * Emit an external link to Javadoc for a {@link PainlessConstructor}.
     *
     * @param root name of the root uri variable
     */
    private static void emitJavadocLink(PrintStream stream, String root, PainlessConstructor constructor) {
        stream.print("link:{");
        stream.print(root);
        stream.print("-javadoc}/");
        stream.print(classUrlPath(constructor.javaConstructor.getDeclaringClass()));
        stream.print(".html#");
        stream.print(constructorName(constructor));
        stream.print("%2D");
        boolean first = true;
        for (Class<?> clazz: constructor.typeParameters) {
            if (first) {
                first = false;
            } else {
                stream.print("%2D");
            }
            stream.print(clazz.getName());
            if (clazz.isArray()) {
                stream.print(":A");
            }
        }
        stream.print("%2D");
    }

    /**
     * Emit an external link to Javadoc for a {@link PainlessMethod}.
     *
     * @param root name of the root uri variable
     */
    private static void emitJavadocLink(PrintStream stream, String root, PainlessMethod method) {
        stream.print("link:{");
        stream.print(root);
        stream.print("-javadoc}/");
        stream.print(classUrlPath(method.javaMethod.getDeclaringClass()));
        stream.print(".html#");
        stream.print(methodName(method));
        stream.print("%2D");
        boolean first = true;
        if (method.targetClass != method.javaMethod.getDeclaringClass()) {
            first = false;
            stream.print(method.javaMethod.getDeclaringClass().getName());
        }
        for (Class<?> clazz: method.typeParameters) {
            if (first) {
                first = false;
            } else {
                stream.print("%2D");
            }
            stream.print(clazz.getName());
            if (clazz.isArray()) {
                stream.print(":A");
            }
        }
        stream.print("%2D");
    }

    /**
     * Emit an external link to Javadoc for a {@link PainlessField}.
     *
     * @param root name of the root uri variable
     */
    private static void emitJavadocLink(PrintStream stream, String root, PainlessField field) {
        stream.print("link:{");
        stream.print(root);
        stream.print("-javadoc}/");
        stream.print(classUrlPath(field.javaField.getDeclaringClass()));
        stream.print(".html#");
        stream.print(field.javaField.getName());
    }

    /**
     * Pick the javadoc root for a {@link PainlessMethod}.
     */
    private static String javadocRoot(PainlessMethod method) {
        if (method.targetClass != method.javaMethod.getDeclaringClass()) {
            return "painless";
        }
        return javadocRoot(method.targetClass);
    }

    /**
     * Pick the javadoc root for a {@link PainlessField}.
     */
    private static String javadocRoot(PainlessField field) {
        return javadocRoot(field.javaField.getDeclaringClass());
    }

    /**
     * Pick the javadoc root for a {@link Class}.
     */
    private static String javadocRoot(Class<?> clazz) {
        String classPackage = clazz.getPackage().getName();
        if (classPackage.startsWith("java")) {
            return "java8";
        }
        if (classPackage.startsWith("org.elasticsearch.painless")) {
            return "painless";
        }
        if (classPackage.startsWith("org.elasticsearch")) {
            return "elasticsearch";
        }
        if (classPackage.startsWith("org.joda.time")) {
            return "joda-time";
        }
        if (classPackage.startsWith("org.apache.lucene")) {
            return "lucene-core";
        }
        throw new IllegalArgumentException("Unrecognized package: " + classPackage);
    }

    private static void emitGeneratedWarning(PrintStream stream) {
        stream.println("////");
        stream.println("Automatically generated by PainlessDocGenerator. Do not edit.");
        stream.println("Rebuild by running `gradle generatePainlessApi`.");
        stream.println("////");
        stream.println();
    }

    private static String classUrlPath(Class<?> clazz) {
        return clazz.getName().replace('.', '/').replace('$', '.');
    }
}
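The generator deleted above was a standalone `main`; judging from the banner it
wrote into every generated file, its output was rebuilt with the Gradle task it
names (command reproduced from that banner, not verified against the build
scripts):

[source,sh]
-------------------------
gradle generatePainlessApi
-------------------------
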
@@ -19,89 +19,21 @@

 package org.elasticsearch.index.reindex;

-import org.apache.logging.log4j.LogManager;
-import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.client.node.NodeClient;
-import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.common.xcontent.ObjectParser;
-import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.json.JsonXContent;
-import org.elasticsearch.index.VersionType;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
-import org.elasticsearch.script.Script;

 import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.List;
-import java.util.Map;

-import static java.util.Collections.emptyMap;
-import static java.util.Objects.requireNonNull;
 import static org.elasticsearch.common.unit.TimeValue.parseTimeValue;
-import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.rest.RestRequest.Method.POST;

 /**
  * Expose reindex over rest.
  */
 public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexRequest, ReindexAction> {
-    static final ObjectParser<ReindexRequest, Void> PARSER = new ObjectParser<>("reindex");
-    static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in reindex requests is deprecated.";
-    private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestReindexAction.class));
-
-    static {
-        ObjectParser.Parser<ReindexRequest, Void> sourceParser = (parser, request, context) -> {
-            // Funky hack to work around Search not having a proper ObjectParser and us wanting to extract query if using remote.
-            Map<String, Object> source = parser.map();
-            String[] indices = extractStringArray(source, "index");
-            if (indices != null) {
-                request.getSearchRequest().indices(indices);
-            }
-            String[] types = extractStringArray(source, "type");
-            if (types != null) {
-                deprecationLogger.deprecatedAndMaybeLog("reindex_with_types", TYPES_DEPRECATION_MESSAGE);
-                request.getSearchRequest().types(types);
-            }
-            request.setRemoteInfo(buildRemoteInfo(source));
-            XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
-            builder.map(source);
-            try (InputStream stream = BytesReference.bytes(builder).streamInput();
-                 XContentParser innerParser = parser.contentType().xContent()
-                     .createParser(parser.getXContentRegistry(), parser.getDeprecationHandler(), stream)) {
-                request.getSearchRequest().source().parseXContent(innerParser, false);
-            }
-        };
-
-        ObjectParser<IndexRequest, Void> destParser = new ObjectParser<>("dest");
-        destParser.declareString(IndexRequest::index, new ParseField("index"));
-        destParser.declareString((request, type) -> {
-            deprecationLogger.deprecatedAndMaybeLog("reindex_with_types", TYPES_DEPRECATION_MESSAGE);
-            request.type(type);
-        }, new ParseField("type"));
-        destParser.declareString(IndexRequest::routing, new ParseField("routing"));
-        destParser.declareString(IndexRequest::opType, new ParseField("op_type"));
-        destParser.declareString(IndexRequest::setPipeline, new ParseField("pipeline"));
-        destParser.declareString((s, i) -> s.versionType(VersionType.fromString(i)), new ParseField("version_type"));
-
-        PARSER.declareField(sourceParser::parse, new ParseField("source"), ValueType.OBJECT);
-        PARSER.declareField((p, v, c) -> destParser.parse(p, v.getDestination(), c), new ParseField("dest"), ValueType.OBJECT);
-        PARSER.declareInt(RestReindexAction::setMaxDocsValidateIdentical, new ParseField("max_docs", "size"));
-        PARSER.declareField((p, v, c) -> v.setScript(Script.parse(p)), new ParseField("script"),
-            ValueType.OBJECT);
-        PARSER.declareString(ReindexRequest::setConflicts, new ParseField("conflicts"));
-    }

     public RestReindexAction(Settings settings, RestController controller) {
         super(settings, ReindexAction.INSTANCE);

@@ -124,123 +56,15 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
             throw new IllegalArgumentException("_reindex doesn't support [pipeline] as a query parameter. "
                 + "Specify it in the [dest] object instead.");
         }
-        ReindexRequest internal = new ReindexRequest();
+        ReindexRequest internal;
         try (XContentParser parser = request.contentParser()) {
-            PARSER.parse(parser, internal, null);
+            internal = ReindexRequest.fromXContent(parser);
         }

         if (request.hasParam("scroll")) {
            internal.setScroll(parseTimeValue(request.param("scroll"), "scroll"));
         }
         return internal;
     }
-
-    static RemoteInfo buildRemoteInfo(Map<String, Object> source) throws IOException {
-        @SuppressWarnings("unchecked")
-        Map<String, Object> remote = (Map<String, Object>) source.remove("remote");
-        if (remote == null) {
-            return null;
-        }
-        String username = extractString(remote, "username");
-        String password = extractString(remote, "password");
-        String hostInRequest = requireNonNull(extractString(remote, "host"), "[host] must be specified to reindex from a remote cluster");
-        URI uri;
-        try {
-            uri = new URI(hostInRequest);
-            // URI has less stringent URL parsing than our code. We want to fail if all values are not provided.
-            if (uri.getPort() == -1) {
-                throw new URISyntaxException(hostInRequest, "The port was not defined in the [host]");
-            }
-        } catch (URISyntaxException ex) {
-            throw new IllegalArgumentException("[host] must be of the form [scheme]://[host]:[port](/[pathPrefix])? but was ["
-                + hostInRequest + "]", ex);
-        }
-
-        String scheme = uri.getScheme();
-        String host = uri.getHost();
-        int port = uri.getPort();
-
-        String pathPrefix = null;
-        if (uri.getPath().isEmpty() == false) {
-            pathPrefix = uri.getPath();
-        }
-
-        Map<String, String> headers = extractStringStringMap(remote, "headers");
-        TimeValue socketTimeout = extractTimeValue(remote, "socket_timeout", RemoteInfo.DEFAULT_SOCKET_TIMEOUT);
-        TimeValue connectTimeout = extractTimeValue(remote, "connect_timeout", RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
-        if (false == remote.isEmpty()) {
-            throw new IllegalArgumentException(
-                "Unsupported fields in [remote]: [" + Strings.collectionToCommaDelimitedString(remote.keySet()) + "]");
-        }
-        return new RemoteInfo(scheme, host, port, pathPrefix, queryForRemote(source),
-            username, password, headers, socketTimeout, connectTimeout);
-    }
-
-    /**
-     * Yank a string array from a map. Emulates XContent's permissive String to
-     * String array conversions.
-     */
-    private static String[] extractStringArray(Map<String, Object> source, String name) {
-        Object value = source.remove(name);
-        if (value == null) {
-            return null;
-        }
-        if (value instanceof List) {
-            @SuppressWarnings("unchecked")
-            List<String> list = (List<String>) value;
-            return list.toArray(new String[list.size()]);
-        } else if (value instanceof String) {
-            return new String[] {(String) value};
-        } else {
-            throw new IllegalArgumentException("Expected [" + name + "] to be a list of a string but was [" + value + ']');
-        }
-    }
-
-    private static String extractString(Map<String, Object> source, String name) {
-        Object value = source.remove(name);
-        if (value == null) {
-            return null;
-        }
-        if (value instanceof String) {
-            return (String) value;
-        }
-        throw new IllegalArgumentException("Expected [" + name + "] to be a string but was [" + value + "]");
-    }
-
-    private static Map<String, String> extractStringStringMap(Map<String, Object> source, String name) {
-        Object value = source.remove(name);
-        if (value == null) {
-            return emptyMap();
-        }
-        if (false == value instanceof Map) {
-            throw new IllegalArgumentException("Expected [" + name + "] to be an object containing strings but was [" + value + "]");
-        }
-        Map<?, ?> map = (Map<?, ?>) value;
-        for (Map.Entry<?, ?> entry : map.entrySet()) {
-            if (false == entry.getKey() instanceof String || false == entry.getValue() instanceof String) {
-                throw new IllegalArgumentException("Expected [" + name + "] to be an object containing strings but has [" + entry + "]");
-            }
-        }
-        @SuppressWarnings("unchecked") // We just checked....
-        Map<String, String> safe = (Map<String, String>) map;
-        return safe;
-    }
-
-    private static TimeValue extractTimeValue(Map<String, Object> source, String name, TimeValue defaultValue) {
-        String string = extractString(source, name);
-        return string == null ? defaultValue : parseTimeValue(string, name);
-    }
-
-    private static BytesReference queryForRemote(Map<String, Object> source) throws IOException {
-        XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
-        Object query = source.remove("query");
-        if (query == null) {
-            return BytesReference.bytes(matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS));
-        }
-        if (!(query instanceof Map)) {
-            throw new IllegalArgumentException("Expected [query] to be an object but was [" + query + "]");
-        }
-        @SuppressWarnings("unchecked")
-        Map<String, Object> map = (Map<String, Object>) query;
-        return BytesReference.bytes(builder.map(map));
-    }
 }
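For context, a request body exercising the `remote` fields the removed
`buildRemoteInfo` used to parse (values mirror the tests below; with this
change the parsing lives in `ReindexRequest.fromXContent` instead):

[source,js]
-------------------------
POST _reindex
{
  "source": {
    "remote": {
      "host": "https://example.com:9200",
      "username": "testuser",
      "password": "testpass",
      "socket_timeout": "90s",
      "connect_timeout": "10s"
    },
    "index": "source"
  },
  "dest": {
    "index": "dest"
  }
}
-------------------------
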
@@ -23,7 +23,6 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.rest.RestRequest.Method;

@@ -33,11 +32,8 @@ import org.junit.Before;

 import java.io.IOException;
 import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;

 import static java.util.Collections.singletonMap;
-import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;

 public class RestReindexActionTests extends RestActionTestCase {

@@ -48,126 +44,6 @@ public class RestReindexActionTests extends RestActionTestCase {
         action = new RestReindexAction(Settings.EMPTY, controller());
     }
-
-    public void testBuildRemoteInfoNoRemote() throws IOException {
-        assertNull(RestReindexAction.buildRemoteInfo(new HashMap<>()));
-    }
-
-    public void testBuildRemoteInfoFullyLoaded() throws IOException {
-        Map<String, String> headers = new HashMap<>();
-        headers.put("first", "a");
-        headers.put("second", "b");
-        headers.put("third", "");
-
-        Map<String, Object> remote = new HashMap<>();
-        remote.put("host", "https://example.com:9200");
-        remote.put("username", "testuser");
-        remote.put("password", "testpass");
-        remote.put("headers", headers);
-        remote.put("socket_timeout", "90s");
-        remote.put("connect_timeout", "10s");
-
-        Map<String, Object> query = new HashMap<>();
-        query.put("a", "b");
-
-        Map<String, Object> source = new HashMap<>();
-        source.put("remote", remote);
-        source.put("query", query);
-
-        RemoteInfo remoteInfo = RestReindexAction.buildRemoteInfo(source);
-        assertEquals("https", remoteInfo.getScheme());
-        assertEquals("example.com", remoteInfo.getHost());
-        assertEquals(9200, remoteInfo.getPort());
-        assertEquals("{\n  \"a\" : \"b\"\n}", remoteInfo.getQuery().utf8ToString());
-        assertEquals("testuser", remoteInfo.getUsername());
-        assertEquals("testpass", remoteInfo.getPassword());
-        assertEquals(headers, remoteInfo.getHeaders());
-        assertEquals(timeValueSeconds(90), remoteInfo.getSocketTimeout());
-        assertEquals(timeValueSeconds(10), remoteInfo.getConnectTimeout());
-    }
-
-    public void testBuildRemoteInfoWithoutAllParts() throws IOException {
-        expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase("example.com"));
-        expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase(":9200"));
-        expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase("http://:9200"));
-        expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase("example.com:9200"));
-        expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase("http://example.com"));
-    }
-
-    public void testBuildRemoteInfoWithAllHostParts() throws IOException {
-        RemoteInfo info = buildRemoteInfoHostTestCase("http://example.com:9200");
-        assertEquals("http", info.getScheme());
-        assertEquals("example.com", info.getHost());
-        assertEquals(9200, info.getPort());
-        assertNull(info.getPathPrefix());
-        assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout()); // Didn't set the timeout so we should get the default
-        assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout()); // Didn't set the timeout so we should get the default
-
-        info = buildRemoteInfoHostTestCase("https://other.example.com:9201");
-        assertEquals("https", info.getScheme());
-        assertEquals("other.example.com", info.getHost());
-        assertEquals(9201, info.getPort());
-        assertNull(info.getPathPrefix());
-        assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout());
-        assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout());
-
-        info = buildRemoteInfoHostTestCase("https://[::1]:9201");
-        assertEquals("https", info.getScheme());
-        assertEquals("[::1]", info.getHost());
-        assertEquals(9201, info.getPort());
-        assertNull(info.getPathPrefix());
-        assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout());
-        assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout());
-
-        info = buildRemoteInfoHostTestCase("https://other.example.com:9201/");
-        assertEquals("https", info.getScheme());
-        assertEquals("other.example.com", info.getHost());
-        assertEquals(9201, info.getPort());
-        assertEquals("/", info.getPathPrefix());
-        assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout());
-        assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout());
-
-        info = buildRemoteInfoHostTestCase("https://other.example.com:9201/proxy-path/");
-        assertEquals("https", info.getScheme());
-        assertEquals("other.example.com", info.getHost());
-        assertEquals(9201, info.getPort());
-        assertEquals("/proxy-path/", info.getPathPrefix());
-        assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout());
-        assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout());
-
-        final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
-            () -> buildRemoteInfoHostTestCase("https"));
-        assertEquals("[host] must be of the form [scheme]://[host]:[port](/[pathPrefix])? but was [https]",
-            exception.getMessage());
-    }
-
-    public void testReindexFromRemoteRequestParsing() throws IOException {
-        BytesReference request;
-        try (XContentBuilder b = JsonXContent.contentBuilder()) {
-            b.startObject(); {
-                b.startObject("source"); {
-                    b.startObject("remote"); {
-                        b.field("host", "http://localhost:9200");
-                    }
-                    b.endObject();
-                    b.field("index", "source");
-                }
-                b.endObject();
-                b.startObject("dest"); {
-                    b.field("index", "dest");
-                }
-                b.endObject();
-            }
-            b.endObject();
-            request = BytesReference.bytes(b);
-        }
-        try (XContentParser p = createParser(JsonXContent.jsonXContent, request)) {
-            ReindexRequest r = new ReindexRequest();
-            RestReindexAction.PARSER.parse(p, r, null);
-            assertEquals("localhost", r.getRemoteInfo().getHost());
-            assertArrayEquals(new String[] {"source"}, r.getSearchRequest().indices());
-        }
-    }

     public void testPipelineQueryParameterIsError() throws IOException {
         FakeRestRequest.Builder request = new FakeRestRequest.Builder(xContentRegistry());
         try (XContentBuilder body = JsonXContent.contentBuilder().prettyPrint()) {

@@ -206,16 +82,6 @@ public class RestReindexActionTests extends RestActionTestCase {
         }
     }
-
-    private RemoteInfo buildRemoteInfoHostTestCase(String hostInRest) throws IOException {
-        Map<String, Object> remote = new HashMap<>();
-        remote.put("host", hostInRest);
-
-        Map<String, Object> source = new HashMap<>();
-        source.put("remote", remote);
-
-        return RestReindexAction.buildRemoteInfo(source);
-    }

     /**
      * test deprecation is logged if one or more types are used in source search request inside reindex
      */

@@ -234,7 +100,7 @@ public class RestReindexActionTests extends RestActionTestCase {
         b.endObject();
         requestBuilder.withContent(new BytesArray(BytesReference.bytes(b).toBytesRef()), XContentType.JSON);
         dispatchRequest(requestBuilder.build());
-        assertWarnings(RestReindexAction.TYPES_DEPRECATION_MESSAGE);
+        assertWarnings(ReindexRequest.TYPES_DEPRECATION_MESSAGE);
     }

     /**

@@ -255,6 +121,6 @@ public class RestReindexActionTests extends RestActionTestCase {
         b.endObject();
         requestBuilder.withContent(new BytesArray(BytesReference.bytes(b).toBytesRef()), XContentType.JSON);
         dispatchRequest(requestBuilder.build());
-        assertWarnings(RestReindexAction.TYPES_DEPRECATION_MESSAGE);
+        assertWarnings(ReindexRequest.TYPES_DEPRECATION_MESSAGE);
     }
 }
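For reference, the shape of a deprecated request the remaining tests dispatch:
a `type` under `source` (or `dest`) still parses but now warns with
`ReindexRequest.TYPES_DEPRECATION_MESSAGE`. The body below is illustrative,
reconstructed from the test builders; the type name `doc` is a placeholder:

[source,js]
-------------------------
POST _reindex
{
  "source": {
    "index": "source",
    "type": "doc"
  },
  "dest": {
    "index": "dest"
  }
}
-------------------------
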
@@ -23,7 +23,7 @@ esplugin {
 }

 versions << [
-  'aws': '1.11.505'
+  'aws': '1.11.562'
 ]

 dependencies {
@@ -1 +0,0 @@
-d19328c227b2b5ad81d137361ebc9cbcd0396465
@@ -0,0 +1 @@
+b5fc47ec1b5afe180f5ebb4eda755acdca7a20ae
@@ -1 +0,0 @@
-b669b3c90ea9bf73734ab26f0cb30c5c66addf55
@@ -0,0 +1 @@
+0211a055fb3e036033af4b1ca25ada0574a756ec

File diff suppressed because it is too large
@@ -77,10 +77,6 @@ public class AzureBlobStore implements BlobStore {
     public void close() {
     }

-    public boolean containerExist() throws URISyntaxException, StorageException {
-        return service.doesContainerExist(clientName, container);
-    }
-
     public boolean blobExists(String blob) throws URISyntaxException, StorageException {
         return service.blobExists(clientName, container, blob);
     }
@@ -20,11 +20,9 @@
 package org.elasticsearch.repositories.azure;

 import com.microsoft.azure.storage.LocationMode;
-import com.microsoft.azure.storage.StorageException;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.metadata.RepositoryMetaData;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.blobstore.BlobPath;

@@ -34,14 +32,9 @@ import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.env.Environment;
-import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
-import org.elasticsearch.snapshots.SnapshotCreationException;
-import org.elasticsearch.snapshots.SnapshotId;
 import org.elasticsearch.threadpool.ThreadPool;

-import java.net.URISyntaxException;
-import java.util.List;
 import java.util.Locale;
 import java.util.function.Function;

@@ -140,20 +133,6 @@ public class AzureRepository extends BlobStoreRepository {
         return chunkSize;
     }

-    @Override
-    public void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, MetaData clusterMetadata) {
-        try {
-            final AzureBlobStore blobStore = (AzureBlobStore) blobStore();
-            if (blobStore.containerExist() == false) {
-                throw new IllegalArgumentException("The bucket [" + blobStore + "] does not exist. Please create it before "
-                    + " creating an azure snapshot repository backed by it.");
-            }
-        } catch (URISyntaxException | StorageException e) {
-            throw new SnapshotCreationException(metadata.name(), snapshotId, e);
-        }
-        super.initializeSnapshot(snapshotId, indices, clusterMetadata);
-    }
-
     @Override
     public boolean isReadOnly() {
         return readonly;
@@ -141,12 +141,6 @@ public class AzureStorageService {
         return prevSettings;
     }

-    public boolean doesContainerExist(String account, String container) throws URISyntaxException, StorageException {
-        final Tuple<CloudBlobClient, Supplier<OperationContext>> client = client(account);
-        final CloudBlobContainer blobContainer = client.v1().getContainerReference(container);
-        return SocketAccess.doPrivilegedException(() -> blobContainer.exists(null, null, client.v2().get()));
-    }
-
     /**
      * Extract the blob name from a URI like https://myservice.azure.net/container/path/to/myfile
      * It should remove the container part (first part of the path) and gives path/to/myfile
@@ -55,11 +55,6 @@ public class AzureStorageServiceMock extends AzureStorageService {
         super(Settings.EMPTY);
     }

-    @Override
-    public boolean doesContainerExist(String account, String container) {
-        return true;
-    }
-
     @Override
     public boolean blobExists(String account, String container, String blob) {
         return blobs.containsKey(blob);
@@ -25,39 +25,35 @@ esplugin {
 }

 dependencies {
-  compile 'com.google.cloud:google-cloud-storage:1.59.0'
-  compile 'com.google.cloud:google-cloud-core:1.59.0'
-  compile 'com.google.guava:guava:20.0'
-  compile "joda-time:joda-time:${versions.joda}"
-  compile 'com.google.http-client:google-http-client:1.24.1'
+  compile 'com.google.cloud:google-cloud-storage:1.77.0'
+  compile 'com.google.cloud:google-cloud-core:1.77.0'
+  compile 'com.google.guava:guava:26.0-jre'
+  compile 'com.google.http-client:google-http-client:1.30.1'
   compile "org.apache.httpcomponents:httpclient:${versions.httpclient}"
   compile "org.apache.httpcomponents:httpcore:${versions.httpcore}"
   compile "commons-logging:commons-logging:${versions.commonslogging}"
   compile "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
   compile "commons-codec:commons-codec:${versions.commonscodec}"
-  compile 'com.google.api:api-common:1.7.0'
-  compile 'com.google.api:gax:1.30.0'
+  compile 'com.google.api:api-common:1.8.1'
+  compile 'com.google.api:gax:1.45.0'
   compile 'org.threeten:threetenbp:1.3.3'
-  compile 'com.google.protobuf:protobuf-java-util:3.6.0'
-  compile 'com.google.protobuf:protobuf-java:3.6.0'
+  compile 'com.google.protobuf:protobuf-java-util:3.7.1'
+  compile 'com.google.protobuf:protobuf-java:3.7.1'
   compile 'com.google.code.gson:gson:2.7'
-  compile 'com.google.api.grpc:proto-google-common-protos:1.12.0'
+  compile 'com.google.api.grpc:proto-google-common-protos:1.16.0'
   compile 'com.google.api.grpc:proto-google-iam-v1:0.12.0'
-  compile 'com.google.cloud:google-cloud-core-http:1.59.0'
-  compile 'com.google.auth:google-auth-library-credentials:0.10.0'
-  compile 'com.google.auth:google-auth-library-oauth2-http:0.10.0'
-  compile 'com.google.oauth-client:google-oauth-client:1.24.1'
-  compile 'com.google.api-client:google-api-client:1.24.1'
-  compile 'com.google.http-client:google-http-client-appengine:1.24.1'
-  compile 'com.google.http-client:google-http-client-jackson:1.24.1'
-  compile 'org.codehaus.jackson:jackson-core-asl:1.9.11'
-  compile 'com.google.http-client:google-http-client-jackson2:1.24.1'
-  compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
-  compile 'com.google.api:gax-httpjson:0.47.0'
-  compile 'io.opencensus:opencensus-api:0.15.0'
+  compile 'com.google.cloud:google-cloud-core-http:1.77.0'
+  compile 'com.google.auth:google-auth-library-credentials:0.16.1'
+  compile 'com.google.auth:google-auth-library-oauth2-http:0.16.1'
+  compile 'com.google.oauth-client:google-oauth-client:1.28.0'
+  compile 'com.google.api-client:google-api-client:1.28.0'
+  compile 'com.google.http-client:google-http-client-appengine:1.29.2'
+  compile 'com.google.http-client:google-http-client-jackson2:1.29.2'
+  compile 'com.google.api:gax-httpjson:0.62.0'
   compile 'io.grpc:grpc-context:1.12.0'
-  compile 'io.opencensus:opencensus-contrib-http-util:0.15.0'
-  compile 'com.google.apis:google-api-services-storage:v1-rev135-1.24.1'
+  compile 'io.opencensus:opencensus-api:0.18.0'
+  compile 'io.opencensus:opencensus-contrib-http-util:0.18.0'
+  compile 'com.google.apis:google-api-services-storage:v1-rev20190426-1.28.0'
 }

 dependencyLicenses {

@@ -65,7 +61,6 @@ dependencyLicenses {
   mapping from: /google-auth-.*/, to: 'google-auth'
   mapping from: /google-http-.*/, to: 'google-http'
   mapping from: /opencensus.*/, to: 'opencensus'
-  mapping from: /jackson-.*/, to: 'jackson'
   mapping from: /http.*/, to: 'httpclient'
   mapping from: /protobuf.*/, to: 'protobuf'
   mapping from: /proto-google.*/, to: 'proto-google'

@@ -81,6 +76,10 @@ thirdPartyAudit {
   'com.google.common.cache.Striped64',
   'com.google.common.cache.Striped64$1',
   'com.google.common.cache.Striped64$Cell',
+  'com.google.common.hash.Striped64',
+  'com.google.common.hash.Striped64$1',
+  'com.google.common.hash.Striped64$Cell',
+  'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
   'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
   'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
   'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
@@ -1 +0,0 @@
-ea59fb8b2450999345035dec8a6f472543391766
@@ -0,0 +1 @@
+e89befb19b08ad84b262b2f226ab79aefcaa9d7f
@@ -1 +0,0 @@
-58fa2feb11b092be0a6ebe705a28736f12374230
@@ -0,0 +1 @@
+2ade3e3502f9d14e3731347a82ea02372094211f
@@ -1 +0,0 @@
-d096f3142eb3adbf877588d1044895d148d9efcb
@@ -0,0 +1 @@
+05a1a4736acd1c4f30304be953532be6aecdc2c9
@@ -1 +0,0 @@
-37de23fb9b8b077de4ecec3192d98e752b0e5d72
@@ -0,0 +1 @@
+8fe155d766ed22480939e3a9db428151e0264d9e
@@ -1 +0,0 @@
-28d3d391dfc7e7e7951760708ad2f48cecacf38f
@@ -0,0 +1 @@
+34dd008901f382507a572f5242d0e5c5ea4ad713
@@ -1 +0,0 @@
-f981288bd84fe6d140ed70d1d8dbe994a64fa3cc
@@ -0,0 +1 @@
+9a15387cc0438ac3f3e625b6050cf39f4e981e13
@@ -1 +0,0 @@
-c079a62086121973a23d90f54e2b8c13050fa39d
@@ -0,0 +1 @@
+3407d434678faef3439a7012efa336e751ddc623
@@ -1 +0,0 @@
-f2d0c00917660b244da514f82cba96f7697f2c82
@@ -0,0 +1 @@
+7cd83a789fde368a999c1793c6297e7b4e56b2ac
@@ -1 +0,0 @@
-e2a094ec3e8acb15b99f2d4bd42ac9bbc7d9f33e
@@ -0,0 +1 @@
+e16acbc935a7762ba9b220860ae45c2c67d17d8c
@@ -1 +0,0 @@
-23dc0edf739ff1fb5a91fbddd7bd1f2cbfe0f827
@@ -0,0 +1 @@
+e368e1a8bbc0d0a4354f4e5eec076f38f6966050
@@ -1 +0,0 @@
-396eac8d3fb1332675f82b208f48a469d64f3b4a
@@ -0,0 +1 @@
+573aacbda8feb0d43f7056291fbce5496f42a6aa
@@ -1 +0,0 @@
-8535031ae10bf6a196e68f25e10c0d6382699cb6
@@ -0,0 +1 @@
+d93f4d1d8c2496d75221e53173e4c503b7096a4d
@@ -1 +0,0 @@
-02c88e77c14effdda76f02a0eac968de74e0bd4e
@@ -1 +0,0 @@
-2ad1dffd8a450055e68d8004fe003033b751d761
@@ -0,0 +1 @@
+d67891f5a438e1f339387a09628e0ab0af8b612a
@@ -1 +0,0 @@
-7b0e0218b96808868c23a7d0b40566a713931d9f
@@ -0,0 +1 @@
+9a9e5d0c33b663d6475c96ce79b2949545a113af
@@ -1 +0,0 @@
-89507701249388e1ed5ddcf8c41f4ce1be7831ef
@@ -0,0 +1 @@
+6a806eff209f36f635f943e16d97491f00f6bfab
@@ -1,8 +0,0 @@
-This copy of Jackson JSON processor streaming parser/generator is licensed under the
-Apache (Software) License, version 2.0 ("the License").
-See the License for details about distribution rights, and the
-specific rights regarding derivate works.
-
-You may obtain a copy of the License at:
-
-http://www.apache.org/licenses/LICENSE-2.0
@@ -1,20 +0,0 @@
-# Jackson JSON processor
-
-Jackson is a high-performance, Free/Open Source JSON processing library.
-It was originally written by Tatu Saloranta (tatu.saloranta@iki.fi), and has
-been in development since 2007.
-It is currently developed by a community of developers, as well as supported
-commercially by FasterXML.com.
-
-## Licensing
-
-Jackson core and extension components may licensed under different licenses.
-To find the details that apply to this artifact see the accompanying LICENSE file.
-For more information, including possible other licensing options, contact
-FasterXML.com (http://fasterxml.com).
-
-## Credits
-
-A list of contributors may be found from CREDITS file, which is included
-in some artifacts (usually source distributions); but is always available
-from the source code management (SCM) system project uses.
@@ -1 +0,0 @@
-e32303ef8bd18a5c9272780d49b81c95e05ddf43
@@ -1 +0,0 @@
-9a098392b287d7924660837f4eba0ce252013683
@@ -0,0 +1 @@
+b89a8f8dfd1e1e0d68d83c82a855624814b19a6e
@@ -1 +0,0 @@
-d88690591669d9b5ba6d91d9eac7736e58ccf3da
@@ -0,0 +1 @@
+76a37e4a931d5801a9e25b0c0353e5f37c4d1e8e
@@ -1 +0,0 @@
-1140cc74df039deb044ed0e320035e674dc13062
@@ -0,0 +1 @@
+2c5f022ea3b8e8df6a619c4cd8faf9af86022daa
@@ -1 +0,0 @@
-5333f7e422744d76840c08a106e28e519fbe3acd
@@ -0,0 +1 @@
+0bce1b6dc9e4531169542ab37a1c8641bcaa8afb
@@ -1 +0,0 @@
-3680d0042d4fe0b95ada844ff24da0698a7f0773
@@ -0,0 +1 @@
+45dc95896cfad26397675fdabef7b032d6db4bb6
@@ -21,7 +21,6 @@ package org.elasticsearch.repositories.gcs;
 
 import com.google.api.client.googleapis.GoogleUtils;
 import com.google.api.client.http.HttpTransport;
-import com.google.api.client.http.javanet.DefaultConnectionFactory;
 import com.google.api.client.http.javanet.NetHttpTransport;
 import com.google.auth.oauth2.ServiceAccountCredentials;
 import com.google.cloud.http.HttpTransportOptions;
@@ -37,10 +36,7 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.LazyInitializable;
 
 import java.io.IOException;
-import java.net.HttpURLConnection;
 import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.URL;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicReference;
 
@@ -104,10 +100,16 @@ public class GoogleCloudStorageService {
      * @return a new client storage instance that can be used to manage objects
      * (blobs)
      */
-    private Storage createClient(final String clientName, final GoogleCloudStorageClientSettings clientSettings) throws IOException {
+    private static Storage createClient(String clientName, GoogleCloudStorageClientSettings clientSettings) throws IOException {
         logger.debug(() -> new ParameterizedMessage("creating GCS client with client_name [{}], endpoint [{}]", clientName,
             clientSettings.getHost()));
-        final HttpTransport httpTransport = SocketAccess.doPrivilegedIOException(() -> createHttpTransport(clientSettings.getHost()));
+        final HttpTransport httpTransport = SocketAccess.doPrivilegedIOException(() -> {
+            final NetHttpTransport.Builder builder = new NetHttpTransport.Builder();
+            // requires java.lang.RuntimePermission "setFactory"
+            // Pin the TLS trust certificates.
+            builder.trustCertificates(GoogleUtils.getCertificateTrustStore());
+            return builder.build();
+        });
         final HttpTransportOptions httpTransportOptions = HttpTransportOptions.newBuilder()
             .setConnectTimeout(toTimeout(clientSettings.getConnectTimeout()))
             .setReadTimeout(toTimeout(clientSettings.getReadTimeout()))
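Editor's note (not part of the diff): the inlined lambda above builds an HTTP transport whose trust store is pinned to the certificates bundled with the Google SDK. A minimal standalone sketch of that call sequence — GoogleUtils.getCertificateTrustStore() and NetHttpTransport.Builder are real google-http-client APIs, while the demo class itself is hypothetical:

import com.google.api.client.googleapis.GoogleUtils;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;

import java.io.IOException;
import java.security.GeneralSecurityException;

// Hypothetical demo class, not from the Elasticsearch codebase.
public class PinnedTransportDemo {
    public static void main(String[] args) throws IOException, GeneralSecurityException {
        NetHttpTransport.Builder builder = new NetHttpTransport.Builder();
        // Pin the TLS trust store to the Google certificates shipped with the SDK,
        // instead of trusting every CA in the JVM's default trust store.
        builder.trustCertificates(GoogleUtils.getCertificateTrustStore());
        HttpTransport transport = builder.build();
        System.out.println("transport ready: " + transport);
    }
}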
@@ -145,54 +147,6 @@ public class GoogleCloudStorageService {
         return storageOptionsBuilder.build().getService();
     }
-
-    /**
-     * Pins the TLS trust certificates and, more importantly, overrides connection
-     * URLs in the case of a custom endpoint setting because some connections don't
-     * fully honor this setting (bugs in the SDK). The default connection factory
-     * opens a new connection for each request. This is required for the storage
-     * instance to be thread-safe.
-     **/
-    private static HttpTransport createHttpTransport(final String endpoint) throws Exception {
-        final NetHttpTransport.Builder builder = new NetHttpTransport.Builder();
-        // requires java.lang.RuntimePermission "setFactory"
-        builder.trustCertificates(GoogleUtils.getCertificateTrustStore());
-        if (Strings.hasLength(endpoint)) {
-            final URL endpointUrl = URI.create(endpoint).toURL();
-            // it is crucial to open a connection for each URL (see {@code
-            // DefaultConnectionFactory#openConnection}) instead of reusing connections,
-            // because the storage instance has to be thread-safe as it is cached.
-            builder.setConnectionFactory(new DefaultConnectionFactory() {
-                @Override
-                public HttpURLConnection openConnection(final URL originalUrl) throws IOException {
-                    // test if the URL is built correctly, ie following the `host` setting
-                    if (originalUrl.getHost().equals(endpointUrl.getHost()) && originalUrl.getPort() == endpointUrl.getPort()
-                            && originalUrl.getProtocol().equals(endpointUrl.getProtocol())) {
-                        return super.openConnection(originalUrl);
-                    }
-                    // override connection URLs because some don't follow the config. See
-                    // https://github.com/GoogleCloudPlatform/google-cloud-java/issues/3254 and
-                    // https://github.com/GoogleCloudPlatform/google-cloud-java/issues/3255
-                    URI originalUri;
-                    try {
-                        originalUri = originalUrl.toURI();
-                    } catch (final URISyntaxException e) {
-                        throw new RuntimeException(e);
-                    }
-                    String overridePath = "/";
-                    if (originalUri.getRawPath() != null) {
-                        overridePath = originalUri.getRawPath();
-                    }
-                    if (originalUri.getRawQuery() != null) {
-                        overridePath += "?" + originalUri.getRawQuery();
-                    }
-                    return super.openConnection(
-                        new URL(endpointUrl.getProtocol(), endpointUrl.getHost(), endpointUrl.getPort(), overridePath));
-                }
-            });
-        }
-        return builder.build();
-    }
-
 
     /**
      * Converts timeout values from the settings to a timeout value for the Google
      * Cloud SDK
@@ -281,6 +281,11 @@ class MockStorage implements Storage {
         return null;
     }
 
+    @Override
+    public WriteChannel writer(URL signedURL) {
+        return null;
+    }
+
     // Everything below this line is not implemented.
 
     @Override
@@ -288,6 +293,11 @@ class MockStorage implements Storage {
         return null;
     }
 
+    @Override
+    public Blob create(BlobInfo blobInfo, byte[] content, int offset, int length, BlobTargetOption... options) {
+        return null;
+    }
+
     @Override
     public Bucket create(BucketInfo bucketInfo, BucketTargetOption... options) {
         return null;
@@ -28,7 +28,7 @@ esplugin {
 }
 
 versions << [
-  'aws': '1.11.505'
+  'aws': '1.11.562'
 ]
 
 dependencies {
@@ -1 +0,0 @@
-d19328c227b2b5ad81d137361ebc9cbcd0396465
@@ -0,0 +1 @@
+b5fc47ec1b5afe180f5ebb4eda755acdca7a20ae
@@ -1 +0,0 @@
-2a219919090a6cadd7e119c899c90343ad9c0077
@@ -0,0 +1 @@
+1fdf4daf1960fe760e7a950dd28a05c5abc12788
@@ -1 +0,0 @@
-b4cf82765b04a579609314ab7f296a9a0ddae1cf
@@ -0,0 +1 @@
+1712c878f7e9483ceac1eb2356a9457a3c8df03e
@@ -1 +0,0 @@
-067234d307b210097e247a49f08875e0cd3f2b95
@@ -0,0 +1 @@
+1147ed0ad1f2c5a16b8271e38e3cda5cd488c8ae
@@ -50,7 +50,7 @@ import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.not;
 
 public class DieWithDignityIT extends ESRestTestCase {
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/43413")
     public void testDieWithDignity() throws Exception {
         // deleting the PID file prevents stopping the cluster from failing since it occurs if and only if the PID file exists
         final Path pidFile = PathUtils.get(System.getProperty("pidfile"));
@@ -130,3 +130,28 @@
         index: [v*]
 
   - match: { $body: {} }
+---
+"Indices recovery test with detailed parameter":
+  - skip:
+      version: " - 7.2.99"
+      reason: bug with detailed parameter fixed in 7.3
+
+  - do:
+      indices.create:
+        index: test_3
+        body:
+          settings:
+            index:
+              number_of_replicas: 0
+
+  - do:
+      cluster.health:
+        wait_for_status: green
+
+  - do:
+      indices.recovery:
+        index: [test_3]
+        human: true
+        detailed: true
+
+  - match: { test_3.shards.0.index.files.details: [] }
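Editor's note (not part of the diff): the YAML test above exercises GET /<index>/_recovery with detailed=true. A sketch of the same request through the low-level Java REST client, assuming a node on localhost:9200 and an existing index named test_3; the demo class name is hypothetical:

import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Hypothetical client-side equivalent of the YAML test above.
public class RecoveryDetailedDemo {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("GET", "/test_3/_recovery");
            request.addParameter("detailed", "true"); // include per-file details in the response
            request.addParameter("human", "true");    // human-readable sizes and times
            Response response = client.performRequest(request);
            // Expect shards[0].index.files.details to be present (an empty array here,
            // since a fresh green index has nothing to recover).
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}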
@@ -38,12 +38,14 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
+import java.util.IdentityHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.Queue;
 import java.util.Set;
+import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
 public final class ExceptionsHelper {
@@ -185,22 +187,14 @@ public final class ExceptionsHelper {
      * @return Corruption indicating exception if one is found, otherwise {@code null}
      */
     public static IOException unwrapCorruption(Throwable t) {
-        if (t != null) {
-            do {
-                for (Class<?> clazz : CORRUPTION_EXCEPTIONS) {
-                    if (clazz.isInstance(t)) {
-                        return (IOException) t;
-                    }
-                }
-                for (Throwable suppressed : t.getSuppressed()) {
-                    IOException corruptionException = unwrapCorruption(suppressed);
-                    if (corruptionException != null) {
-                        return corruptionException;
-                    }
-                }
-            } while ((t = t.getCause()) != null);
-        }
-        return null;
+        return t == null ? null : ExceptionsHelper.<IOException>unwrapCausesAndSuppressed(t, cause -> {
+            for (Class<?> clazz : CORRUPTION_EXCEPTIONS) {
+                if (clazz.isInstance(cause)) {
+                    return true;
+                }
+            }
+            return false;
+        }).orElse(null);
     }
 
     /**
@@ -213,7 +207,11 @@ public final class ExceptionsHelper {
      */
     public static Throwable unwrap(Throwable t, Class<?>... clazzes) {
         if (t != null) {
+            final Set<Throwable> seen = Collections.newSetFromMap(new IdentityHashMap<>());
             do {
+                if (seen.add(t) == false) {
+                    return null;
+                }
                 for (Class<?> clazz : clazzes) {
                     if (clazz.isInstance(t)) {
                         return t;
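Editor's note (not part of the diff): the identity-based seen set added above guards against circular cause chains, which are legal in Java and would make the old do/while walk spin forever. A small sketch of the failure mode, assuming the patched org.elasticsearch.ExceptionsHelper is on the classpath; the demo class is hypothetical:

import java.io.IOException;

// Hypothetical demo of a circular cause chain.
public class CircularCauseDemo {
    public static void main(String[] args) {
        Exception a = new Exception("a");
        Exception b = new Exception("b", a);
        a.initCause(b); // legal: a had no cause yet; now a -> b -> a is a cycle
        // Pre-patch, walking getCause() in a do/while loop never terminates here.
        // Post-patch, the IdentityHashMap-backed seen set detects the revisit
        // and unwrap(...) returns null instead of looping.
        System.out.println(org.elasticsearch.ExceptionsHelper.unwrap(a, IOException.class));
    }
}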
@@ -246,33 +244,22 @@ public final class ExceptionsHelper {
         return true;
     }
 
-    static final int MAX_ITERATIONS = 1024;
-
-    /**
-     * Unwrap the specified throwable looking for any suppressed errors or errors as a root cause of the specified throwable.
-     *
-     * @param cause the root throwable
-     * @return an optional error if one is found suppressed or a root cause in the tree rooted at the specified throwable
-     */
-    public static Optional<Error> maybeError(final Throwable cause, final Logger logger) {
-        // early terminate if the cause is already an error
-        if (cause instanceof Error) {
-            return Optional.of((Error) cause);
+    @SuppressWarnings("unchecked")
+    private static <T extends Throwable> Optional<T> unwrapCausesAndSuppressed(Throwable cause, Predicate<Throwable> predicate) {
+        if (predicate.test(cause)) {
+            return Optional.of((T) cause);
         }
 
         final Queue<Throwable> queue = new LinkedList<>();
         queue.add(cause);
-        int iterations = 0;
+        final Set<Throwable> seen = Collections.newSetFromMap(new IdentityHashMap<>());
         while (queue.isEmpty() == false) {
-            iterations++;
-            // this is a guard against deeply nested or circular chains of exceptions
-            if (iterations > MAX_ITERATIONS) {
-                logger.warn("giving up looking for fatal errors", cause);
-                break;
-            }
             final Throwable current = queue.remove();
-            if (current instanceof Error) {
-                return Optional.of((Error) current);
+            if (seen.add(current) == false) {
+                continue;
+            }
+            if (predicate.test(current)) {
+                return Optional.of((T) current);
             }
             Collections.addAll(queue, current.getSuppressed());
             if (current.getCause() != null) {
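Editor's note (not part of the diff): unwrapCausesAndSuppressed replaces the old MAX_ITERATIONS bail-out with exact cycle detection. A compilable restatement of the traversal under a hypothetical name (firstMatching), useful for seeing the breadth-first order over causes and suppressed exceptions:

import java.io.IOException;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.LinkedList;
import java.util.Optional;
import java.util.Queue;
import java.util.Set;
import java.util.function.Predicate;

// Hypothetical restatement of the traversal the patch introduces: breadth-first
// over causes and suppressed exceptions, with an identity-based seen set.
public class UnwrapDemo {
    static Optional<Throwable> firstMatching(Throwable root, Predicate<Throwable> predicate) {
        Queue<Throwable> queue = new LinkedList<>();
        queue.add(root);
        Set<Throwable> seen = Collections.newSetFromMap(new IdentityHashMap<>());
        while (queue.isEmpty() == false) {
            Throwable current = queue.remove();
            if (seen.add(current) == false) {
                continue; // already visited: cycle in the cause/suppressed graph
            }
            if (predicate.test(current)) {
                return Optional.of(current);
            }
            Collections.addAll(queue, current.getSuppressed());
            if (current.getCause() != null) {
                queue.add(current.getCause());
            }
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        Exception root = new Exception("root");
        root.addSuppressed(new RuntimeException(new IOException("buried")));
        // Finds the IOException even though it is the cause of a suppressed exception.
        System.out.println(firstMatching(root, t -> t instanceof IOException));
    }
}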
@@ -283,21 +270,24 @@ public final class ExceptionsHelper {
     }
 
     /**
-     * See {@link #maybeError(Throwable, Logger)}. Uses the class-local logger.
+     * Unwrap the specified throwable looking for any suppressed errors or errors as a root cause of the specified throwable.
+     *
+     * @param cause the root throwable
+     * @return an optional error if one is found suppressed or a root cause in the tree rooted at the specified throwable
      */
     public static Optional<Error> maybeError(final Throwable cause) {
-        return maybeError(cause, logger);
+        return unwrapCausesAndSuppressed(cause, t -> t instanceof Error);
     }
 
     /**
      * If the specified cause is an unrecoverable error, this method will rethrow the cause on a separate thread so that it can not be
      * caught and bubbles up to the uncaught exception handler. Note that the cause tree is examined for any {@link Error}. See
-     * {@link #maybeError(Throwable, Logger)} for the semantics.
+     * {@link #maybeError(Throwable)} for the semantics.
      *
      * @param throwable the throwable to possibly throw on another thread
      */
     public static void maybeDieOnAnotherThread(final Throwable throwable) {
-        ExceptionsHelper.maybeError(throwable, logger).ifPresent(error -> {
+        ExceptionsHelper.maybeError(throwable).ifPresent(error -> {
             /*
              * Here be dragons. We want to rethrow this so that it bubbles up to the uncaught exception handler. Yet, sometimes the stack
              * contains statements that catch any throwable (e.g., Netty, and the JDK futures framework). This means that a rethrow here
@@ -96,6 +96,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_6_7_2 = new Version(6070299, org.apache.lucene.util.Version.LUCENE_7_7_0);
     public static final Version V_6_8_0 = new Version(6080099, org.apache.lucene.util.Version.LUCENE_7_7_0);
     public static final Version V_6_8_1 = new Version(6080199, org.apache.lucene.util.Version.LUCENE_7_7_0);
+    public static final Version V_6_8_2 = new Version(6080299, org.apache.lucene.util.Version.LUCENE_7_7_0);
     public static final Version V_7_0_0 = new Version(7000099, org.apache.lucene.util.Version.LUCENE_8_0_0);
     public static final Version V_7_0_1 = new Version(7000199, org.apache.lucene.util.Version.LUCENE_8_0_0);
     public static final Version V_7_1_0 = new Version(7010099, org.apache.lucene.util.Version.LUCENE_8_0_0);
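Editor's note (not part of the diff): these Version constants pack major/minor/revision/build into a single integer, which is why 6.8.2 appears as 6080299 (the trailing 99 marks a release build). A quick sketch of the decoding; the demo class is hypothetical:

// Decodes the packed version-id scheme used by the Version constants above.
public class VersionIdDemo {
    public static void main(String[] args) {
        int id = 6080299;                 // V_6_8_2
        int major = id / 1000000;         // 6
        int minor = (id / 10000) % 100;   // 8
        int revision = (id / 100) % 100;  // 2
        int build = id % 100;             // 99 marks a release build
        System.out.println(major + "." + minor + "." + revision + " (build " + build + ")");
    }
}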
Some files were not shown because too many files have changed in this diff.