2010-02-08 15:30:06 +02:00
|
|
|
/*
|
2014-01-06 22:48:02 +01:00
|
|
|
* Licensed to Elasticsearch under one or more contributor
|
|
|
|
* license agreements. See the NOTICE file distributed with
|
|
|
|
* this work for additional information regarding copyright
|
|
|
|
* ownership. Elasticsearch licenses this file to you under
|
|
|
|
* the Apache License, Version 2.0 (the "License"); you may
|
|
|
|
* not use this file except in compliance with the License.
|
|
|
|
* You may obtain a copy of the License at
|
2010-02-08 15:30:06 +02:00
|
|
|
*
|
|
|
|
* http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
*
|
|
|
|
* Unless required by applicable law or agreed to in writing,
|
|
|
|
* software distributed under the License is distributed on an
|
|
|
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
|
|
* KIND, either express or implied. See the License for the
|
|
|
|
* specific language governing permissions and limitations
|
|
|
|
* under the License.
|
|
|
|
*/
|
|
|
|
|
|
|
|
package org.elasticsearch;
|
|
|
|
|
2016-02-06 11:15:24 +01:00
|
|
|
import org.elasticsearch.action.support.replication.ReplicationOperation;
|
2016-01-27 08:51:18 -05:00
|
|
|
import org.elasticsearch.cluster.action.shard.ShardStateAction;
|
2017-01-13 16:47:49 +01:00
|
|
|
import org.elasticsearch.common.CheckedFunction;
|
2017-01-17 15:44:49 +01:00
|
|
|
import org.elasticsearch.common.Nullable;
|
REST: Include suppressed exceptions on failures (#29115)
This modifies xcontent serialization of Exceptions to contain suppressed
exceptions. If there are any suppressed exceptions they are included in
the exception response by default. The reasoning here is that they are
fairly rare but when they exist they almost always add extra useful
information. Take, for example, the response when you specify two broken
ingest pipelines:
```
{
"error" : {
"root_cause" : ...snip...
"type" : "parse_exception",
"reason" : "[field] required property is missing",
"header" : {
"processor_type" : "set",
"property_name" : "field"
},
"suppressed" : [
{
"type" : "parse_exception",
"reason" : "[field] required property is missing",
"header" : {
"processor_type" : "convert",
"property_name" : "field"
}
}
]
},
"status" : 400
}
```
Moreover, when suppressed exceptions come from 500 level errors should
give us more useful debugging information.
Closes #23392
2018-03-19 10:52:50 -04:00
|
|
|
import org.elasticsearch.common.ParseField;
|
2017-04-04 00:17:05 -04:00
|
|
|
import org.elasticsearch.common.collect.Tuple;
|
2015-06-29 12:53:08 +02:00
|
|
|
import org.elasticsearch.common.io.stream.StreamInput;
|
|
|
|
import org.elasticsearch.common.io.stream.StreamOutput;
|
2016-04-25 15:55:25 -04:00
|
|
|
import org.elasticsearch.common.io.stream.Writeable;
|
2016-02-16 08:46:03 -05:00
|
|
|
import org.elasticsearch.common.logging.LoggerMessageFormat;
|
2017-08-09 15:53:30 +01:00
|
|
|
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
2015-04-24 09:36:10 +02:00
|
|
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
2018-05-01 07:44:58 -04:00
|
|
|
import org.elasticsearch.common.xcontent.XContentParseException;
|
2016-12-14 16:11:44 +01:00
|
|
|
import org.elasticsearch.common.xcontent.XContentParser;
|
2015-07-03 15:40:09 +02:00
|
|
|
import org.elasticsearch.index.Index;
|
|
|
|
import org.elasticsearch.index.shard.ShardId;
|
2021-03-09 16:46:55 -08:00
|
|
|
import org.opensearch.rest.RestStatus;
|
2019-09-26 14:21:23 +02:00
|
|
|
import org.elasticsearch.search.SearchException;
|
2017-12-06 09:15:28 +01:00
|
|
|
import org.elasticsearch.search.aggregations.MultiBucketConsumerService;
|
2016-06-30 13:41:53 +02:00
|
|
|
import org.elasticsearch.transport.TcpTransport;
|
2011-02-16 01:41:01 +02:00
|
|
|
|
2015-04-24 09:36:10 +02:00
|
|
|
import java.io.IOException;
|
2017-01-26 15:17:07 +01:00
|
|
|
import java.util.ArrayList;
|
2015-12-18 12:43:47 -08:00
|
|
|
import java.util.Arrays;
|
2017-01-24 16:12:45 +01:00
|
|
|
import java.util.Collections;
|
2015-12-18 12:43:47 -08:00
|
|
|
import java.util.HashMap;
|
|
|
|
import java.util.List;
|
|
|
|
import java.util.Map;
|
|
|
|
import java.util.Set;
|
2015-09-25 14:32:44 -04:00
|
|
|
import java.util.stream.Collectors;
|
2014-08-14 02:55:09 +02:00
|
|
|
|
2017-01-17 15:44:49 +01:00
|
|
|
import static java.util.Collections.emptyMap;
|
|
|
|
import static java.util.Collections.singletonMap;
|
2016-02-02 18:14:41 -05:00
|
|
|
import static java.util.Collections.unmodifiableMap;
|
2020-03-31 16:59:01 -04:00
|
|
|
import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_UUID_NA_VALUE;
|
2016-12-19 17:21:26 +01:00
|
|
|
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
|
2017-01-27 10:12:58 +01:00
|
|
|
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureFieldName;
|
2016-01-24 22:47:38 +01:00
|
|
|
|
2010-02-08 15:30:06 +02:00
|
|
|
/**
|
2021-03-03 16:36:43 -06:00
|
|
|
 * A base class for all OpenSearch exceptions.
|
2010-02-08 15:30:06 +02:00
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
public class OpenSearchException extends RuntimeException implements ToXContentFragment, Writeable {
|
2015-04-24 09:36:10 +02:00
|
|
|
|
2017-01-24 16:12:45 +01:00
|
|
|
private static final Version UNKNOWN_VERSION_ADDED = Version.fromId(0);
|
2016-11-23 15:49:05 -05:00
|
|
|
|
2016-11-22 18:50:29 -05:00
|
|
|
/**
|
2017-01-17 15:44:49 +01:00
|
|
|
* Passed in the {@link Params} of {@link #generateThrowableXContent(XContentBuilder, Params, Throwable)}
|
2016-11-22 18:50:29 -05:00
|
|
|
* to control if the {@code caused_by} element should render. Unlike most parameters to {@code toXContent} methods this parameter is
|
|
|
|
* internal only and not available as a URL parameter.
|
|
|
|
*/
|
2017-01-24 16:12:45 +01:00
|
|
|
private static final String REST_EXCEPTION_SKIP_CAUSE = "rest.exception.cause.skip";
|
2016-11-22 18:50:29 -05:00
|
|
|
/**
|
2017-01-17 15:44:49 +01:00
|
|
|
* Passed in the {@link Params} of {@link #generateThrowableXContent(XContentBuilder, Params, Throwable)}
|
2016-11-22 18:50:29 -05:00
|
|
|
* to control if the {@code stack_trace} element should render. Unlike most parameters to {@code toXContent} methods this parameter is
|
|
|
|
* internal only and not available as a URL parameter. Use the {@code error_trace} parameter instead.
|
|
|
|
*/
|
2015-07-14 22:36:26 +02:00
|
|
|
public static final String REST_EXCEPTION_SKIP_STACK_TRACE = "rest.exception.stacktrace.skip";
|
2015-08-19 15:40:22 +02:00
|
|
|
public static final boolean REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT = true;
|
2017-01-17 15:44:49 +01:00
|
|
|
private static final boolean REST_EXCEPTION_SKIP_CAUSE_DEFAULT = false;
|
2017-01-24 16:12:45 +01:00
|
|
|
private static final String INDEX_METADATA_KEY = "es.index";
|
|
|
|
private static final String INDEX_METADATA_KEY_UUID = "es.index_uuid";
|
|
|
|
private static final String SHARD_METADATA_KEY = "es.shard";
|
|
|
|
private static final String RESOURCE_METADATA_TYPE_KEY = "es.resource.type";
|
|
|
|
private static final String RESOURCE_METADATA_ID_KEY = "es.resource.id";
|
2015-07-03 15:40:09 +02:00
|
|
|
|
2016-12-14 16:11:44 +01:00
|
|
|
private static final String TYPE = "type";
|
|
|
|
private static final String REASON = "reason";
|
|
|
|
private static final String CAUSED_BY = "caused_by";
|
REST: Include suppressed exceptions on failures (#29115)
This modifies xcontent serialization of Exceptions to contain suppressed
exceptions. If there are any suppressed exceptions they are included in
the exception response by default. The reasoning here is that they are
fairly rare but when they exist they almost always add extra useful
information. Take, for example, the response when you specify two broken
ingest pipelines:
```
{
"error" : {
"root_cause" : ...snip...
"type" : "parse_exception",
"reason" : "[field] required property is missing",
"header" : {
"processor_type" : "set",
"property_name" : "field"
},
"suppressed" : [
{
"type" : "parse_exception",
"reason" : "[field] required property is missing",
"header" : {
"processor_type" : "convert",
"property_name" : "field"
}
}
]
},
"status" : 400
}
```
Moreover, when suppressed exceptions come from 500 level errors should
give us more useful debugging information.
Closes #23392
2018-03-19 10:52:50 -04:00
|
|
|
private static final ParseField SUPPRESSED = new ParseField("suppressed");
|
2020-06-10 20:39:32 +03:00
|
|
|
public static final String STACK_TRACE = "stack_trace";
|
2016-12-14 16:11:44 +01:00
|
|
|
private static final String HEADER = "header";
|
|
|
|
private static final String ERROR = "error";
|
|
|
|
private static final String ROOT_CAUSE = "root_cause";
|
|
|
|
|
2021-03-03 14:27:14 -06:00
|
|
|
private static final Map<Integer, CheckedFunction<StreamInput, ? extends OpenSearchException, IOException>> ID_TO_SUPPLIER;
|
|
|
|
private static final Map<Class<? extends OpenSearchException>, OpenSearchExceptionHandle> CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE;
|
2017-01-24 16:12:45 +01:00
|
|
|
private final Map<String, List<String>> metadata = new HashMap<>();
|
2015-07-02 22:40:54 +02:00
|
|
|
private final Map<String, List<String>> headers = new HashMap<>();
|
2010-02-08 15:30:06 +02:00
|
|
|
|
2015-09-28 16:27:12 +02:00
|
|
|
/**
|
2021-03-03 14:27:14 -06:00
|
|
|
* Construct a <code>OpenSearchException</code> with the specified cause exception.
|
2015-09-28 16:27:12 +02:00
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
public OpenSearchException(Throwable cause) {
|
2015-09-13 20:11:34 +02:00
|
|
|
super(cause);
|
|
|
|
}
|
2015-09-28 16:27:12 +02:00
|
|
|
|
2010-02-08 15:30:06 +02:00
|
|
|
/**
|
2021-03-03 14:27:14 -06:00
|
|
|
* Construct a <code>OpenSearchException</code> with the specified detail message.
|
2010-02-08 15:30:06 +02:00
|
|
|
*
|
2015-09-21 23:35:32 -04:00
|
|
|
* The message can be parameterized using <code>{}</code> as placeholders for the given
|
2015-07-01 15:57:43 +02:00
|
|
|
* arguments
|
|
|
|
*
|
2016-01-24 22:47:38 +01:00
|
|
|
* @param msg the detail message
|
2015-07-01 15:57:43 +02:00
|
|
|
* @param args the arguments for the message
|
2010-02-08 15:30:06 +02:00
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
public OpenSearchException(String msg, Object... args) {
|
2015-07-01 15:57:43 +02:00
|
|
|
super(LoggerMessageFormat.format(msg, args));
|
2010-02-08 15:30:06 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
2021-03-03 14:27:14 -06:00
|
|
|
* Construct a <code>OpenSearchException</code> with the specified detail message
|
2010-02-08 15:30:06 +02:00
|
|
|
* and nested exception.
|
|
|
|
*
|
2015-09-21 23:35:32 -04:00
|
|
|
* The message can be parameterized using <code>{}</code> as placeholders for the given
|
2015-07-01 15:57:43 +02:00
|
|
|
* arguments
|
|
|
|
*
|
2010-02-08 15:30:06 +02:00
|
|
|
* @param msg the detail message
|
|
|
|
* @param cause the nested exception
|
2015-07-01 15:57:43 +02:00
|
|
|
* @param args the arguments for the message
|
2010-02-08 15:30:06 +02:00
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
public OpenSearchException(String msg, Throwable cause, Object... args) {
|
2015-07-01 15:57:43 +02:00
|
|
|
super(LoggerMessageFormat.format(msg, args), cause);
|
2010-02-08 15:30:06 +02:00
|
|
|
}
|
|
|
|
|
2021-03-03 14:27:14 -06:00
|
|
|
public OpenSearchException(StreamInput in) throws IOException {
|
2016-07-04 08:41:06 -04:00
|
|
|
super(in.readOptionalString(), in.readException());
|
2015-06-29 12:53:08 +02:00
|
|
|
readStackTrace(this, in);
|
2016-08-17 17:53:11 -04:00
|
|
|
headers.putAll(in.readMapOfLists(StreamInput::readString, StreamInput::readString));
|
2018-08-24 09:51:21 +02:00
|
|
|
metadata.putAll(in.readMapOfLists(StreamInput::readString, StreamInput::readString));
|
2015-07-02 22:40:54 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
2017-01-24 16:12:45 +01:00
|
|
|
* Adds a new piece of metadata with the given key.
|
|
|
|
* If the provided key is already present, the corresponding metadata will be replaced
|
2015-07-02 22:40:54 +02:00
|
|
|
*/
|
2017-01-24 16:12:45 +01:00
|
|
|
public void addMetadata(String key, String... values) {
|
|
|
|
addMetadata(key, Arrays.asList(values));
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Adds a new piece of metadata with the given key.
|
|
|
|
* If the provided key is already present, the corresponding metadata will be replaced
|
|
|
|
*/
|
|
|
|
public void addMetadata(String key, List<String> values) {
|
|
|
|
//we need to enforce this otherwise bw comp doesn't work properly, as "es." was the previous criteria to split headers in two sets
|
|
|
|
if (key.startsWith("es.") == false) {
|
|
|
|
throw new IllegalArgumentException("exception metadata must start with [es.], found [" + key + "] instead");
|
|
|
|
}
|
|
|
|
this.metadata.put(key, values);
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Returns a set of all metadata keys on this exception
|
|
|
|
*/
|
|
|
|
public Set<String> getMetadataKeys() {
|
|
|
|
return metadata.keySet();
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Returns the list of metadata values for the given key or {@code null} if no metadata for the
|
|
|
|
* given key exists.
|
|
|
|
*/
|
|
|
|
public List<String> getMetadata(String key) {
|
|
|
|
return metadata.get(key);
|
|
|
|
}
|
|
|
|
|
|
|
|
protected Map<String, List<String>> getMetadata() {
|
|
|
|
return metadata;
|
2015-07-02 22:40:54 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Adds a new header with the given key.
|
|
|
|
* This method will replace existing header if a header with the same key already exists
|
|
|
|
*/
|
|
|
|
public void addHeader(String key, List<String> value) {
|
2017-01-24 16:12:45 +01:00
|
|
|
//we need to enforce this otherwise bw comp doesn't work properly, as "es." was the previous criteria to split headers in two sets
|
|
|
|
if (key.startsWith("es.")) {
|
|
|
|
throw new IllegalArgumentException("exception headers must not start with [es.], found [" + key + "] instead");
|
|
|
|
}
|
2015-07-02 22:40:54 +02:00
|
|
|
this.headers.put(key, value);
|
|
|
|
}
|
|
|
|
|
2017-01-24 16:12:45 +01:00
|
|
|
/**
|
|
|
|
* Adds a new header with the given key.
|
|
|
|
* This method will replace existing header if a header with the same key already exists
|
|
|
|
*/
|
|
|
|
public void addHeader(String key, String... value) {
|
|
|
|
addHeader(key, Arrays.asList(value));
|
|
|
|
}
|
2015-07-02 22:40:54 +02:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Returns a set of all header keys on this exception
|
|
|
|
*/
|
|
|
|
public Set<String> getHeaderKeys() {
|
|
|
|
return headers.keySet();
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
2017-01-24 16:12:45 +01:00
|
|
|
* Returns the list of header values for the given key or {@code null} if no header for the
|
2015-07-02 22:40:54 +02:00
|
|
|
* given key exists.
|
|
|
|
*/
|
|
|
|
public List<String> getHeader(String key) {
|
|
|
|
return headers.get(key);
|
2015-06-29 12:53:08 +02:00
|
|
|
}
|
|
|
|
|
2017-01-17 15:44:49 +01:00
|
|
|
protected Map<String, List<String>> getHeaders() {
|
|
|
|
return headers;
|
|
|
|
}
|
|
|
|
|
2011-02-16 01:41:01 +02:00
|
|
|
/**
|
|
|
|
* Returns the rest status code associated with this exception.
|
|
|
|
*/
|
|
|
|
public RestStatus status() {
|
2012-11-12 17:09:34 +01:00
|
|
|
Throwable cause = unwrapCause();
|
|
|
|
if (cause == this) {
|
2011-02-16 01:41:01 +02:00
|
|
|
return RestStatus.INTERNAL_SERVER_ERROR;
|
|
|
|
} else {
|
2015-04-24 09:36:10 +02:00
|
|
|
return ExceptionsHelper.status(cause);
|
2011-02-16 01:41:01 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2010-02-13 20:03:37 +02:00
|
|
|
/**
|
|
|
|
* Unwraps the actual cause from the exception for cases when the exception is a
|
2021-03-03 19:23:00 -06:00
|
|
|
* {@link OpenSearchWrapperException}.
|
2010-02-13 20:03:37 +02:00
|
|
|
*
|
2016-07-04 08:41:06 -04:00
|
|
|
* @see ExceptionsHelper#unwrapCause(Throwable)
|
2010-02-13 20:03:37 +02:00
|
|
|
*/
|
2010-02-08 15:30:06 +02:00
|
|
|
public Throwable unwrapCause() {
|
|
|
|
return ExceptionsHelper.unwrapCause(this);
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Return the detail message, including the message from the nested exception
|
|
|
|
* if there is one.
|
|
|
|
*/
|
|
|
|
public String getDetailedMessage() {
|
|
|
|
if (getCause() != null) {
|
|
|
|
StringBuilder sb = new StringBuilder();
|
2010-07-12 01:33:38 +03:00
|
|
|
sb.append(toString()).append("; ");
|
2021-03-03 14:27:14 -06:00
|
|
|
if (getCause() instanceof OpenSearchException) {
|
|
|
|
sb.append(((OpenSearchException) getCause()).getDetailedMessage());
|
2010-07-12 01:33:38 +03:00
|
|
|
} else {
|
|
|
|
sb.append(getCause());
|
2010-02-08 15:30:06 +02:00
|
|
|
}
|
|
|
|
return sb.toString();
|
|
|
|
} else {
|
2010-07-12 01:33:38 +03:00
|
|
|
return super.toString();
|
2010-02-08 15:30:06 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
2011-01-04 18:09:46 +02:00
|
|
|
* Retrieve the innermost cause of this exception, if none, returns the current exception.
|
2010-02-08 15:30:06 +02:00
|
|
|
*/
|
|
|
|
public Throwable getRootCause() {
|
2011-01-04 18:09:46 +02:00
|
|
|
Throwable rootCause = this;
|
2010-02-08 15:30:06 +02:00
|
|
|
Throwable cause = getCause();
|
|
|
|
while (cause != null && cause != rootCause) {
|
|
|
|
rootCause = cause;
|
|
|
|
cause = cause.getCause();
|
|
|
|
}
|
|
|
|
return rootCause;
|
|
|
|
}
|
|
|
|
|
2016-04-25 15:55:25 -04:00
|
|
|
@Override
|
2015-06-29 12:53:08 +02:00
|
|
|
public void writeTo(StreamOutput out) throws IOException {
|
|
|
|
out.writeOptionalString(this.getMessage());
|
2016-07-05 14:37:01 -04:00
|
|
|
out.writeException(this.getCause());
|
2020-03-25 09:21:51 -04:00
|
|
|
writeStackTraces(this, out, StreamOutput::writeException);
|
2018-08-24 09:51:21 +02:00
|
|
|
out.writeMapOfLists(headers, StreamOutput::writeString, StreamOutput::writeString);
|
|
|
|
out.writeMapOfLists(metadata, StreamOutput::writeString, StreamOutput::writeString);
|
2015-06-29 12:53:08 +02:00
|
|
|
}
|
|
|
|
|
2021-03-03 14:27:14 -06:00
|
|
|
public static OpenSearchException readException(StreamInput input, int id) throws IOException {
|
2021-03-03 16:36:43 -06:00
|
|
|
CheckedFunction<StreamInput, ? extends OpenSearchException, IOException> opensearchException = ID_TO_SUPPLIER.get(id);
|
|
|
|
if (opensearchException == null) {
|
2019-09-26 14:21:23 +02:00
|
|
|
if (id == 127 && input.getVersion().before(Version.V_7_5_0)) {
|
|
|
|
// was SearchContextException
|
|
|
|
return new SearchException(input);
|
|
|
|
}
|
2015-09-17 10:26:31 +02:00
|
|
|
throw new IllegalStateException("unknown exception for id: " + id);
|
2015-06-29 12:53:08 +02:00
|
|
|
}
|
2021-03-03 16:36:43 -06:00
|
|
|
return opensearchException.apply(input);
|
2015-06-29 12:53:08 +02:00
|
|
|
}
|
|
|
|
|
2015-06-30 10:00:10 +02:00
|
|
|
/**
|
2016-02-10 13:18:21 -08:00
|
|
|
* Returns <code>true</code> iff the given class is a registered for an exception to be read.
|
2015-06-30 10:00:10 +02:00
|
|
|
*/
|
2016-11-21 12:51:06 +01:00
|
|
|
public static boolean isRegistered(Class<? extends Throwable> exception, Version version) {
|
2021-03-03 14:27:14 -06:00
|
|
|
OpenSearchExceptionHandle openSearchExceptionHandle = CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE.get(exception);
|
|
|
|
if (openSearchExceptionHandle != null) {
|
|
|
|
return version.onOrAfter(openSearchExceptionHandle.versionAdded);
|
2016-11-21 12:51:06 +01:00
|
|
|
}
|
|
|
|
return false;
|
2015-06-30 10:00:10 +02:00
|
|
|
}
|
|
|
|
|
2021-03-03 14:27:14 -06:00
|
|
|
static Set<Class<? extends OpenSearchException>> getRegisteredKeys() { // for testing
|
2015-09-25 14:32:44 -04:00
|
|
|
return CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE.keySet();
|
2015-09-17 10:26:31 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Returns the serialization id the given exception.
|
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
public static int getId(Class<? extends OpenSearchException> exception) {
|
2015-09-25 14:32:44 -04:00
|
|
|
return CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE.get(exception).id;
|
2015-06-30 10:00:10 +02:00
|
|
|
}
|
|
|
|
|
2015-04-24 09:36:10 +02:00
|
|
|
@Override
|
|
|
|
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
2015-07-03 12:13:56 +02:00
|
|
|
Throwable ex = ExceptionsHelper.unwrapCause(this);
|
|
|
|
if (ex != this) {
|
2017-01-17 15:44:49 +01:00
|
|
|
generateThrowableXContent(builder, params, this);
|
2015-04-24 09:36:10 +02:00
|
|
|
} else {
|
2017-01-24 16:12:45 +01:00
|
|
|
innerToXContent(builder, params, this, getExceptionName(), getMessage(), headers, metadata, getCause());
|
2015-04-24 09:36:10 +02:00
|
|
|
}
|
|
|
|
return builder;
|
|
|
|
}
|
|
|
|
|
2017-01-17 15:44:49 +01:00
|
|
|
protected static void innerToXContent(XContentBuilder builder, Params params,
|
|
|
|
Throwable throwable, String type, String message, Map<String, List<String>> headers,
|
2017-01-24 16:12:45 +01:00
|
|
|
Map<String, List<String>> metadata, Throwable cause) throws IOException {
|
2017-01-17 15:44:49 +01:00
|
|
|
builder.field(TYPE, type);
|
|
|
|
builder.field(REASON, message);
|
2015-04-24 09:36:10 +02:00
|
|
|
|
2017-01-24 16:12:45 +01:00
|
|
|
for (Map.Entry<String, List<String>> entry : metadata.entrySet()) {
|
|
|
|
headerToXContent(builder, entry.getKey().substring("es.".length()), entry.getValue());
|
2017-01-17 15:44:49 +01:00
|
|
|
}
|
|
|
|
|
2021-03-03 14:27:14 -06:00
|
|
|
if (throwable instanceof OpenSearchException) {
|
|
|
|
OpenSearchException exception = (OpenSearchException) throwable;
|
2017-01-17 15:44:49 +01:00
|
|
|
exception.metadataToXContent(builder, params);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (params.paramAsBoolean(REST_EXCEPTION_SKIP_CAUSE, REST_EXCEPTION_SKIP_CAUSE_DEFAULT) == false) {
|
|
|
|
if (cause != null) {
|
|
|
|
builder.field(CAUSED_BY);
|
|
|
|
builder.startObject();
|
|
|
|
generateThrowableXContent(builder, params, cause);
|
|
|
|
builder.endObject();
|
2015-07-03 15:40:09 +02:00
|
|
|
}
|
|
|
|
}
|
2017-01-17 15:44:49 +01:00
|
|
|
|
2017-01-24 16:12:45 +01:00
|
|
|
if (headers.isEmpty() == false) {
|
2017-01-17 15:44:49 +01:00
|
|
|
builder.startObject(HEADER);
|
2017-01-24 16:12:45 +01:00
|
|
|
for (Map.Entry<String, List<String>> entry : headers.entrySet()) {
|
|
|
|
headerToXContent(builder, entry.getKey(), entry.getValue());
|
2017-01-17 15:44:49 +01:00
|
|
|
}
|
2015-07-03 15:40:09 +02:00
|
|
|
builder.endObject();
|
|
|
|
}
|
2017-01-17 15:44:49 +01:00
|
|
|
|
|
|
|
if (params.paramAsBoolean(REST_EXCEPTION_SKIP_STACK_TRACE, REST_EXCEPTION_SKIP_STACK_TRACE_DEFAULT) == false) {
|
|
|
|
builder.field(STACK_TRACE, ExceptionsHelper.stackTrace(throwable));
|
|
|
|
}
|
REST: Include suppressed exceptions on failures (#29115)
This modifies xcontent serialization of Exceptions to contain suppressed
exceptions. If there are any suppressed exceptions they are included in
the exception response by default. The reasoning here is that they are
fairly rare but when they exist they almost always add extra useful
information. Take, for example, the response when you specify two broken
ingest pipelines:
```
{
"error" : {
"root_cause" : ...snip...
"type" : "parse_exception",
"reason" : "[field] required property is missing",
"header" : {
"processor_type" : "set",
"property_name" : "field"
},
"suppressed" : [
{
"type" : "parse_exception",
"reason" : "[field] required property is missing",
"header" : {
"processor_type" : "convert",
"property_name" : "field"
}
}
]
},
"status" : 400
}
```
Moreover, when suppressed exceptions come from 500 level errors should
give us more useful debugging information.
Closes #23392
2018-03-19 10:52:50 -04:00
|
|
|
|
|
|
|
Throwable[] allSuppressed = throwable.getSuppressed();
|
|
|
|
if (allSuppressed.length > 0) {
|
|
|
|
builder.startArray(SUPPRESSED.getPreferredName());
|
|
|
|
for (Throwable suppressed : allSuppressed) {
|
|
|
|
builder.startObject();
|
|
|
|
generateThrowableXContent(builder, params, suppressed);
|
|
|
|
builder.endObject();
|
|
|
|
}
|
|
|
|
builder.endArray();
|
|
|
|
}
|
2015-07-03 15:40:09 +02:00
|
|
|
}
|
|
|
|
|
2017-01-17 15:44:49 +01:00
|
|
|
private static void headerToXContent(XContentBuilder builder, String key, List<String> values) throws IOException {
|
2015-07-03 15:40:09 +02:00
|
|
|
if (values != null && values.isEmpty() == false) {
|
2016-01-24 22:47:38 +01:00
|
|
|
if (values.size() == 1) {
|
2015-07-03 15:40:09 +02:00
|
|
|
builder.field(key, values.get(0));
|
|
|
|
} else {
|
|
|
|
builder.startArray(key);
|
|
|
|
for (String value : values) {
|
|
|
|
builder.value(value);
|
|
|
|
}
|
|
|
|
builder.endArray();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-04-24 09:36:10 +02:00
|
|
|
/**
|
2017-01-17 15:44:49 +01:00
|
|
|
* Renders additional per exception information into the XContent
|
2015-04-24 09:36:10 +02:00
|
|
|
*/
|
2017-01-17 15:44:49 +01:00
|
|
|
protected void metadataToXContent(XContentBuilder builder, Params params) throws IOException {
|
|
|
|
}
|
|
|
|
|
2017-01-26 15:17:07 +01:00
|
|
|
/**
|
2021-03-03 14:27:14 -06:00
|
|
|
* Generate a {@link OpenSearchException} from a {@link XContentParser}. This does not
|
2017-01-26 15:17:07 +01:00
|
|
|
* return the original exception type (ie NodeClosedException for example) but just wraps
|
|
|
|
* the type, the reason and the cause of the exception. It also recursively parses the
|
2021-03-03 14:27:14 -06:00
|
|
|
* tree structure of the cause, returning it as a tree structure of {@link OpenSearchException}
|
2017-01-26 15:17:07 +01:00
|
|
|
* instances.
|
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
public static OpenSearchException fromXContent(XContentParser parser) throws IOException {
|
2017-01-26 15:17:07 +01:00
|
|
|
XContentParser.Token token = parser.nextToken();
|
2020-10-05 19:08:32 +02:00
|
|
|
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
|
2017-02-02 17:00:16 +01:00
|
|
|
return innerFromXContent(parser, false);
|
2017-01-26 15:17:07 +01:00
|
|
|
}
|
|
|
|
|
2021-03-03 14:27:14 -06:00
|
|
|
public static OpenSearchException innerFromXContent(XContentParser parser, boolean parseRootCauses) throws IOException {
|
2017-01-26 15:17:07 +01:00
|
|
|
XContentParser.Token token = parser.currentToken();
|
2020-10-05 19:08:32 +02:00
|
|
|
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser);
|
2017-01-26 15:17:07 +01:00
|
|
|
|
|
|
|
String type = null, reason = null, stack = null;
|
2021-03-03 14:27:14 -06:00
|
|
|
OpenSearchException cause = null;
|
2017-01-26 15:17:07 +01:00
|
|
|
Map<String, List<String>> metadata = new HashMap<>();
|
|
|
|
Map<String, List<String>> headers = new HashMap<>();
|
2021-03-03 14:27:14 -06:00
|
|
|
List<OpenSearchException> rootCauses = new ArrayList<>();
|
|
|
|
List<OpenSearchException> suppressed = new ArrayList<>();
|
2017-01-26 15:17:07 +01:00
|
|
|
|
|
|
|
for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) {
|
|
|
|
String currentFieldName = parser.currentName();
|
|
|
|
token = parser.nextToken();
|
|
|
|
|
|
|
|
if (token.isValue()) {
|
|
|
|
if (TYPE.equals(currentFieldName)) {
|
|
|
|
type = parser.text();
|
|
|
|
} else if (REASON.equals(currentFieldName)) {
|
|
|
|
reason = parser.text();
|
|
|
|
} else if (STACK_TRACE.equals(currentFieldName)) {
|
|
|
|
stack = parser.text();
|
|
|
|
} else if (token == XContentParser.Token.VALUE_STRING) {
|
|
|
|
metadata.put(currentFieldName, Collections.singletonList(parser.text()));
|
|
|
|
}
|
|
|
|
} else if (token == XContentParser.Token.START_OBJECT) {
|
|
|
|
if (CAUSED_BY.equals(currentFieldName)) {
|
|
|
|
cause = fromXContent(parser);
|
|
|
|
} else if (HEADER.equals(currentFieldName)) {
|
|
|
|
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
|
|
|
if (token == XContentParser.Token.FIELD_NAME) {
|
|
|
|
currentFieldName = parser.currentName();
|
|
|
|
} else {
|
|
|
|
List<String> values = headers.getOrDefault(currentFieldName, new ArrayList<>());
|
|
|
|
if (token == XContentParser.Token.VALUE_STRING) {
|
|
|
|
values.add(parser.text());
|
|
|
|
} else if (token == XContentParser.Token.START_ARRAY) {
|
|
|
|
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
|
|
|
if (token == XContentParser.Token.VALUE_STRING) {
|
|
|
|
values.add(parser.text());
|
|
|
|
} else {
|
|
|
|
parser.skipChildren();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else if (token == XContentParser.Token.START_OBJECT) {
|
|
|
|
parser.skipChildren();
|
|
|
|
}
|
|
|
|
headers.put(currentFieldName, values);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// Any additional metadata object added by the metadataToXContent method is ignored
|
|
|
|
// and skipped, so that the parser does not fail on unknown fields. The parser only
|
|
|
|
// support metadata key-pairs and metadata arrays of values.
|
|
|
|
parser.skipChildren();
|
|
|
|
}
|
|
|
|
} else if (token == XContentParser.Token.START_ARRAY) {
|
2017-02-02 17:00:16 +01:00
|
|
|
if (parseRootCauses && ROOT_CAUSE.equals(currentFieldName)) {
|
|
|
|
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
|
|
|
rootCauses.add(fromXContent(parser));
|
2017-01-26 15:17:07 +01:00
|
|
|
}
|
REST: Include suppressed exceptions on failures (#29115)
This modifies xcontent serialization of Exceptions to contain suppressed
exceptions. If there are any suppressed exceptions they are included in
the exception response by default. The reasoning here is that they are
fairly rare but when they exist they almost always add extra useful
information. Take, for example, the response when you specify two broken
ingest pipelines:
```
{
"error" : {
"root_cause" : ...snip...
"type" : "parse_exception",
"reason" : "[field] required property is missing",
"header" : {
"processor_type" : "set",
"property_name" : "field"
},
"suppressed" : [
{
"type" : "parse_exception",
"reason" : "[field] required property is missing",
"header" : {
"processor_type" : "convert",
"property_name" : "field"
}
}
]
},
"status" : 400
}
```
Moreover, when suppressed exceptions come from 500 level errors should
give us more useful debugging information.
Closes #23392
2018-03-19 10:52:50 -04:00
|
|
|
} else if (SUPPRESSED.match(currentFieldName, parser.getDeprecationHandler())) {
|
|
|
|
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
|
|
|
suppressed.add(fromXContent(parser));
|
|
|
|
}
|
2017-02-02 17:00:16 +01:00
|
|
|
} else {
|
|
|
|
// Parse the array and add each item to the corresponding list of metadata.
|
|
|
|
// Arrays of objects are not supported yet and just ignored and skipped.
|
|
|
|
List<String> values = new ArrayList<>();
|
|
|
|
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
|
|
|
if (token == XContentParser.Token.VALUE_STRING) {
|
|
|
|
values.add(parser.text());
|
|
|
|
} else {
|
|
|
|
parser.skipChildren();
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (values.size() > 0) {
|
|
|
|
if (metadata.containsKey(currentFieldName)) {
|
|
|
|
values.addAll(metadata.get(currentFieldName));
|
|
|
|
}
|
|
|
|
metadata.put(currentFieldName, values);
|
2017-01-26 15:17:07 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-03-03 14:27:14 -06:00
|
|
|
OpenSearchException e = new OpenSearchException(buildMessage(type, reason, stack), cause);
|
2017-01-26 15:17:07 +01:00
|
|
|
for (Map.Entry<String, List<String>> entry : metadata.entrySet()) {
|
|
|
|
//subclasses can print out additional metadata through the metadataToXContent method. Simple key-value pairs will be
|
|
|
|
//parsed back and become part of this metadata set, while objects and arrays are not supported when parsing back.
|
|
|
|
//Those key-value pairs become part of the metadata set and inherit the "es." prefix as that is currently required
|
|
|
|
//by addMetadata. The prefix will get stripped out when printing metadata out so it will be effectively invisible.
|
|
|
|
//TODO move subclasses that print out simple metadata to using addMetadata directly and support also numbers and booleans.
|
|
|
|
//TODO rename metadataToXContent and have only SearchPhaseExecutionException use it, which prints out complex objects
|
|
|
|
e.addMetadata("es." + entry.getKey(), entry.getValue());
|
|
|
|
}
|
|
|
|
for (Map.Entry<String, List<String>> header : headers.entrySet()) {
|
|
|
|
e.addHeader(header.getKey(), header.getValue());
|
|
|
|
}
|
2017-02-02 17:00:16 +01:00
|
|
|
|
|
|
|
// Adds root causes as suppressed exception. This way they are not lost
|
|
|
|
// after parsing and can be retrieved using getSuppressed() method.
|
2021-03-03 14:27:14 -06:00
|
|
|
for (OpenSearchException rootCause : rootCauses) {
|
2017-02-02 17:00:16 +01:00
|
|
|
e.addSuppressed(rootCause);
|
|
|
|
}
|
2021-03-03 14:27:14 -06:00
|
|
|
for (OpenSearchException s : suppressed) {
|
REST: Include suppressed exceptions on failures (#29115)
This modifies xcontent serialization of Exceptions to contain suppressed
exceptions. If there are any suppressed exceptions they are included in
the exception response by default. The reasoning here is that they are
fairly rare but when they exist they almost always add extra useful
information. Take, for example, the response when you specify two broken
ingest pipelines:
```
{
"error" : {
"root_cause" : ...snip...
"type" : "parse_exception",
"reason" : "[field] required property is missing",
"header" : {
"processor_type" : "set",
"property_name" : "field"
},
"suppressed" : [
{
"type" : "parse_exception",
"reason" : "[field] required property is missing",
"header" : {
"processor_type" : "convert",
"property_name" : "field"
}
}
]
},
"status" : 400
}
```
Moreover, when suppressed exceptions come from 500 level errors should
give us more useful debugging information.
Closes #23392
2018-03-19 10:52:50 -04:00
|
|
|
e.addSuppressed(s);
|
|
|
|
}
|
2017-01-26 15:17:07 +01:00
|
|
|
return e;
|
|
|
|
}
|
|
|
|
|
2017-01-17 15:44:49 +01:00
|
|
|
/**
|
2021-03-03 14:27:14 -06:00
|
|
|
* Static toXContent helper method that renders {@link OpenSearchException} or {@link Throwable} instances
|
2017-01-17 15:44:49 +01:00
|
|
|
* as XContent, delegating the rendering to {@link #toXContent(XContentBuilder, Params)}
|
2017-01-24 16:12:45 +01:00
|
|
|
* or {@link #innerToXContent(XContentBuilder, Params, Throwable, String, String, Map, Map, Throwable)}.
|
2017-01-17 15:44:49 +01:00
|
|
|
*
|
2017-01-26 15:17:07 +01:00
|
|
|
* This method is usually used when the {@link Throwable} is rendered as a part of another XContent object, and its result can
|
|
|
|
* be parsed back using the {@link #fromXContent(XContentParser)} method.
|
2017-01-17 15:44:49 +01:00
|
|
|
*/
|
|
|
|
public static void generateThrowableXContent(XContentBuilder builder, Params params, Throwable t) throws IOException {
|
|
|
|
t = ExceptionsHelper.unwrapCause(t);
|
|
|
|
|
2021-03-03 14:27:14 -06:00
|
|
|
if (t instanceof OpenSearchException) {
|
|
|
|
((OpenSearchException) t).toXContent(builder, params);
|
2015-04-24 09:36:10 +02:00
|
|
|
} else {
|
2017-01-24 16:12:45 +01:00
|
|
|
innerToXContent(builder, params, t, getExceptionName(t), t.getMessage(), emptyMap(), emptyMap(), t.getCause());
|
2017-01-17 15:44:49 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Render any exception as a xcontent, encapsulated within a field or object named "error". The level of details that are rendered
|
|
|
|
* depends on the value of the "detailed" parameter: when it's false only a simple message based on the type and message of the
|
|
|
|
* exception is rendered. When it's true all detail are provided including guesses root causes, cause and potentially stack
|
|
|
|
* trace.
|
|
|
|
*
|
2017-01-27 10:12:58 +01:00
|
|
|
* This method is usually used when the {@link Exception} is rendered as a full XContent object, and its output can be parsed
|
|
|
|
* by the {@link #failureFromXContent(XContentParser)} method.
|
2017-01-17 15:44:49 +01:00
|
|
|
*/
|
|
|
|
public static void generateFailureXContent(XContentBuilder builder, Params params, @Nullable Exception e, boolean detailed)
|
|
|
|
throws IOException {
|
|
|
|
// No exception to render as an error
|
|
|
|
if (e == null) {
|
|
|
|
builder.field(ERROR, "unknown");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Render the exception with a simple message
|
|
|
|
if (detailed == false) {
|
2021-03-03 14:27:14 -06:00
|
|
|
String message = "No OpenSearchException found";
|
2017-01-17 15:44:49 +01:00
|
|
|
Throwable t = e;
|
|
|
|
for (int counter = 0; counter < 10 && t != null; counter++) {
|
2021-03-03 14:27:14 -06:00
|
|
|
if (t instanceof OpenSearchException) {
|
2017-01-17 15:44:49 +01:00
|
|
|
message = t.getClass().getSimpleName() + "[" + t.getMessage() + "]";
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
t = t.getCause();
|
|
|
|
}
|
|
|
|
builder.field(ERROR, message);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Render the exception with all details
|
2021-03-03 14:27:14 -06:00
|
|
|
final OpenSearchException[] rootCauses = OpenSearchException.guessRootCauses(e);
|
2017-01-17 15:44:49 +01:00
|
|
|
builder.startObject(ERROR);
|
|
|
|
{
|
|
|
|
builder.startArray(ROOT_CAUSE);
|
2021-03-03 14:27:14 -06:00
|
|
|
for (OpenSearchException rootCause : rootCauses) {
|
2015-04-24 09:36:10 +02:00
|
|
|
builder.startObject();
|
2017-01-17 15:44:49 +01:00
|
|
|
rootCause.toXContent(builder, new DelegatingMapParams(singletonMap(REST_EXCEPTION_SKIP_CAUSE, "true"), params));
|
2015-04-24 09:36:10 +02:00
|
|
|
builder.endObject();
|
|
|
|
}
|
2017-01-17 15:44:49 +01:00
|
|
|
builder.endArray();
|
2016-12-14 16:11:44 +01:00
|
|
|
}
|
2017-01-17 15:44:49 +01:00
|
|
|
generateThrowableXContent(builder, params, e);
|
|
|
|
builder.endObject();
|
2016-12-14 16:11:44 +01:00
|
|
|
}
|
|
|
|
|
2017-01-27 10:12:58 +01:00
|
|
|
/**
|
|
|
|
* Parses the output of {@link #generateFailureXContent(XContentBuilder, Params, Exception, boolean)}
|
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
public static OpenSearchException failureFromXContent(XContentParser parser) throws IOException {
|
2017-02-01 10:11:17 +01:00
|
|
|
XContentParser.Token token = parser.currentToken();
|
2017-01-27 10:12:58 +01:00
|
|
|
ensureFieldName(parser, token, ERROR);
|
|
|
|
|
|
|
|
token = parser.nextToken();
|
|
|
|
if (token.isValue()) {
|
2021-03-03 14:27:14 -06:00
|
|
|
return new OpenSearchException(buildMessage("exception", parser.text(), null));
|
2017-01-27 10:12:58 +01:00
|
|
|
}
|
|
|
|
|
2020-10-05 19:08:32 +02:00
|
|
|
ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
|
2017-01-27 10:12:58 +01:00
|
|
|
token = parser.nextToken();
|
|
|
|
|
2017-02-02 17:00:16 +01:00
|
|
|
// Root causes are parsed in the innerFromXContent() and are added as suppressed exceptions.
|
|
|
|
return innerFromXContent(parser, true);
|
2017-01-27 10:12:58 +01:00
|
|
|
}
|
|
|
|
|
2015-04-24 09:36:10 +02:00
|
|
|
/**
|
2016-02-10 13:18:21 -08:00
|
|
|
* Returns the root cause of this exception or multiple if different shards caused different exceptions
|
2015-04-24 09:36:10 +02:00
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
public OpenSearchException[] guessRootCauses() {
|
2015-04-24 09:36:10 +02:00
|
|
|
final Throwable cause = getCause();
|
2021-03-03 14:27:14 -06:00
|
|
|
if (cause != null && cause instanceof OpenSearchException) {
|
|
|
|
return ((OpenSearchException) cause).guessRootCauses();
|
2015-04-24 09:36:10 +02:00
|
|
|
}
|
2021-03-03 14:27:14 -06:00
|
|
|
return new OpenSearchException[]{this};
|
2015-04-24 09:36:10 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
2016-02-10 13:18:21 -08:00
|
|
|
* Returns the root cause of this exception or multiple if different shards caused different exceptions.
|
2021-03-03 14:27:14 -06:00
|
|
|
* If the given exception is not an instance of {@link OpenSearchException} an empty array
|
2015-04-24 09:36:10 +02:00
|
|
|
* is returned.
|
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
public static OpenSearchException[] guessRootCauses(Throwable t) {
|
2015-04-24 09:36:10 +02:00
|
|
|
Throwable ex = ExceptionsHelper.unwrapCause(t);
|
2021-03-03 14:27:14 -06:00
|
|
|
if (ex instanceof OpenSearchException) {
|
|
|
|
// OpenSearchException knows how to guess its own root cause
|
|
|
|
return ((OpenSearchException) ex).guessRootCauses();
|
2015-04-24 09:36:10 +02:00
|
|
|
}
|
2018-05-01 07:44:58 -04:00
|
|
|
if (ex instanceof XContentParseException) {
|
|
|
|
/*
|
|
|
|
* We'd like to unwrap parsing exceptions to the inner-most
|
|
|
|
* parsing exception because that is generally the most interesting
|
|
|
|
* exception to return to the user. If that exception is caused by
|
2021-03-03 14:27:14 -06:00
|
|
|
* an OpenSearchException we'd like to keep unwrapping because
|
2018-05-01 07:44:58 -04:00
|
|
|
* ElasticserachExceptions tend to contain useful information for
|
|
|
|
* the user.
|
|
|
|
*/
|
|
|
|
Throwable cause = ex.getCause();
|
|
|
|
if (cause != null) {
|
2021-03-03 14:27:14 -06:00
|
|
|
if (cause instanceof XContentParseException || cause instanceof OpenSearchException) {
|
2018-05-01 07:44:58 -04:00
|
|
|
return guessRootCauses(ex.getCause());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2021-03-03 14:27:14 -06:00
|
|
|
return new OpenSearchException[]{new OpenSearchException(ex.getMessage(), ex) {
|
2015-04-28 13:59:20 +02:00
|
|
|
@Override
|
|
|
|
protected String getExceptionName() {
|
|
|
|
return getExceptionName(getCause());
|
|
|
|
}
|
|
|
|
}};
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Returns the underscore-cased name of this exception (see {@link #getExceptionName(Throwable)}),
 * used as the "type" value when rendering this exception as XContent.
 */
protected String getExceptionName() {
    return getExceptionName(this);
}
|
|
|
|
|
|
|
|
/**
|
2021-03-03 14:27:14 -06:00
|
|
|
* Returns an underscore case name for the given exception. This method strips {@code OpenSearch} prefixes from exception names.
|
2015-04-24 09:36:10 +02:00
|
|
|
*/
|
|
|
|
public static String getExceptionName(Throwable ex) {
|
|
|
|
String simpleName = ex.getClass().getSimpleName();
|
2021-03-03 14:27:14 -06:00
|
|
|
if (simpleName.startsWith("OpenSearch")) {
|
|
|
|
simpleName = simpleName.substring("OpenSearch".length());
|
2015-04-24 09:36:10 +02:00
|
|
|
}
|
2016-04-22 09:12:53 -07:00
|
|
|
// TODO: do we really need to make the exception name in underscore casing?
|
|
|
|
return toUnderscoreCase(simpleName);
|
2015-04-24 09:36:10 +02:00
|
|
|
}
|
|
|
|
|
2017-01-27 10:12:58 +01:00
|
|
|
static String buildMessage(String type, String reason, String stack) {
|
2021-03-03 14:27:14 -06:00
|
|
|
StringBuilder message = new StringBuilder("OpenSearch exception [");
|
2017-01-27 10:12:58 +01:00
|
|
|
message.append(TYPE).append('=').append(type).append(", ");
|
|
|
|
message.append(REASON).append('=').append(reason);
|
|
|
|
if (stack != null) {
|
|
|
|
message.append(", ").append(STACK_TRACE).append('=').append(stack);
|
|
|
|
}
|
|
|
|
message.append(']');
|
|
|
|
return message.toString();
|
|
|
|
}
|
|
|
|
|
2015-04-24 09:36:10 +02:00
|
|
|
@Override
|
|
|
|
public String toString() {
|
2015-07-03 15:40:09 +02:00
|
|
|
StringBuilder builder = new StringBuilder();
|
2017-01-24 16:12:45 +01:00
|
|
|
if (metadata.containsKey(INDEX_METADATA_KEY)) {
|
2016-01-24 22:47:38 +01:00
|
|
|
builder.append(getIndex());
|
2017-01-24 16:12:45 +01:00
|
|
|
if (metadata.containsKey(SHARD_METADATA_KEY)) {
|
2015-07-03 15:40:09 +02:00
|
|
|
builder.append('[').append(getShardId()).append(']');
|
|
|
|
}
|
|
|
|
builder.append(' ');
|
|
|
|
}
|
|
|
|
return builder.append(ExceptionsHelper.detailedMessage(this).trim()).toString();
|
2015-04-24 09:36:10 +02:00
|
|
|
}
|
2015-06-29 12:53:08 +02:00
|
|
|
|
2015-06-29 20:39:04 +02:00
|
|
|
/**
|
|
|
|
* Deserializes stacktrace elements as well as suppressed exceptions from the given output stream and
|
|
|
|
* adds it to the given exception.
|
|
|
|
*/
|
2015-06-29 12:53:08 +02:00
|
|
|
public static <T extends Throwable> T readStackTrace(T throwable, StreamInput in) throws IOException {
|
2020-07-29 07:20:44 +02:00
|
|
|
throwable.setStackTrace(in.readArray(i -> {
|
|
|
|
final String declaringClasss = i.readString();
|
|
|
|
final String fileName = i.readOptionalString();
|
|
|
|
final String methodName = i.readString();
|
|
|
|
final int lineNumber = i.readVInt();
|
|
|
|
return new StackTraceElement(declaringClasss, methodName, fileName, lineNumber);
|
|
|
|
}, StackTraceElement[]::new));
|
2015-06-29 12:53:08 +02:00
|
|
|
|
|
|
|
int numSuppressed = in.readVInt();
|
|
|
|
for (int i = 0; i < numSuppressed; i++) {
|
2016-07-04 08:41:06 -04:00
|
|
|
throwable.addSuppressed(in.readException());
|
2015-06-29 12:53:08 +02:00
|
|
|
}
|
|
|
|
return throwable;
|
|
|
|
}
|
|
|
|
|
2015-06-29 20:39:04 +02:00
|
|
|
/**
|
|
|
|
* Serializes the given exceptions stacktrace elements as well as it's suppressed exceptions to the given output stream.
|
|
|
|
*/
|
2020-03-25 09:21:51 -04:00
|
|
|
public static <T extends Throwable> T writeStackTraces(T throwable, StreamOutput out,
|
|
|
|
Writer<Throwable> exceptionWriter) throws IOException {
|
2020-07-29 07:20:44 +02:00
|
|
|
out.writeArray((o, v) -> {
|
|
|
|
o.writeString(v.getClassName());
|
|
|
|
o.writeOptionalString(v.getFileName());
|
|
|
|
o.writeString(v.getMethodName());
|
|
|
|
o.writeVInt(v.getLineNumber());
|
|
|
|
}, throwable.getStackTrace());
|
|
|
|
out.writeArray(exceptionWriter, throwable.getSuppressed());
|
2015-06-29 12:53:08 +02:00
|
|
|
return throwable;
|
|
|
|
}
|
|
|
|
|
2016-02-02 18:14:41 -05:00
|
|
|
/**
|
2021-03-03 14:27:14 -06:00
|
|
|
* This is the list of Exceptions OpenSearch can throw over the wire or save into a corruption marker. Each value in the enum is a
|
2016-02-02 18:14:41 -05:00
|
|
|
* single exception tying the Class to an id for use of the encode side and the id back to a constructor for use on the decode side. As
|
|
|
|
* such its ok if the exceptions to change names so long as their constructor can still read the exception. Each exception is listed
|
|
|
|
* in id order below. If you want to remove an exception leave a tombstone comment and mark the id as null in
|
|
|
|
* ExceptionSerializationTests.testIds.ids.
|
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
private enum OpenSearchExceptionHandle {
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_SHARD_SNAPSHOT_FAILED_EXCEPTION(org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException::new, 0, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
DFS_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.search.dfs.DfsPhaseExecutionException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.search.dfs.DfsPhaseExecutionException::new, 1, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
EXECUTION_CANCELLED_EXCEPTION(org.elasticsearch.common.util.CancellableThreads.ExecutionCancelledException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.common.util.CancellableThreads.ExecutionCancelledException::new, 2, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
MASTER_NOT_DISCOVERED_EXCEPTION(org.elasticsearch.discovery.MasterNotDiscoveredException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.discovery.MasterNotDiscoveredException::new, 3, UNKNOWN_VERSION_ADDED),
|
2021-03-03 16:44:34 -06:00
|
|
|
ELASTICSEARCH_SECURITY_EXCEPTION(org.elasticsearch.OpenSearchSecurityException.class,
|
|
|
|
org.elasticsearch.OpenSearchSecurityException::new, 4, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_SHARD_RESTORE_EXCEPTION(org.elasticsearch.index.snapshots.IndexShardRestoreException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.snapshots.IndexShardRestoreException::new, 5, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_CLOSED_EXCEPTION(org.elasticsearch.indices.IndexClosedException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.indices.IndexClosedException::new, 6, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
BIND_HTTP_EXCEPTION(org.elasticsearch.http.BindHttpException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.http.BindHttpException::new, 7, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
REDUCE_SEARCH_PHASE_EXCEPTION(org.elasticsearch.action.search.ReduceSearchPhaseException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.action.search.ReduceSearchPhaseException::new, 8, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
NODE_CLOSED_EXCEPTION(org.elasticsearch.node.NodeClosedException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.node.NodeClosedException::new, 9, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
SNAPSHOT_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.SnapshotFailedEngineException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.engine.SnapshotFailedEngineException::new, 10, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
SHARD_NOT_FOUND_EXCEPTION(org.elasticsearch.index.shard.ShardNotFoundException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.shard.ShardNotFoundException::new, 11, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
CONNECT_TRANSPORT_EXCEPTION(org.elasticsearch.transport.ConnectTransportException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.transport.ConnectTransportException::new, 12, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
NOT_SERIALIZABLE_TRANSPORT_EXCEPTION(org.elasticsearch.transport.NotSerializableTransportException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.transport.NotSerializableTransportException::new, 13, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
RESPONSE_HANDLER_FAILURE_TRANSPORT_EXCEPTION(org.elasticsearch.transport.ResponseHandlerFailureTransportException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.transport.ResponseHandlerFailureTransportException::new, 14, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_CREATION_EXCEPTION(org.elasticsearch.indices.IndexCreationException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.indices.IndexCreationException::new, 15, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_NOT_FOUND_EXCEPTION(org.elasticsearch.index.IndexNotFoundException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.IndexNotFoundException::new, 16, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
ILLEGAL_SHARD_ROUTING_STATE_EXCEPTION(org.elasticsearch.cluster.routing.IllegalShardRoutingStateException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.cluster.routing.IllegalShardRoutingStateException::new, 17, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
BROADCAST_SHARD_OPERATION_FAILED_EXCEPTION(org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException::new, 18, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
RESOURCE_NOT_FOUND_EXCEPTION(org.elasticsearch.ResourceNotFoundException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.ResourceNotFoundException::new, 19, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
ACTION_TRANSPORT_EXCEPTION(org.elasticsearch.transport.ActionTransportException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.transport.ActionTransportException::new, 20, UNKNOWN_VERSION_ADDED),
|
2021-03-03 16:36:43 -06:00
|
|
|
ELASTICSEARCH_GENERATION_EXCEPTION(org.elasticsearch.OpenSearchGenerationException.class,
|
|
|
|
org.elasticsearch.OpenSearchGenerationException::new, 21, UNKNOWN_VERSION_ADDED),
|
2015-10-02 14:08:10 +02:00
|
|
|
// 22 was CreateFailedEngineException
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_SHARD_STARTED_EXCEPTION(org.elasticsearch.index.shard.IndexShardStartedException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.shard.IndexShardStartedException::new, 23, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
SEARCH_CONTEXT_MISSING_EXCEPTION(org.elasticsearch.search.SearchContextMissingException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.search.SearchContextMissingException::new, 24, UNKNOWN_VERSION_ADDED),
|
2016-06-30 13:41:53 +02:00
|
|
|
GENERAL_SCRIPT_EXCEPTION(org.elasticsearch.script.GeneralScriptException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.script.GeneralScriptException::new, 25, UNKNOWN_VERSION_ADDED),
|
2017-06-07 17:11:27 +02:00
|
|
|
// 26 was BatchOperationException
|
2021-03-10 10:19:28 -08:00
|
|
|
SNAPSHOT_CREATION_EXCEPTION(org.opensearch.snapshots.SnapshotCreationException.class,
|
|
|
|
org.opensearch.snapshots.SnapshotCreationException::new, 27, UNKNOWN_VERSION_ADDED),
|
2018-01-04 11:00:29 +00:00
|
|
|
// 28 was DeleteFailedEngineException, deprecated in 6.0, removed in 7.0
|
2016-02-02 18:14:41 -05:00
|
|
|
DOCUMENT_MISSING_EXCEPTION(org.elasticsearch.index.engine.DocumentMissingException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.engine.DocumentMissingException::new, 29, UNKNOWN_VERSION_ADDED),
|
2021-03-10 10:19:28 -08:00
|
|
|
SNAPSHOT_EXCEPTION(org.opensearch.snapshots.SnapshotException.class,
|
|
|
|
org.opensearch.snapshots.SnapshotException::new, 30, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INVALID_ALIAS_NAME_EXCEPTION(org.elasticsearch.indices.InvalidAliasNameException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.indices.InvalidAliasNameException::new, 31, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INVALID_INDEX_NAME_EXCEPTION(org.elasticsearch.indices.InvalidIndexNameException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.indices.InvalidIndexNameException::new, 32, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_PRIMARY_SHARD_NOT_ALLOCATED_EXCEPTION(org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException::new, 33, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
TRANSPORT_EXCEPTION(org.elasticsearch.transport.TransportException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.transport.TransportException::new, 34, UNKNOWN_VERSION_ADDED),
|
2021-03-03 22:53:14 -06:00
|
|
|
ELASTICSEARCH_PARSE_EXCEPTION(org.elasticsearch.OpenSearchParseException.class,
|
|
|
|
org.elasticsearch.OpenSearchParseException::new, 35, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
SEARCH_EXCEPTION(org.elasticsearch.search.SearchException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.search.SearchException::new, 36, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
MAPPER_EXCEPTION(org.elasticsearch.index.mapper.MapperException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.mapper.MapperException::new, 37, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INVALID_TYPE_NAME_EXCEPTION(org.elasticsearch.indices.InvalidTypeNameException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.indices.InvalidTypeNameException::new, 38, UNKNOWN_VERSION_ADDED),
|
2021-03-10 10:19:28 -08:00
|
|
|
SNAPSHOT_RESTORE_EXCEPTION(org.opensearch.snapshots.SnapshotRestoreException.class,
|
|
|
|
org.opensearch.snapshots.SnapshotRestoreException::new, 39, UNKNOWN_VERSION_ADDED),
|
2016-11-21 12:51:06 +01:00
|
|
|
PARSING_EXCEPTION(org.elasticsearch.common.ParsingException.class, org.elasticsearch.common.ParsingException::new, 40,
|
|
|
|
UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_SHARD_CLOSED_EXCEPTION(org.elasticsearch.index.shard.IndexShardClosedException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.shard.IndexShardClosedException::new, 41, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
RECOVER_FILES_RECOVERY_EXCEPTION(org.elasticsearch.indices.recovery.RecoverFilesRecoveryException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.indices.recovery.RecoverFilesRecoveryException::new, 42, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
TRUNCATED_TRANSLOG_EXCEPTION(org.elasticsearch.index.translog.TruncatedTranslogException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.translog.TruncatedTranslogException::new, 43, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
RECOVERY_FAILED_EXCEPTION(org.elasticsearch.indices.recovery.RecoveryFailedException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.indices.recovery.RecoveryFailedException::new, 44, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_SHARD_RELOCATED_EXCEPTION(org.elasticsearch.index.shard.IndexShardRelocatedException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.shard.IndexShardRelocatedException::new, 45, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
NODE_SHOULD_NOT_CONNECT_EXCEPTION(org.elasticsearch.transport.NodeShouldNotConnectException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.transport.NodeShouldNotConnectException::new, 46, UNKNOWN_VERSION_ADDED),
|
2016-11-14 17:09:57 +01:00
|
|
|
// 47 used to be for IndexTemplateAlreadyExistsException which was deprecated in 5.1 removed in 6.0
|
2016-02-02 18:14:41 -05:00
|
|
|
TRANSLOG_CORRUPTED_EXCEPTION(org.elasticsearch.index.translog.TranslogCorruptedException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.translog.TranslogCorruptedException::new, 48, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
CLUSTER_BLOCK_EXCEPTION(org.elasticsearch.cluster.block.ClusterBlockException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.cluster.block.ClusterBlockException::new, 49, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
FETCH_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.search.fetch.FetchPhaseExecutionException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.search.fetch.FetchPhaseExecutionException::new, 50, UNKNOWN_VERSION_ADDED),
|
2016-11-14 17:09:57 +01:00
|
|
|
// 51 used to be for IndexShardAlreadyExistsException which was deprecated in 5.1 removed in 6.0
|
2016-02-02 18:14:41 -05:00
|
|
|
VERSION_CONFLICT_ENGINE_EXCEPTION(org.elasticsearch.index.engine.VersionConflictEngineException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.engine.VersionConflictEngineException::new, 52, UNKNOWN_VERSION_ADDED),
|
|
|
|
ENGINE_EXCEPTION(org.elasticsearch.index.engine.EngineException.class, org.elasticsearch.index.engine.EngineException::new, 53,
|
|
|
|
UNKNOWN_VERSION_ADDED),
|
2015-10-02 14:08:10 +02:00
|
|
|
// 54 was DocumentAlreadyExistsException, which is superseded by VersionConflictEngineException
|
2021-03-09 11:45:04 -06:00
|
|
|
NO_SUCH_NODE_EXCEPTION(org.opensearch.action.NoSuchNodeException.class, org.opensearch.action.NoSuchNodeException::new, 55,
|
2016-11-21 12:51:06 +01:00
|
|
|
UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
SETTINGS_EXCEPTION(org.elasticsearch.common.settings.SettingsException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.common.settings.SettingsException::new, 56, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_TEMPLATE_MISSING_EXCEPTION(org.elasticsearch.indices.IndexTemplateMissingException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.indices.IndexTemplateMissingException::new, 57, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
SEND_REQUEST_TRANSPORT_EXCEPTION(org.elasticsearch.transport.SendRequestTransportException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.transport.SendRequestTransportException::new, 58, UNKNOWN_VERSION_ADDED),
|
2018-03-16 14:34:36 -04:00
|
|
|
// 59 used to be EsRejectedExecutionException
|
2017-06-08 12:10:46 +02:00
|
|
|
// 60 used to be for EarlyTerminationException
|
2016-04-14 23:51:08 -04:00
|
|
|
// 61 used to be for RoutingValidationException
|
2016-02-02 18:14:41 -05:00
|
|
|
NOT_SERIALIZABLE_EXCEPTION_WRAPPER(org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper::new, 62, UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
ALIAS_FILTER_PARSING_EXCEPTION(org.elasticsearch.indices.AliasFilterParsingException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.indices.AliasFilterParsingException::new, 63, UNKNOWN_VERSION_ADDED),
|
2016-11-14 17:09:57 +01:00
|
|
|
// 64 was DeleteByQueryFailedEngineException, which was removed in 5.0
|
2016-11-21 12:51:06 +01:00
|
|
|
GATEWAY_EXCEPTION(org.elasticsearch.gateway.GatewayException.class, org.elasticsearch.gateway.GatewayException::new, 65,
|
|
|
|
UNKNOWN_VERSION_ADDED),
|
2016-02-02 18:14:41 -05:00
|
|
|
INDEX_SHARD_NOT_RECOVERING_EXCEPTION(org.elasticsearch.index.shard.IndexShardNotRecoveringException.class,
|
2016-11-21 12:51:06 +01:00
|
|
|
org.elasticsearch.index.shard.IndexShardNotRecoveringException::new, 66, UNKNOWN_VERSION_ADDED),
|
|
|
|
HTTP_EXCEPTION(org.elasticsearch.http.HttpException.class, org.elasticsearch.http.HttpException::new, 67, UNKNOWN_VERSION_ADDED),
|
2021-03-03 14:27:14 -06:00
|
|
|
ELASTICSEARCH_EXCEPTION(OpenSearchException.class,
|
|
|
|
OpenSearchException::new, 68, UNKNOWN_VERSION_ADDED),
|
2021-03-10 10:19:28 -08:00
|
|
|
SNAPSHOT_MISSING_EXCEPTION(org.opensearch.snapshots.SnapshotMissingException.class,
|
|
|
|
org.opensearch.snapshots.SnapshotMissingException::new, 69, UNKNOWN_VERSION_ADDED),
|
2021-03-09 11:45:04 -06:00
|
|
|
PRIMARY_MISSING_ACTION_EXCEPTION(org.opensearch.action.PrimaryMissingActionException.class,
|
|
|
|
org.opensearch.action.PrimaryMissingActionException::new, 70, UNKNOWN_VERSION_ADDED),
|
|
|
|
        // Handles with wire ids 71-160. Each entry binds a concrete exception class to a
        // stable numeric id (written to the wire by exception serialization) and the version
        // in which the id was added. Ids must never be renumbered or reused; gaps in the
        // sequence (80, 85, 87, 89, 93, 95, ...) mark ids retired together with their class.
        // NOTE(review): the list mixes org.elasticsearch and org.opensearch packages - this
        // looks like a mid-fork rename in progress (e.g. ELASTICSEARCH_TIMEOUT_EXCEPTION
        // references org.elasticsearch.OpenSearchTimeoutException). Confirm each class's
        // actual package before moving any of them; changing an entry here without moving
        // the class breaks compilation, and changing an id breaks wire compatibility.
        FAILED_NODE_EXCEPTION(org.opensearch.action.FailedNodeException.class, org.opensearch.action.FailedNodeException::new, 71,
            UNKNOWN_VERSION_ADDED),
        SEARCH_PARSE_EXCEPTION(org.elasticsearch.search.SearchParseException.class, org.elasticsearch.search.SearchParseException::new, 72,
            UNKNOWN_VERSION_ADDED),
        CONCURRENT_SNAPSHOT_EXECUTION_EXCEPTION(org.opensearch.snapshots.ConcurrentSnapshotExecutionException.class,
            org.opensearch.snapshots.ConcurrentSnapshotExecutionException::new, 73, UNKNOWN_VERSION_ADDED),
        BLOB_STORE_EXCEPTION(org.elasticsearch.common.blobstore.BlobStoreException.class,
            org.elasticsearch.common.blobstore.BlobStoreException::new, 74, UNKNOWN_VERSION_ADDED),
        INCOMPATIBLE_CLUSTER_STATE_VERSION_EXCEPTION(org.elasticsearch.cluster.IncompatibleClusterStateVersionException.class,
            org.elasticsearch.cluster.IncompatibleClusterStateVersionException::new, 75, UNKNOWN_VERSION_ADDED),
        RECOVERY_ENGINE_EXCEPTION(org.elasticsearch.index.engine.RecoveryEngineException.class,
            org.elasticsearch.index.engine.RecoveryEngineException::new, 76, UNKNOWN_VERSION_ADDED),
        UNCATEGORIZED_EXECUTION_EXCEPTION(org.elasticsearch.common.util.concurrent.UncategorizedExecutionException.class,
            org.elasticsearch.common.util.concurrent.UncategorizedExecutionException::new, 77, UNKNOWN_VERSION_ADDED),
        TIMESTAMP_PARSING_EXCEPTION(org.opensearch.action.TimestampParsingException.class,
            org.opensearch.action.TimestampParsingException::new, 78, UNKNOWN_VERSION_ADDED),
        ROUTING_MISSING_EXCEPTION(org.opensearch.action.RoutingMissingException.class,
            org.opensearch.action.RoutingMissingException::new, 79, UNKNOWN_VERSION_ADDED),
        // 80 was IndexFailedEngineException, deprecated in 6.0, removed in 7.0
        INDEX_SHARD_RESTORE_FAILED_EXCEPTION(org.elasticsearch.index.snapshots.IndexShardRestoreFailedException.class,
            org.elasticsearch.index.snapshots.IndexShardRestoreFailedException::new, 81, UNKNOWN_VERSION_ADDED),
        REPOSITORY_EXCEPTION(org.elasticsearch.repositories.RepositoryException.class,
            org.elasticsearch.repositories.RepositoryException::new, 82, UNKNOWN_VERSION_ADDED),
        RECEIVE_TIMEOUT_TRANSPORT_EXCEPTION(org.elasticsearch.transport.ReceiveTimeoutTransportException.class,
            org.elasticsearch.transport.ReceiveTimeoutTransportException::new, 83, UNKNOWN_VERSION_ADDED),
        NODE_DISCONNECTED_EXCEPTION(org.elasticsearch.transport.NodeDisconnectedException.class,
            org.elasticsearch.transport.NodeDisconnectedException::new, 84, UNKNOWN_VERSION_ADDED),
        // 85 used to be for AlreadyExpiredException
        AGGREGATION_EXECUTION_EXCEPTION(org.elasticsearch.search.aggregations.AggregationExecutionException.class,
            org.elasticsearch.search.aggregations.AggregationExecutionException::new, 86, UNKNOWN_VERSION_ADDED),
        // 87 used to be for MergeMappingException
        INVALID_INDEX_TEMPLATE_EXCEPTION(org.elasticsearch.indices.InvalidIndexTemplateException.class,
            org.elasticsearch.indices.InvalidIndexTemplateException::new, 88, UNKNOWN_VERSION_ADDED),
        REFRESH_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.RefreshFailedEngineException.class,
            org.elasticsearch.index.engine.RefreshFailedEngineException::new, 90, UNKNOWN_VERSION_ADDED),
        AGGREGATION_INITIALIZATION_EXCEPTION(org.elasticsearch.search.aggregations.AggregationInitializationException.class,
            org.elasticsearch.search.aggregations.AggregationInitializationException::new, 91, UNKNOWN_VERSION_ADDED),
        DELAY_RECOVERY_EXCEPTION(org.elasticsearch.indices.recovery.DelayRecoveryException.class,
            org.elasticsearch.indices.recovery.DelayRecoveryException::new, 92, UNKNOWN_VERSION_ADDED),
        // 93 used to be for IndexWarmerMissingException
        NO_NODE_AVAILABLE_EXCEPTION(org.elasticsearch.client.transport.NoNodeAvailableException.class,
            org.elasticsearch.client.transport.NoNodeAvailableException::new, 94, UNKNOWN_VERSION_ADDED),
        INVALID_SNAPSHOT_NAME_EXCEPTION(org.opensearch.snapshots.InvalidSnapshotNameException.class,
            org.opensearch.snapshots.InvalidSnapshotNameException::new, 96, UNKNOWN_VERSION_ADDED),
        ILLEGAL_INDEX_SHARD_STATE_EXCEPTION(org.elasticsearch.index.shard.IllegalIndexShardStateException.class,
            org.elasticsearch.index.shard.IllegalIndexShardStateException::new, 97, UNKNOWN_VERSION_ADDED),
        INDEX_SHARD_SNAPSHOT_EXCEPTION(org.elasticsearch.index.snapshots.IndexShardSnapshotException.class,
            org.elasticsearch.index.snapshots.IndexShardSnapshotException::new, 98, UNKNOWN_VERSION_ADDED),
        INDEX_SHARD_NOT_STARTED_EXCEPTION(org.elasticsearch.index.shard.IndexShardNotStartedException.class,
            org.elasticsearch.index.shard.IndexShardNotStartedException::new, 99, UNKNOWN_VERSION_ADDED),
        SEARCH_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.action.search.SearchPhaseExecutionException.class,
            org.elasticsearch.action.search.SearchPhaseExecutionException::new, 100, UNKNOWN_VERSION_ADDED),
        ACTION_NOT_FOUND_TRANSPORT_EXCEPTION(org.elasticsearch.transport.ActionNotFoundTransportException.class,
            org.elasticsearch.transport.ActionNotFoundTransportException::new, 101, UNKNOWN_VERSION_ADDED),
        TRANSPORT_SERIALIZATION_EXCEPTION(org.elasticsearch.transport.TransportSerializationException.class,
            org.elasticsearch.transport.TransportSerializationException::new, 102, UNKNOWN_VERSION_ADDED),
        REMOTE_TRANSPORT_EXCEPTION(org.elasticsearch.transport.RemoteTransportException.class,
            org.elasticsearch.transport.RemoteTransportException::new, 103, UNKNOWN_VERSION_ADDED),
        ENGINE_CREATION_FAILURE_EXCEPTION(org.elasticsearch.index.engine.EngineCreationFailureException.class,
            org.elasticsearch.index.engine.EngineCreationFailureException::new, 104, UNKNOWN_VERSION_ADDED),
        ROUTING_EXCEPTION(org.elasticsearch.cluster.routing.RoutingException.class,
            org.elasticsearch.cluster.routing.RoutingException::new, 105, UNKNOWN_VERSION_ADDED),
        INDEX_SHARD_RECOVERY_EXCEPTION(org.elasticsearch.index.shard.IndexShardRecoveryException.class,
            org.elasticsearch.index.shard.IndexShardRecoveryException::new, 106, UNKNOWN_VERSION_ADDED),
        REPOSITORY_MISSING_EXCEPTION(org.elasticsearch.repositories.RepositoryMissingException.class,
            org.elasticsearch.repositories.RepositoryMissingException::new, 107, UNKNOWN_VERSION_ADDED),
        DOCUMENT_SOURCE_MISSING_EXCEPTION(org.elasticsearch.index.engine.DocumentSourceMissingException.class,
            org.elasticsearch.index.engine.DocumentSourceMissingException::new, 109, UNKNOWN_VERSION_ADDED),
        // 110 used to be FlushNotAllowedEngineException
        NO_CLASS_SETTINGS_EXCEPTION(org.elasticsearch.common.settings.NoClassSettingsException.class,
            org.elasticsearch.common.settings.NoClassSettingsException::new, 111, UNKNOWN_VERSION_ADDED),
        BIND_TRANSPORT_EXCEPTION(org.elasticsearch.transport.BindTransportException.class,
            org.elasticsearch.transport.BindTransportException::new, 112, UNKNOWN_VERSION_ADDED),
        ALIASES_NOT_FOUND_EXCEPTION(org.opensearch.rest.action.admin.indices.AliasesNotFoundException.class,
            org.opensearch.rest.action.admin.indices.AliasesNotFoundException::new, 113, UNKNOWN_VERSION_ADDED),
        INDEX_SHARD_RECOVERING_EXCEPTION(org.elasticsearch.index.shard.IndexShardRecoveringException.class,
            org.elasticsearch.index.shard.IndexShardRecoveringException::new, 114, UNKNOWN_VERSION_ADDED),
        TRANSLOG_EXCEPTION(org.elasticsearch.index.translog.TranslogException.class,
            org.elasticsearch.index.translog.TranslogException::new, 115, UNKNOWN_VERSION_ADDED),
        PROCESS_CLUSTER_EVENT_TIMEOUT_EXCEPTION(org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException.class,
            org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException::new, 116, UNKNOWN_VERSION_ADDED),
        RETRY_ON_PRIMARY_EXCEPTION(ReplicationOperation.RetryOnPrimaryException.class,
            ReplicationOperation.RetryOnPrimaryException::new, 117, UNKNOWN_VERSION_ADDED),
        // NOTE(review): constant name and package still say "elasticsearch" while the class
        // is already renamed to OpenSearchTimeoutException - mid-rename state, confirm package.
        ELASTICSEARCH_TIMEOUT_EXCEPTION(org.elasticsearch.OpenSearchTimeoutException.class,
            org.elasticsearch.OpenSearchTimeoutException::new, 118, UNKNOWN_VERSION_ADDED),
        QUERY_PHASE_EXECUTION_EXCEPTION(org.elasticsearch.search.query.QueryPhaseExecutionException.class,
            org.elasticsearch.search.query.QueryPhaseExecutionException::new, 119, UNKNOWN_VERSION_ADDED),
        REPOSITORY_VERIFICATION_EXCEPTION(org.elasticsearch.repositories.RepositoryVerificationException.class,
            org.elasticsearch.repositories.RepositoryVerificationException::new, 120, UNKNOWN_VERSION_ADDED),
        INVALID_AGGREGATION_PATH_EXCEPTION(org.elasticsearch.search.aggregations.InvalidAggregationPathException.class,
            org.elasticsearch.search.aggregations.InvalidAggregationPathException::new, 121, UNKNOWN_VERSION_ADDED),
        // 123 used to be IndexAlreadyExistsException and was renamed
        RESOURCE_ALREADY_EXISTS_EXCEPTION(ResourceAlreadyExistsException.class,
            ResourceAlreadyExistsException::new, 123, UNKNOWN_VERSION_ADDED),
        // 124 used to be Script.ScriptParseException
        HTTP_REQUEST_ON_TRANSPORT_EXCEPTION(TcpTransport.HttpRequestOnTransportException.class,
            TcpTransport.HttpRequestOnTransportException::new, 125, UNKNOWN_VERSION_ADDED),
        MAPPER_PARSING_EXCEPTION(org.elasticsearch.index.mapper.MapperParsingException.class,
            org.elasticsearch.index.mapper.MapperParsingException::new, 126, UNKNOWN_VERSION_ADDED),
        // 127 used to be org.elasticsearch.search.SearchContextException
        SEARCH_SOURCE_BUILDER_EXCEPTION(org.elasticsearch.search.builder.SearchSourceBuilderException.class,
            org.elasticsearch.search.builder.SearchSourceBuilderException::new, 128, UNKNOWN_VERSION_ADDED),
        // 129 was EngineClosedException
        NO_SHARD_AVAILABLE_ACTION_EXCEPTION(org.opensearch.action.NoShardAvailableActionException.class,
            org.opensearch.action.NoShardAvailableActionException::new, 130, UNKNOWN_VERSION_ADDED),
        UNAVAILABLE_SHARDS_EXCEPTION(org.opensearch.action.UnavailableShardsException.class,
            org.opensearch.action.UnavailableShardsException::new, 131, UNKNOWN_VERSION_ADDED),
        FLUSH_FAILED_ENGINE_EXCEPTION(org.elasticsearch.index.engine.FlushFailedEngineException.class,
            org.elasticsearch.index.engine.FlushFailedEngineException::new, 132, UNKNOWN_VERSION_ADDED),
        CIRCUIT_BREAKING_EXCEPTION(org.elasticsearch.common.breaker.CircuitBreakingException.class,
            org.elasticsearch.common.breaker.CircuitBreakingException::new, 133, UNKNOWN_VERSION_ADDED),
        NODE_NOT_CONNECTED_EXCEPTION(org.elasticsearch.transport.NodeNotConnectedException.class,
            org.elasticsearch.transport.NodeNotConnectedException::new, 134, UNKNOWN_VERSION_ADDED),
        STRICT_DYNAMIC_MAPPING_EXCEPTION(org.elasticsearch.index.mapper.StrictDynamicMappingException.class,
            org.elasticsearch.index.mapper.StrictDynamicMappingException::new, 135, UNKNOWN_VERSION_ADDED),
        RETRY_ON_REPLICA_EXCEPTION(org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException.class,
            org.elasticsearch.action.support.replication.TransportReplicationAction.RetryOnReplicaException::new, 136,
            UNKNOWN_VERSION_ADDED),
        TYPE_MISSING_EXCEPTION(org.elasticsearch.indices.TypeMissingException.class,
            org.elasticsearch.indices.TypeMissingException::new, 137, UNKNOWN_VERSION_ADDED),
        FAILED_TO_COMMIT_CLUSTER_STATE_EXCEPTION(org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException.class,
            org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException::new, 140, UNKNOWN_VERSION_ADDED),
        QUERY_SHARD_EXCEPTION(org.elasticsearch.index.query.QueryShardException.class,
            org.elasticsearch.index.query.QueryShardException::new, 141, UNKNOWN_VERSION_ADDED),
        NO_LONGER_PRIMARY_SHARD_EXCEPTION(ShardStateAction.NoLongerPrimaryShardException.class,
            ShardStateAction.NoLongerPrimaryShardException::new, 142, UNKNOWN_VERSION_ADDED),
        SCRIPT_EXCEPTION(org.elasticsearch.script.ScriptException.class, org.elasticsearch.script.ScriptException::new, 143,
            UNKNOWN_VERSION_ADDED),
        NOT_MASTER_EXCEPTION(org.elasticsearch.cluster.NotMasterException.class, org.elasticsearch.cluster.NotMasterException::new, 144,
            UNKNOWN_VERSION_ADDED),
        // NOTE(review): same mid-rename state as id 118 - class renamed, package not yet moved.
        STATUS_EXCEPTION(org.elasticsearch.OpenSearchStatusException.class, org.elasticsearch.OpenSearchStatusException::new, 145,
            UNKNOWN_VERSION_ADDED),
        TASK_CANCELLED_EXCEPTION(org.elasticsearch.tasks.TaskCancelledException.class,
            org.elasticsearch.tasks.TaskCancelledException::new, 146, UNKNOWN_VERSION_ADDED),
        SHARD_LOCK_OBTAIN_FAILED_EXCEPTION(org.elasticsearch.env.ShardLockObtainFailedException.class,
            org.elasticsearch.env.ShardLockObtainFailedException::new, 147, UNKNOWN_VERSION_ADDED),
        // 148 was UnknownNamedObjectException
        TOO_MANY_BUCKETS_EXCEPTION(MultiBucketConsumerService.TooManyBucketsException.class,
            MultiBucketConsumerService.TooManyBucketsException::new, 149, Version.V_6_2_0),
        COORDINATION_STATE_REJECTED_EXCEPTION(org.elasticsearch.cluster.coordination.CoordinationStateRejectedException.class,
            org.elasticsearch.cluster.coordination.CoordinationStateRejectedException::new, 150, Version.V_7_0_0),
        SNAPSHOT_IN_PROGRESS_EXCEPTION(org.opensearch.snapshots.SnapshotInProgressException.class,
            org.opensearch.snapshots.SnapshotInProgressException::new, 151, Version.V_6_7_0),
        NO_SUCH_REMOTE_CLUSTER_EXCEPTION(org.elasticsearch.transport.NoSuchRemoteClusterException.class,
            org.elasticsearch.transport.NoSuchRemoteClusterException::new, 152, Version.V_6_7_0),
        RETENTION_LEASE_ALREADY_EXISTS_EXCEPTION(
            org.elasticsearch.index.seqno.RetentionLeaseAlreadyExistsException.class,
            org.elasticsearch.index.seqno.RetentionLeaseAlreadyExistsException::new,
            153,
            Version.V_6_7_0),
        RETENTION_LEASE_NOT_FOUND_EXCEPTION(
            org.elasticsearch.index.seqno.RetentionLeaseNotFoundException.class,
            org.elasticsearch.index.seqno.RetentionLeaseNotFoundException::new,
            154,
            Version.V_6_7_0),
        SHARD_NOT_IN_PRIMARY_MODE_EXCEPTION(
            org.elasticsearch.index.shard.ShardNotInPrimaryModeException.class,
            org.elasticsearch.index.shard.ShardNotInPrimaryModeException::new,
            155,
            Version.V_6_8_1),
        RETENTION_LEASE_INVALID_RETAINING_SEQUENCE_NUMBER_EXCEPTION(
            org.elasticsearch.index.seqno.RetentionLeaseInvalidRetainingSeqNoException.class,
            org.elasticsearch.index.seqno.RetentionLeaseInvalidRetainingSeqNoException::new,
            156,
            Version.V_7_5_0),
        INGEST_PROCESSOR_EXCEPTION(
            org.elasticsearch.ingest.IngestProcessorException.class,
            org.elasticsearch.ingest.IngestProcessorException::new,
            157,
            Version.V_7_5_0),
        PEER_RECOVERY_NOT_FOUND_EXCEPTION(
            org.elasticsearch.indices.recovery.PeerRecoveryNotFound.class,
            org.elasticsearch.indices.recovery.PeerRecoveryNotFound::new,
            158,
            Version.V_7_9_0),
        NODE_HEALTH_CHECK_FAILURE_EXCEPTION(
            org.elasticsearch.cluster.coordination.NodeHealthCheckFailureException.class,
            org.elasticsearch.cluster.coordination.NodeHealthCheckFailureException::new,
            159,
            Version.V_7_9_0),
        NO_SEED_NODE_LEFT_EXCEPTION(
            org.elasticsearch.transport.NoSeedNodeLeftException.class,
            org.elasticsearch.transport.NoSeedNodeLeftException::new,
            160,
            Version.V_7_10_0);
|
2016-11-17 14:30:21 +00:00
|
|
|
|
2021-03-03 14:27:14 -06:00
|
|
|
        // Concrete exception class this handle represents; kept explicitly because it
        // cannot be recovered reliably from the constructor reference alone.
        final Class<? extends OpenSearchException> exceptionClass;
        // Deserializing constructor: rebuilds the exception from a StreamInput off the wire.
        final CheckedFunction<StreamInput, ? extends OpenSearchException, IOException> constructor;
        // Stable wire id for this exception type; must never be renumbered or reused.
        final int id;
        // First version that knows how to (de)serialize this exception type.
        final Version versionAdded;
|
2015-09-25 14:32:44 -04:00
|
|
|
|
2021-03-03 14:27:14 -06:00
|
|
|
        /**
         * Registers one exception type under a stable wire id.
         *
         * @param exceptionClass the concrete exception class
         * @param constructor    deserializing constructor reading the exception from a stream
         * @param id             stable wire id; must be unique and never reused
         * @param versionAdded   first version that knows this exception type on the wire
         */
        <E extends OpenSearchException> OpenSearchExceptionHandle(Class<E> exceptionClass,
                                                                  CheckedFunction<StreamInput, E, IOException> constructor, int id,
                                                                  Version versionAdded) {
            // We need the exceptionClass because you can't dig it out of the constructor reliably.
            this.exceptionClass = exceptionClass;
            this.constructor = constructor;
            this.versionAdded = versionAdded;
            this.id = id;
        }
|
2015-09-25 14:32:44 -04:00
|
|
|
}
|
|
|
|
|
2017-04-04 00:17:05 -04:00
|
|
|
/**
|
|
|
|
* Returns an array of all registered handle IDs. These are the IDs for every registered
|
|
|
|
* exception.
|
|
|
|
*
|
|
|
|
* @return an array of all registered handle IDs
|
|
|
|
*/
|
|
|
|
static int[] ids() {
|
2021-03-03 14:27:14 -06:00
|
|
|
return Arrays.stream(OpenSearchExceptionHandle.values()).mapToInt(h -> h.id).toArray();
|
2017-04-04 00:17:05 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Returns an array of all registered pairs of handle IDs and exception classes. These pairs are
|
|
|
|
* provided for every registered exception.
|
|
|
|
*
|
|
|
|
* @return an array of all registered pairs of handle IDs and exception classes
|
|
|
|
*/
|
2021-03-03 14:27:14 -06:00
|
|
|
static Tuple<Integer, Class<? extends OpenSearchException>>[] classes() {
|
2017-04-04 00:17:05 -04:00
|
|
|
@SuppressWarnings("unchecked")
|
2021-03-03 14:27:14 -06:00
|
|
|
final Tuple<Integer, Class<? extends OpenSearchException>>[] ts =
|
|
|
|
Arrays.stream(OpenSearchExceptionHandle.values())
|
2017-04-04 00:17:05 -04:00
|
|
|
.map(h -> Tuple.tuple(h.id, h.exceptionClass)).toArray(Tuple[]::new);
|
|
|
|
return ts;
|
|
|
|
}
|
|
|
|
|
2015-09-25 14:32:44 -04:00
|
|
|
    static {
        // Build both lookup maps once from the single authoritative enum:
        // wire id -> deserializing constructor, used when reading an exception off the wire.
        ID_TO_SUPPLIER = unmodifiableMap(Arrays
            .stream(OpenSearchExceptionHandle.values()).collect(Collectors.toMap(e -> e.id, e -> e.constructor)));
        // exception class -> handle, used when writing an exception to the wire.
        // NOTE(review): field name still says ELASTICSEARCH post-fork; renaming it would
        // touch every usage site, so it is left as-is here.
        CLASS_TO_ELASTICSEARCH_EXCEPTION_HANDLE = unmodifiableMap(Arrays
            .stream(OpenSearchExceptionHandle.values()).collect(Collectors.toMap(e -> e.exceptionClass, e -> e)));
    }
|
|
|
|
|
2016-01-24 22:47:38 +01:00
|
|
|
public Index getIndex() {
|
2017-01-24 16:12:45 +01:00
|
|
|
List<String> index = getMetadata(INDEX_METADATA_KEY);
|
2015-07-03 15:40:09 +02:00
|
|
|
if (index != null && index.isEmpty() == false) {
|
2017-01-24 16:12:45 +01:00
|
|
|
List<String> index_uuid = getMetadata(INDEX_METADATA_KEY_UUID);
|
2016-01-24 22:47:38 +01:00
|
|
|
return new Index(index.get(0), index_uuid.get(0));
|
2015-07-03 15:40:09 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
|
|
|
|
public ShardId getShardId() {
|
2017-01-24 16:12:45 +01:00
|
|
|
List<String> shard = getMetadata(SHARD_METADATA_KEY);
|
2015-07-03 15:40:09 +02:00
|
|
|
if (shard != null && shard.isEmpty() == false) {
|
|
|
|
return new ShardId(getIndex(), Integer.parseInt(shard.get(0)));
|
|
|
|
}
|
|
|
|
return null;
|
|
|
|
}
|
|
|
|
|
|
|
|
public void setIndex(Index index) {
|
|
|
|
if (index != null) {
|
2017-01-24 16:12:45 +01:00
|
|
|
addMetadata(INDEX_METADATA_KEY, index.getName());
|
|
|
|
addMetadata(INDEX_METADATA_KEY_UUID, index.getUUID());
|
2015-07-03 15:40:09 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
public void setIndex(String index) {
|
|
|
|
if (index != null) {
|
2016-01-24 22:47:38 +01:00
|
|
|
setIndex(new Index(index, INDEX_UUID_NA_VALUE));
|
2015-07-03 15:40:09 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
public void setShard(ShardId shardId) {
|
|
|
|
if (shardId != null) {
|
2016-01-24 22:47:38 +01:00
|
|
|
setIndex(shardId.getIndex());
|
2017-01-24 16:12:45 +01:00
|
|
|
addMetadata(SHARD_METADATA_KEY, Integer.toString(shardId.id()));
|
2015-07-03 15:40:09 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
    /**
     * Records the resources this exception relates to (for example alias or snapshot
     * names). Ids and type are stored under separate metadata keys.
     *
     * @param type the resource type; must not be {@code null}
     * @param id   one or more resource identifiers
     */
    public void setResources(String type, String... id) {
        assert type != null;
        addMetadata(RESOURCE_METADATA_ID_KEY, id);
        addMetadata(RESOURCE_METADATA_TYPE_KEY, type);
    }
|
|
|
|
|
|
|
|
    /**
     * Returns the resource ids recorded via {@link #setResources(String, String...)},
     * or {@code null} if none were set.
     */
    public List<String> getResourceId() {
        return getMetadata(RESOURCE_METADATA_ID_KEY);
    }
|
|
|
|
|
|
|
|
public String getResourceType() {
|
2017-01-24 16:12:45 +01:00
|
|
|
List<String> header = getMetadata(RESOURCE_METADATA_TYPE_KEY);
|
2015-07-03 15:40:09 +02:00
|
|
|
if (header != null && header.isEmpty() == false) {
|
|
|
|
assert header.size() == 1;
|
|
|
|
return header.get(0);
|
|
|
|
}
|
|
|
|
return null;
|
|
|
|
}
|
2015-07-14 17:27:58 +02:00
|
|
|
|
2016-04-22 09:12:53 -07:00
|
|
|
// lower cases and adds underscores to transitions in a name
|
|
|
|
private static String toUnderscoreCase(String value) {
|
|
|
|
StringBuilder sb = new StringBuilder();
|
|
|
|
boolean changed = false;
|
|
|
|
for (int i = 0; i < value.length(); i++) {
|
|
|
|
char c = value.charAt(i);
|
|
|
|
if (Character.isUpperCase(c)) {
|
|
|
|
if (!changed) {
|
|
|
|
// copy it over here
|
|
|
|
for (int j = 0; j < i; j++) {
|
|
|
|
sb.append(value.charAt(j));
|
|
|
|
}
|
|
|
|
changed = true;
|
|
|
|
if (i == 0) {
|
|
|
|
sb.append(Character.toLowerCase(c));
|
|
|
|
} else {
|
|
|
|
sb.append('_');
|
|
|
|
sb.append(Character.toLowerCase(c));
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
sb.append('_');
|
|
|
|
sb.append(Character.toLowerCase(c));
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if (changed) {
|
|
|
|
sb.append(c);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if (!changed) {
|
|
|
|
return value;
|
|
|
|
}
|
|
|
|
return sb.toString();
|
|
|
|
}
|
2016-11-23 15:49:05 -05:00
|
|
|
|
2010-02-08 15:30:06 +02:00
|
|
|
}
|