Prevent losing stacktraces when exceptions occur
This commit removes unnecessary use of ExceptionsHelper where we should instead serialize / deserialize the actual exception. It also fixes one of the odder problems, where the actual exception was never rendered / printed when `all shards failed` because of a missing cause. This commit unfortunately doesn't fix Snapshot/Restore, which is almost unfixable since it has to serialize to XContent and read back from it, which can't transport exceptions.
parent c6da8d5e13
commit bfa984e1c2
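For context, the pattern this commit moves to looks roughly like the sketch below (not part of the commit itself): the exception object is written with StreamOutput#writeThrowable and read back with StreamInput#readThrowable, so the receiving side keeps the original type, message, and stack trace instead of a flattened ExceptionsHelper string. The class and method names are the ones used in the diff below; the buffer copy via java.util.Arrays is illustrative glue, and exact round-trip behaviour depends on the Elasticsearch version.

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;

import java.io.IOException;
import java.util.Arrays;

public class ThrowableRoundTripSketch {
    public static void main(String[] args) throws IOException {
        Throwable original = new IllegalArgumentException("routing is required for [test]/[type1]/[1]");

        // Write the exception itself instead of ExceptionsHelper.detailedMessage(original).
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeThrowable(original);
        BytesReference bytes = out.bytes();

        // Read it back on the "other side": type, message and stack trace survive the round trip.
        byte[] buffer = Arrays.copyOfRange(bytes.array(), bytes.arrayOffset(), bytes.arrayOffset() + bytes.length());
        StreamInput in = StreamInput.wrap(buffer);
        Throwable copy = in.readThrowable();

        System.out.println(copy.getClass());                 // same exception type, not a string
        System.out.println(copy.getMessage());               // original message preserved
        System.out.println(copy.getStackTrace().length > 0); // original stack trace preserved
    }
}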
@@ -291,7 +291,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
     /**
      * Renders a cause exception as xcontent
      */
-    protected final void causeToXContent(XContentBuilder builder, Params params) throws IOException {
+    protected void causeToXContent(XContentBuilder builder, Params params) throws IOException {
         final Throwable cause = getCause();
         if (cause != null && params.paramAsBoolean(REST_EXCEPTION_SKIP_CAUSE, REST_EXCEPTION_SKIP_CAUSE_DEFAULT) == false) {
             builder.field("caused_by");

@@ -251,7 +251,7 @@ public class TransportMultiPercolateAction extends HandledTransportAction<MultiP
                 }

                 if (item.failed()) {
-                    shardResults.set(shardId.id(), new BroadcastShardOperationFailedException(shardId, item.error().string()));
+                    shardResults.set(shardId.id(), new BroadcastShardOperationFailedException(shardId, item.error()));
                 } else {
                     shardResults.set(shardId.id(), item.response());
                 }

@@ -103,7 +103,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
                     throw (ElasticsearchException) t;
                 } else {
                     logger.debug("{} failed to multi percolate", t, request.shardId());
-                    responseItem = new Response.Item(slot, new StringText(ExceptionsHelper.detailedMessage(t)));
+                    responseItem = new Response.Item(slot, t);
                 }
             }
             response.items.add(responseItem);

@@ -231,7 +231,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
                     item.response.writeTo(out);
                 } else {
                     out.writeBoolean(false);
-                    out.writeText(item.error);
+                    out.writeThrowable(item.error);
                 }
             }
         }

@@ -248,7 +248,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
                     shardResponse.readFrom(in);
                     items.add(new Item(slot, shardResponse));
                 } else {
-                    items.add(new Item(slot, in.readText()));
+                    items.add(new Item(slot, (Throwable)in.readThrowable()));
                 }
             }
         }

@@ -257,7 +257,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi

         private final int slot;
         private final PercolateShardResponse response;
-        private final Text error;
+        private final Throwable error;

         public Item(Integer slot, PercolateShardResponse response) {
             this.slot = slot;

@@ -265,7 +265,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
             this.error = null;
         }

-        public Item(Integer slot, Text error) {
+        public Item(Integer slot, Throwable error) {
             this.slot = slot;
             this.error = error;
             this.response = null;

@@ -279,7 +279,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
             return response;
         }

-        public Text error() {
+        public Throwable error() {
             return error;
         }

@@ -92,6 +92,17 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
         return shardFailures;
     }

+    public Throwable getCause() {
+        Throwable cause = super.getCause();
+        if (cause == null) {
+            // fall back to guessed root cause
+            for (ElasticsearchException rootCause : guessRootCauses()) {
+                return rootCause;
+            }
+        }
+        return cause;
+    }
+
     private static String buildMessage(String phaseName, String msg, ShardSearchFailure[] shardFailures) {
         StringBuilder sb = new StringBuilder();
         sb.append("Failed to execute phase [").append(phaseName).append("], ").append(msg);

@@ -123,7 +134,14 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
         }
         builder.endArray();
         super.innerToXContent(builder, params);
     }

+    @Override
+    protected void causeToXContent(XContentBuilder builder, Params params) throws IOException {
+        if (super.getCause() != null) {
+            // if the cause is null we inject a guessed root cause that would then be rendered twice, so we disable it manually
+            super.causeToXContent(builder, params);
+        }
+    }
+
     @Override

@@ -20,6 +20,7 @@
 package org.elasticsearch.action.termvectors;

 import com.google.common.collect.Iterators;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -40,17 +41,17 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
         private String index;
         private String type;
         private String id;
-        private String message;
+        private Throwable cause;

         Failure() {

         }

-        public Failure(String index, String type, String id, String message) {
+        public Failure(String index, String type, String id, Throwable cause) {
             this.index = index;
             this.type = type;
             this.id = id;
-            this.message = message;
+            this.cause = cause;
         }

         /**

@@ -75,10 +76,10 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
         }

         /**
-         * The failure message.
+         * The failure cause.
          */
-        public String getMessage() {
-            return this.message;
+        public Throwable getCause() {
+            return this.cause;
         }

         public static Failure readFailure(StreamInput in) throws IOException {

@@ -92,7 +93,7 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
             index = in.readString();
             type = in.readOptionalString();
             id = in.readString();
-            message = in.readString();
+            cause = in.readThrowable();
         }

         @Override

@@ -100,7 +101,7 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
             out.writeString(index);
             out.writeOptionalString(type);
             out.writeString(id);
-            out.writeString(message);
+            out.writeThrowable(cause);
         }
     }

@@ -132,7 +133,7 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
             builder.field(Fields._INDEX, failure.getIndex());
             builder.field(Fields._TYPE, failure.getType());
             builder.field(Fields._ID, failure.getId());
-            builder.field(Fields.ERROR, failure.getMessage());
+            ElasticsearchException.renderThrowable(builder, params, failure.getCause());
             builder.endObject();
         } else {
             TermVectorsResponse getResponse = response.getResponse();

@@ -150,7 +151,6 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
         static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
         static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
         static final XContentBuilderString _ID = new XContentBuilderString("_id");
-        static final XContentBuilderString ERROR = new XContentBuilderString("error");
     }

     @Override

@@ -31,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
+import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;

@@ -69,13 +70,13 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
                 termVectorsRequest.routing(clusterState.metaData().resolveIndexRouting(termVectorsRequest.routing(), termVectorsRequest.index()));
                 if (!clusterState.metaData().hasConcreteIndex(termVectorsRequest.index())) {
                     responses.set(i, new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(termVectorsRequest.index(),
-                            termVectorsRequest.type(), termVectorsRequest.id(), "[" + termVectorsRequest.index() + "] missing")));
+                            termVectorsRequest.type(), termVectorsRequest.id(), new IndexNotFoundException(termVectorsRequest.index()))));
                     continue;
                 }
                 String concreteSingleIndex = indexNameExpressionResolver.concreteSingleIndex(clusterState, (DocumentRequest) termVectorsRequest);
                 if (termVectorsRequest.routing() == null && clusterState.getMetaData().routingRequired(concreteSingleIndex, termVectorsRequest.type())) {
                     responses.set(i, new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(concreteSingleIndex, termVectorsRequest.type(), termVectorsRequest.id(),
-                            "routing is required for [" + concreteSingleIndex + "]/[" + termVectorsRequest.type() + "]/[" + termVectorsRequest.id() + "]")));
+                            new IllegalArgumentException("routing is required for [" + concreteSingleIndex + "]/[" + termVectorsRequest.type() + "]/[" + termVectorsRequest.id() + "]"))));
                     continue;
                 }
                 ShardId shardId = clusterService.operationRouting().getShards(clusterState, concreteSingleIndex,

@@ -111,12 +112,11 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
                 @Override
                 public void onFailure(Throwable e) {
                     // create failures for all relevant requests
-                    String message = ExceptionsHelper.detailedMessage(e);
                     for (int i = 0; i < shardRequest.locations.size(); i++) {
                         TermVectorsRequest termVectorsRequest = shardRequest.requests.get(i);
                         responses.set(shardRequest.locations.get(i), new MultiTermVectorsItemResponse(null,
                                 new MultiTermVectorsResponse.Failure(shardRequest.index(), termVectorsRequest.type(),
-                                        termVectorsRequest.id(), message)));
+                                        termVectorsRequest.id(), e)));
                     }
                     if (counter.decrementAndGet() == 0) {
                         finishHim();

@@ -90,7 +90,7 @@ public class TransportShardMultiTermsVectorAction extends TransportSingleShardAc
                 } else {
                     logger.debug("{} failed to execute multi term vectors for [{}]/[{}]", t, shardId, termVectorsRequest.type(), termVectorsRequest.id());
                     response.add(request.locations.get(i),
-                            new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), ExceptionsHelper.detailedMessage(t)));
+                            new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), t));
                 }
             }
         }

@@ -28,9 +28,11 @@ import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ExceptionsHelper;
 import org.apache.lucene.util.Version;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;

@@ -83,11 +85,12 @@ import java.util.zip.Checksum;
  */
 public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {

-    private static final String CODEC = "store";
-    private static final int VERSION_STACK_TRACE = 1; // we write the stack trace too since 1.4.0
-    private static final int VERSION_START = 0;
-    private static final int VERSION = VERSION_STACK_TRACE;
-    private static final String CORRUPTED = "corrupted_";
+    static final String CODEC = "store";
+    static final int VERSION_WRITE_THROWABLE = 2; // we write throwable since 2.0
+    static final int VERSION_STACK_TRACE = 1; // we write the stack trace too since 1.4.0
+    static final int VERSION_START = 0;
+    static final int VERSION = VERSION_WRITE_THROWABLE;
+    static final String CORRUPTED = "corrupted_";
     public static final String INDEX_STORE_STATS_REFRESH_INTERVAL = "index.store.stats_refresh_interval";

     private final AtomicBoolean isClosed = new AtomicBoolean(false);

@@ -562,6 +565,20 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
             if (file.startsWith(CORRUPTED)) {
                 try (ChecksumIndexInput input = directory.openChecksumInput(file, IOContext.READONCE)) {
                     int version = CodecUtil.checkHeader(input, CODEC, VERSION_START, VERSION);
+
+                    if (version == VERSION_WRITE_THROWABLE) {
+                        final int size = input.readVInt();
+                        final byte[] buffer = new byte[size];
+                        input.readBytes(buffer, 0, buffer.length);
+                        StreamInput in = StreamInput.wrap(buffer);
+                        Throwable t = in.readThrowable();
+                        if (t instanceof CorruptIndexException) {
+                            ex.add((CorruptIndexException) t);
+                        } else {
+                            ex.add(new CorruptIndexException(t.getMessage(), "preexisting_corruption", t));
+                        }
+                    } else {
+                        assert version == VERSION_START || version == VERSION_STACK_TRACE;
                         String msg = input.readString();
                         StringBuilder builder = new StringBuilder(shardId.toString());
                         builder.append(" Preexisting corrupted index [");

@@ -572,6 +589,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
                             builder.append(input.readString());
                         }
                         ex.add(new CorruptIndexException(builder.toString(), "preexisting_corruption"));
+                    }
                     CodecUtil.checkFooter(input);
                 }
             }

@@ -1446,8 +1464,11 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
            String uuid = CORRUPTED + Strings.randomBase64UUID();
            try (IndexOutput output = this.directory().createOutput(uuid, IOContext.DEFAULT)) {
                CodecUtil.writeHeader(output, CODEC, VERSION);
-               output.writeString(ExceptionsHelper.detailedMessage(exception, true, 0)); // handles null exception
-               output.writeString(ExceptionsHelper.stackTrace(exception));
+               BytesStreamOutput out = new BytesStreamOutput();
+               out.writeThrowable(exception);
+               BytesReference bytes = out.bytes();
+               output.writeVInt(bytes.length());
+               output.writeBytes(bytes.array(), bytes.arrayOffset(), bytes.length());
                CodecUtil.writeFooter(output);
            } catch (IOException ex) {
                logger.warn("Can't mark store as corrupted", ex);

@@ -31,13 +31,13 @@ public class VerificationFailure implements Streamable {

     private String nodeId;

-    private String cause;
+    private Throwable cause;

     VerificationFailure() {

     }

-    public VerificationFailure(String nodeId, String cause) {
+    public VerificationFailure(String nodeId, Throwable cause) {
         this.nodeId = nodeId;
         this.cause = cause;
     }

@@ -46,20 +46,20 @@ public class VerificationFailure implements Streamable {
         return nodeId;
     }

-    public String cause() {
+    public Throwable cause() {
         return cause;
     }

     @Override
     public void readFrom(StreamInput in) throws IOException {
         nodeId = in.readOptionalString();
-        cause = in.readOptionalString();
+        cause = in.readThrowable();
     }

     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalString(nodeId);
-        out.writeOptionalString(cause);
+        out.writeThrowable(cause);
     }

     public static VerificationFailure readNode(StreamInput in) throws IOException {

@@ -86,7 +86,7 @@ public class VerifyNodeRepositoryAction extends AbstractComponent {
                     doVerify(repository, verificationToken);
                 } catch (Throwable t) {
                     logger.warn("[{}] failed to verify repository", t, repository);
-                    errors.add(new VerificationFailure(node.id(), ExceptionsHelper.detailedMessage(t)));
+                    errors.add(new VerificationFailure(node.id(), t));
                 }
                 if (counter.decrementAndGet() == 0) {
                     finishVerification(listener, nodes, errors);

@@ -102,7 +102,7 @@ public class VerifyNodeRepositoryAction extends AbstractComponent {

                     @Override
                     public void handleException(TransportException exp) {
-                        errors.add(new VerificationFailure(node.id(), ExceptionsHelper.detailedMessage(exp)));
+                        errors.add(new VerificationFailure(node.id(), exp));
                         if (counter.decrementAndGet() == 0) {
                             finishVerification(listener, nodes, errors);
                         }

@@ -30,8 +30,8 @@ import java.io.IOException;
  * correctly serialized between nodes.
  */
 public class GroovyScriptCompilationException extends ElasticsearchException {
-    public GroovyScriptCompilationException(String message) {
-        super(message);
+    public GroovyScriptCompilationException(String message, Throwable cause) {
+        super(message, cause);
     }

     public GroovyScriptCompilationException(StreamInput in) throws IOException{

@@ -116,7 +116,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
             if (logger.isTraceEnabled()) {
                 logger.trace("exception compiling Groovy script:", e);
             }
-            throw new GroovyScriptCompilationException(ExceptionsHelper.detailedMessage(e));
+            throw new GroovyScriptCompilationException("failed to compile groovy script", e);
         }
     }

@@ -250,7 +250,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
             if (logger.isTraceEnabled()) {
                 logger.trace("failed to run " + compiledScript, e);
             }
-            throw new GroovyScriptExecutionException("failed to run " + compiledScript + ": " + ExceptionsHelper.detailedMessage(e));
+            throw new GroovyScriptExecutionException("failed to run " + compiledScript, e);
         }
     }

@@ -30,8 +30,9 @@ import java.io.IOException;
  * correctly serialized between nodes.
  */
 public class GroovyScriptExecutionException extends ElasticsearchException {
-    public GroovyScriptExecutionException(String message) {
-        super(message);
+
+    public GroovyScriptExecutionException(String message, Throwable cause) {
+        super(message, cause);
     }

     @Override

@@ -26,6 +26,8 @@ import org.apache.lucene.index.TermsEnum;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexNotFoundException;
+import org.elasticsearch.index.engine.VersionConflictEngineException;
 import org.junit.Test;

 import java.io.IOException;

@@ -60,7 +62,7 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
                 assertTrue(item.isFailed());
                 continue;
             } else if (item.isFailed()) {
-                fail(item.getFailure().getMessage());
+                fail(item.getFailure().getCause().getMessage());
             }
             Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc);
             validateResponse(item.getResponse(), luceneTermVectors, test);

@@ -78,7 +80,8 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
         mtvBuilder.add(requestBuilder.request());
         MultiTermVectorsResponse response = mtvBuilder.execute().actionGet();
         assertThat(response.getResponses().length, equalTo(1));
-        assertThat(response.getResponses()[0].getFailure().getMessage(), equalTo("[" + response.getResponses()[0].getIndex() + "] missing"));
+        assertThat(response.getResponses()[0].getFailure().getCause(), instanceOf(IndexNotFoundException.class));
+        assertThat(response.getResponses()[0].getFailure().getCause().getMessage(), equalTo("no such index"));
     }

     @Test

@@ -115,7 +118,7 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
         checkTermTexts(response.getResponses()[1].getResponse().getFields().terms("field"), new String[]{"value1"});
         assertThat(response.getResponses()[2].getFailure(), notNullValue());
         assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
-        assertThat(response.getResponses()[2].getFailure().getMessage(), startsWith("VersionConflictEngineException"));
+        assertThat(response.getResponses()[2].getFailure().getCause(), instanceOf(VersionConflictEngineException.class));

         //Version from Lucene index
         refresh();

@@ -136,7 +139,7 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
         checkTermTexts(response.getResponses()[1].getResponse().getFields().terms("field"), new String[]{"value1"});
         assertThat(response.getResponses()[2].getFailure(), notNullValue());
         assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
-        assertThat(response.getResponses()[2].getFailure().getMessage(), startsWith("VersionConflictEngineException"));
+        assertThat(response.getResponses()[2].getFailure().getCause(), instanceOf(VersionConflictEngineException.class));


         for (int i = 0; i < 3; i++) {

@@ -159,7 +162,7 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
         assertThat(response.getResponses()[1].getFailure(), notNullValue());
         assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
         assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
-        assertThat(response.getResponses()[1].getFailure().getMessage(), startsWith("VersionConflictEngineException"));
+        assertThat(response.getResponses()[1].getFailure().getCause(), instanceOf(VersionConflictEngineException.class));
         assertThat(response.getResponses()[2].getId(), equalTo("2"));
         assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
         assertThat(response.getResponses()[2].getFailure(), nullValue());

@@ -184,7 +187,7 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
         assertThat(response.getResponses()[1].getFailure(), notNullValue());
         assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
         assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
-        assertThat(response.getResponses()[1].getFailure().getMessage(), startsWith("VersionConflictEngineException"));
+        assertThat(response.getResponses()[1].getFailure().getCause(), instanceOf(VersionConflictEngineException.class));
         assertThat(response.getResponses()[2].getId(), equalTo("2"));
         assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
         assertThat(response.getResponses()[2].getFailure(), nullValue());

@@ -32,6 +32,7 @@ import org.apache.lucene.util.IOUtils;
 import org.apache.lucene.util.TestUtil;
 import org.apache.lucene.util.Version;
+import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.InputStreamStreamInput;
 import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
 import org.elasticsearch.common.lucene.Lucene;

@@ -1267,4 +1268,116 @@ public class StoreTests extends ESTestCase {
         assertFalse(Store.canOpenIndex(logger, tempDir));
         store.close();
     }
+
+    public void testDeserializeCorruptionException() throws IOException {
+        final ShardId shardId = new ShardId(new Index("index"), 1);
+        final Directory dir = new RAMDirectory(); // I use ram dir to prevent that virusscanner being a PITA
+        DirectoryService directoryService = new DirectoryService(shardId, Settings.EMPTY) {
+            @Override
+            public long throttleTimeInNanos() {
+                return 0;
+            }
+
+            @Override
+            public Directory newDirectory() throws IOException {
+                return dir;
+            }
+        };
+        Store store = new Store(shardId, Settings.EMPTY, directoryService, new DummyShardLock(shardId));
+        CorruptIndexException ex = new CorruptIndexException("foo", "bar");
+        store.markStoreCorrupted(ex);
+        try {
+            store.failIfCorrupted();
+            fail("should be corrupted");
+        } catch (CorruptIndexException e) {
+            assertEquals(ex.getMessage(), e.getMessage());
+            assertEquals(ex.toString(), e.toString());
+            assertEquals(ExceptionsHelper.stackTrace(ex), ExceptionsHelper.stackTrace(e));
+        }
+
+        store.removeCorruptionMarker();
+        assertFalse(store.isMarkedCorrupted());
+        FileNotFoundException ioe = new FileNotFoundException("foobar");
+        store.markStoreCorrupted(ioe);
+        try {
+            store.failIfCorrupted();
+            fail("should be corrupted");
+        } catch (CorruptIndexException e) {
+            assertEquals("foobar (resource=preexisting_corruption)", e.getMessage());
+            assertEquals(ExceptionsHelper.stackTrace(ioe), ExceptionsHelper.stackTrace(e.getCause()));
+        }
+        store.close();
+    }
+
+    public void testCanReadOldCorruptionMarker() throws IOException {
+        final ShardId shardId = new ShardId(new Index("index"), 1);
+        final Directory dir = new RAMDirectory(); // I use ram dir to prevent that virusscanner being a PITA
+        DirectoryService directoryService = new DirectoryService(shardId, Settings.EMPTY) {
+            @Override
+            public long throttleTimeInNanos() {
+                return 0;
+            }
+
+            @Override
+            public Directory newDirectory() throws IOException {
+                return dir;
+            }
+        };
+        Store store = new Store(shardId, Settings.EMPTY, directoryService, new DummyShardLock(shardId));
+
+        CorruptIndexException exception = new CorruptIndexException("foo", "bar");
+        String uuid = Store.CORRUPTED + Strings.randomBase64UUID();
+        try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) {
+            CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION_STACK_TRACE);
+            output.writeString(ExceptionsHelper.detailedMessage(exception, true, 0));
+            output.writeString(ExceptionsHelper.stackTrace(exception));
+            CodecUtil.writeFooter(output);
+        }
+        try {
+            store.failIfCorrupted();
+            fail("should be corrupted");
+        } catch (CorruptIndexException e) {
+            assertTrue(e.getMessage().startsWith("[index][1] Preexisting corrupted index [" + uuid +"] caused by: CorruptIndexException[foo (resource=bar)]"));
+            assertTrue(e.getMessage().contains(ExceptionsHelper.stackTrace(exception)));
+        }
+
+        store.removeCorruptionMarker();
+
+        try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) {
+            CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION_START);
+            output.writeString(ExceptionsHelper.detailedMessage(exception, true, 0));
+            CodecUtil.writeFooter(output);
+        }
+        try {
+            store.failIfCorrupted();
+            fail("should be corrupted");
+        } catch (CorruptIndexException e) {
+            assertTrue(e.getMessage().startsWith("[index][1] Preexisting corrupted index [" + uuid + "] caused by: CorruptIndexException[foo (resource=bar)]"));
+            assertFalse(e.getMessage().contains(ExceptionsHelper.stackTrace(exception)));
+        }
+
+        store.removeCorruptionMarker();
+
+        try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) {
+            CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION_START - 1); // corrupted header
+            CodecUtil.writeFooter(output);
+        }
+        try {
+            store.failIfCorrupted();
+            fail("should be too old");
+        } catch (IndexFormatTooOldException e) {
+        }
+
+        store.removeCorruptionMarker();
+        try (IndexOutput output = dir.createOutput(uuid, IOContext.DEFAULT)) {
+            CodecUtil.writeHeader(output, Store.CODEC, Store.VERSION+1); // corrupted header
+            CodecUtil.writeFooter(output);
+        }
+        try {
+            store.failIfCorrupted();
+            fail("should be too new");
+        } catch (IndexFormatTooNewException e) {
+        }
+        store.close();
+    }
 }

@@ -312,7 +312,7 @@ public class MultiPercolatorIT extends ESIntegTestCase {
             assertThat(item.getResponse().getShardFailures().length, equalTo(test.numPrimaries));
             for (ShardOperationFailedException shardFailure : item.getResponse().getShardFailures()) {
                 assertThat(shardFailure.reason(), containsString("Failed to derive xcontent"));
-                assertThat(shardFailure.status().getStatus(), equalTo(500));
+                assertThat(shardFailure.status().getStatus(), equalTo(400));
             }
         }

@@ -20,6 +20,7 @@
 package org.elasticsearch.rest;

 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.index.Index;

@@ -150,6 +151,8 @@ public class BytesRestResponseTests extends ESTestCase {
         String text = response.content().toUtf8();
         String expected = "{\"error\":{\"root_cause\":[{\"type\":\"test_query_parsing_exception\",\"reason\":\"foobar\",\"index\":\"foo\"}],\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"test_query_parsing_exception\",\"reason\":\"foobar\",\"index\":\"foo\"}}]},\"status\":400}";
         assertEquals(expected.trim(), text.trim());
+        String stackTrace = ExceptionsHelper.stackTrace(ex);
+        assertTrue(stackTrace.contains("Caused by: [foo] TestQueryParsingException[foobar]"));
     }

     public static class WithHeadersException extends ElasticsearchException {

@@ -449,12 +449,12 @@ public class SimpleRoutingIT extends ESIntegTestCase {
         assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2));
         assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1"));
         assertThat(multiTermVectorsResponse.getResponses()[0].isFailed(), equalTo(true));
-        assertThat(multiTermVectorsResponse.getResponses()[0].getFailure().getMessage(), equalTo("routing is required for [test]/[type1]/[1]"));
+        assertThat(multiTermVectorsResponse.getResponses()[0].getFailure().getCause().getMessage(), equalTo("routing is required for [test]/[type1]/[1]"));
         assertThat(multiTermVectorsResponse.getResponses()[0].getResponse(), nullValue());
         assertThat(multiTermVectorsResponse.getResponses()[1].getId(), equalTo("2"));
         assertThat(multiTermVectorsResponse.getResponses()[1].isFailed(), equalTo(true));
         assertThat(multiTermVectorsResponse.getResponses()[1].getResponse(),nullValue());
-        assertThat(multiTermVectorsResponse.getResponses()[1].getFailure().getMessage(), equalTo("routing is required for [test]/[type1]/[2]"));
+        assertThat(multiTermVectorsResponse.getResponses()[1].getFailure().getCause().getMessage(), equalTo("routing is required for [test]/[type1]/[2]"));
     }

     private static String indexOrAlias() {

@@ -24,6 +24,7 @@ import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.script.groovy.GroovyScriptExecutionException;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.junit.Test;


@@ -32,6 +33,7 @@ import java.util.Locale;

 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.instanceOf;

 /**
  * Tests for the Groovy security permissions

@@ -126,9 +128,10 @@ public class GroovySecurityIT extends ESIntegTestCase {
             // TODO: GroovyScriptExecutionException needs work:
             // fix it to preserve cause so we don't do this flaky string-check stuff
             for (ShardSearchFailure fail : fails) {
+                assertThat(fail.getCause(), instanceOf(GroovyScriptExecutionException.class));
                 assertTrue("unexpected exception" + fail.getCause(),
                         // different casing, depending on jvm impl...
-                        fail.getCause().toString().toLowerCase(Locale.ROOT).contains("accesscontrolexception[access denied"));
+                        fail.getCause().toString().toLowerCase(Locale.ROOT).contains("[access denied"));
             }
         }
     }

@@ -20,7 +20,6 @@
 package org.elasticsearch.cloud.aws.network;

 import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.cloud.aws.AwsEc2Service;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.network.NetworkService.CustomNameResolver;

@@ -113,9 +112,9 @@ public class Ec2NameResolver extends AbstractComponent implements CustomNameReso
             return new InetAddress[] { InetAddress.getByName(metadataResult) };
         } catch (IOException e) {
             if (warnOnFailure) {
-                logger.warn("failed to get metadata for [" + type.configName + "]: " + ExceptionsHelper.detailedMessage(e));
+                logger.warn("failed to get metadata for [" + type.configName + "]", e);
             } else {
-                logger.debug("failed to get metadata for [" + type.configName + "]: " + ExceptionsHelper.detailedMessage(e));
+                logger.debug("failed to get metadata for [" + type.configName + "]", e);
             }
             return null;
         } finally {

@@ -20,7 +20,6 @@
 package org.elasticsearch.cloud.aws.node;

 import org.apache.lucene.util.IOUtils;
-import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.cloud.aws.AwsEc2Service;
 import org.elasticsearch.cluster.node.DiscoveryNodeService;
 import org.elasticsearch.common.component.AbstractComponent;

@@ -68,7 +67,7 @@ public class Ec2CustomNodeAttributes extends AbstractComponent implements Discov
             }
             ec2Attributes.put("aws_availability_zone", metadataResult);
         } catch (IOException e) {
-            logger.debug("failed to get metadata for [placement/availability-zone]: " + ExceptionsHelper.detailedMessage(e));
+            logger.debug("failed to get metadata for [placement/availability-zone]", e);
         } finally {
             IOUtils.closeWhileHandlingException(in);
         }