Merge branch 'master' into feature/query-refactoring

Conflicts:
    core/src/main/java/org/elasticsearch/common/unit/Fuzziness.java
    core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java
    core/src/main/java/org/elasticsearch/index/query/IdsQueryParser.java
    core/src/main/java/org/elasticsearch/index/query/functionscore/random/RandomScoreFunctionParser.java
    core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java
Author: Christoph Büscher
Date: 2015-09-16 12:41:54 +02:00
Commit: ff74e94260
206 changed files with 1775 additions and 947 deletions

@@ -18,8 +18,6 @@
  */
 package org.apache.lucene.queries;
-import com.google.common.primitives.Ints;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.LeafReaderContext;
@@ -141,7 +139,7 @@ public abstract class BlendedTermQuery extends Query {
 }
 @Override
 protected int compare(int i, int j) {
-return Ints.compare(contexts[tieBreak[j]].docFreq(), contexts[tieBreak[i]].docFreq());
+return Integer.compare(contexts[tieBreak[j]].docFreq(), contexts[tieBreak[i]].docFreq());
 }
 }.sort(0, tieBreak.length);
 int prev = contexts[tieBreak[0]].docFreq();
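
The change above swaps Guava's Ints.compare for the JDK's Integer.compare, available since Java 7 and identical in semantics. A minimal standalone sketch of the same descending-by-docFreq ordering (the docFreq values here are made up for illustration):

    import java.util.Arrays;

    public class CompareSketch {
        public static void main(String[] args) {
            // hypothetical document frequencies, highest first after sorting
            Integer[] docFreqs = { 3, 42, 7 };
            // Integer.compare(b, a) gives a descending order, mirroring the diff
            Arrays.sort(docFreqs, (a, b) -> Integer.compare(b, a));
            System.out.println(Arrays.toString(docFreqs)); // [42, 7, 3]
        }
    }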

@@ -30,7 +30,7 @@ public final class XGeoUtils {
 public static final short BITS = 31;
 private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
 private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
-public static final double TOLERANCE = 1E-6;
+public static final double TOLERANCE = 1E-5;
 /** Minimum longitude value. */
 public static final double MIN_LON_INCL = -180.0D;

@@ -291,7 +291,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
 /**
 * Renders a cause exception as xcontent
 */
-protected final void causeToXContent(XContentBuilder builder, Params params) throws IOException {
+protected void causeToXContent(XContentBuilder builder, Params params) throws IOException {
 final Throwable cause = getCause();
 if (cause != null && params.paramAsBoolean(REST_EXCEPTION_SKIP_CAUSE, REST_EXCEPTION_SKIP_CAUSE_DEFAULT) == false) {
 builder.field("caused_by");

@@ -19,9 +19,9 @@
 package org.elasticsearch.action.admin.cluster.repositories.get;
-import com.google.common.collect.ObjectArrays;
 import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.common.util.ArrayUtils;
 /**
 * Get repository request builder
@@ -60,7 +60,7 @@ public class GetRepositoriesRequestBuilder extends MasterNodeReadOperationReques
 * @return builder
 */
 public GetRepositoriesRequestBuilder addRepositories(String... repositories) {
-request.repositories(ObjectArrays.concat(request.repositories(), repositories, String.class));
+request.repositories(ArrayUtils.concat(request.repositories(), repositories));
 return this;
 }
 }
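
Several request builders in this commit replace Guava's ObjectArrays.concat(first, second, String.class) with org.elasticsearch.common.util.ArrayUtils.concat(first, second). The sketch below is not the Elasticsearch implementation, only a plausible stand-in showing what a two-argument String concat helper at these call sites has to do:

    import java.util.Arrays;

    final class ArrayConcatSketch {
        // hypothetical helper mirroring the ArrayUtils.concat(String[], String[]) call sites above
        static String[] concat(String[] first, String[] second) {
            String[] result = Arrays.copyOf(first, first.length + second.length);
            System.arraycopy(second, 0, result, first.length, second.length);
            return result;
        }

        public static void main(String[] args) {
            String[] merged = concat(new String[] { "repo1" }, new String[] { "repo2", "repo3" });
            System.out.println(Arrays.toString(merged)); // [repo1, repo2, repo3]
        }
    }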

@@ -19,9 +19,9 @@
 package org.elasticsearch.action.admin.cluster.snapshots.get;
-import com.google.common.collect.ObjectArrays;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.common.util.ArrayUtils;
 /**
 * Get snapshots request builder
@@ -81,7 +81,7 @@ public class GetSnapshotsRequestBuilder extends MasterNodeOperationRequestBuilde
 * @return this builder
 */
 public GetSnapshotsRequestBuilder addSnapshots(String... snapshots) {
-request.snapshots(ObjectArrays.concat(request.snapshots(), snapshots, String.class));
+request.snapshots(ArrayUtils.concat(request.snapshots(), snapshots));
 return this;
 }
 }

@@ -19,10 +19,9 @@
 package org.elasticsearch.action.admin.cluster.snapshots.status;
-import com.google.common.collect.ObjectArrays;
-import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.common.util.ArrayUtils;
 /**
 * Snapshots status request builder
@@ -72,7 +71,7 @@ public class SnapshotsStatusRequestBuilder extends MasterNodeOperationRequestBui
 * @return this builder
 */
 public SnapshotsStatusRequestBuilder addSnapshots(String... snapshots) {
-request.snapshots(ObjectArrays.concat(request.snapshots(), snapshots, String.class));
+request.snapshots(ArrayUtils.concat(request.snapshots(), snapshots));
 return this;
 }
 }

@@ -19,13 +19,12 @@
 package org.elasticsearch.action.admin.indices.alias.get;
-import com.google.common.collect.ObjectArrays;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
-import org.elasticsearch.client.IndicesAdminClient;
+import org.elasticsearch.common.util.ArrayUtils;
 /**
 */
@@ -43,7 +42,7 @@ public abstract class BaseAliasesRequestBuilder<Response extends ActionResponse,
 @SuppressWarnings("unchecked")
 public Builder addAliases(String... aliases) {
-request.aliases(ObjectArrays.concat(request.aliases(), aliases, String.class));
+request.aliases(ArrayUtils.concat(request.aliases(), aliases));
 return (Builder) this;
 }
@@ -55,7 +54,7 @@ public abstract class BaseAliasesRequestBuilder<Response extends ActionResponse,
 @SuppressWarnings("unchecked")
 public Builder addIndices(String... indices) {
-request.indices(ObjectArrays.concat(request.indices(), indices, String.class));
+request.indices(ArrayUtils.concat(request.indices(), indices));
 return (Builder) this;
 }

@@ -19,12 +19,11 @@
 package org.elasticsearch.action.admin.indices.get;
-import com.google.common.collect.ObjectArrays;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.support.master.info.ClusterInfoRequest;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.util.ArrayUtils;
 import java.io.IOException;
 import java.util.Arrays;
@@ -115,7 +114,7 @@ public class GetIndexRequest extends ClusterInfoRequest<GetIndexRequest> {
 if (this.features == DEFAULT_FEATURES) {
 return features(features);
 } else {
-return features(ObjectArrays.concat(featuresAsEnums(), features, Feature.class));
+return features(ArrayUtils.concat(features(), features, Feature.class));
 }
 }
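
GetIndexRequest keeps a three-argument call, ArrayUtils.concat(features(), features, Feature.class), because the element type of a generic array cannot be recovered at runtime. A hedged sketch of what such a class-parameterized concat has to do (again an illustration, not the actual ArrayUtils code):

    import java.lang.reflect.Array;

    final class GenericConcatSketch {
        // hypothetical generic concat; the Class token is needed to allocate T[] reflectively
        @SuppressWarnings("unchecked")
        static <T> T[] concat(T[] first, T[] second, Class<T> type) {
            T[] result = (T[]) Array.newInstance(type, first.length + second.length);
            System.arraycopy(first, 0, result, 0, first.length);
            System.arraycopy(second, 0, result, first.length, second.length);
            return result;
        }

        public static void main(String[] args) {
            Integer[] merged = concat(new Integer[] { 1 }, new Integer[] { 2, 3 }, Integer.class);
            System.out.println(merged.length); // 3
        }
    }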

@@ -19,12 +19,10 @@
 package org.elasticsearch.action.admin.indices.mapping.get;
-import com.google.common.collect.ObjectArrays;
-import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRequestBuilder;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.ElasticsearchClient;
-import org.elasticsearch.client.IndicesAdminClient;
+import org.elasticsearch.common.util.ArrayUtils;
 /** A helper class to build {@link GetFieldMappingsRequest} objects */
 public class GetFieldMappingsRequestBuilder extends ActionRequestBuilder<GetFieldMappingsRequest, GetFieldMappingsResponse, GetFieldMappingsRequestBuilder> {
@@ -39,7 +37,7 @@ public class GetFieldMappingsRequestBuilder extends ActionRequestBuilder<GetFiel
 }
 public GetFieldMappingsRequestBuilder addIndices(String... indices) {
-request.indices(ObjectArrays.concat(request.indices(), indices, String.class));
+request.indices(ArrayUtils.concat(request.indices(), indices));
 return this;
 }
@@ -49,7 +47,7 @@ public class GetFieldMappingsRequestBuilder extends ActionRequestBuilder<GetFiel
 }
 public GetFieldMappingsRequestBuilder addTypes(String... types) {
-request.types(ObjectArrays.concat(request.types(), types, String.class));
+request.types(ArrayUtils.concat(request.types(), types));
 return this;
 }

@@ -19,10 +19,10 @@
 package org.elasticsearch.action.admin.indices.settings.get;
-import com.google.common.collect.ObjectArrays;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.common.util.ArrayUtils;
 /**
 */
@@ -38,7 +38,7 @@ public class GetSettingsRequestBuilder extends MasterNodeReadOperationRequestBui
 }
 public GetSettingsRequestBuilder addIndices(String... indices) {
-request.indices(ObjectArrays.concat(request.indices(), indices, String.class));
+request.indices(ArrayUtils.concat(request.indices(), indices));
 return this;
 }

@@ -19,9 +19,9 @@
 package org.elasticsearch.action.admin.indices.warmer.get;
-import com.google.common.collect.ObjectArrays;
 import org.elasticsearch.action.support.master.info.ClusterInfoRequestBuilder;
 import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.common.util.ArrayUtils;
 /**
 * Builder for {@link GetWarmersRequest}
@@ -40,7 +40,7 @@ public class GetWarmersRequestBuilder extends ClusterInfoRequestBuilder<GetWarme
 }
 public GetWarmersRequestBuilder addWarmers(String... warmers) {
-request.warmers(ObjectArrays.concat(request.warmers(), warmers, String.class));
+request.warmers(ArrayUtils.concat(request.warmers(), warmers));
 return this;
 }
 }

@@ -251,7 +251,7 @@ public class TransportMultiPercolateAction extends HandledTransportAction<MultiP
 }
 if (item.failed()) {
-shardResults.set(shardId.id(), new BroadcastShardOperationFailedException(shardId, item.error().string()));
+shardResults.set(shardId.id(), new BroadcastShardOperationFailedException(shardId, item.error()));
 } else {
 shardResults.set(shardId.id(), item.response());
 }

@@ -103,7 +103,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
 throw (ElasticsearchException) t;
 } else {
 logger.debug("{} failed to multi percolate", t, request.shardId());
-responseItem = new Response.Item(slot, new StringText(ExceptionsHelper.detailedMessage(t)));
+responseItem = new Response.Item(slot, t);
 }
 }
 response.items.add(responseItem);
@@ -231,7 +231,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
 item.response.writeTo(out);
 } else {
 out.writeBoolean(false);
-out.writeText(item.error);
+out.writeThrowable(item.error);
 }
 }
 }
@@ -248,7 +248,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
 shardResponse.readFrom(in);
 items.add(new Item(slot, shardResponse));
 } else {
-items.add(new Item(slot, in.readText()));
+items.add(new Item(slot, (Throwable)in.readThrowable()));
 }
 }
 }
@@ -257,7 +257,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
 private final int slot;
 private final PercolateShardResponse response;
-private final Text error;
+private final Throwable error;
 public Item(Integer slot, PercolateShardResponse response) {
 this.slot = slot;
@@ -265,7 +265,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
 this.error = null;
 }
-public Item(Integer slot, Text error) {
+public Item(Integer slot, Throwable error) {
 this.slot = slot;
 this.error = error;
 this.response = null;
@@ -279,7 +279,7 @@ public class TransportShardMultiPercolateAction extends TransportSingleShardActi
 return response;
 }
-public Text error() {
+public Throwable error() {
 return error;
 }

@@ -92,6 +92,17 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
 return shardFailures;
 }
+public Throwable getCause() {
+Throwable cause = super.getCause();
+if (cause == null) {
+// fall back to guessed root cause
+for (ElasticsearchException rootCause : guessRootCauses()) {
+return rootCause;
+}
+}
+return cause;
+}
 private static String buildMessage(String phaseName, String msg, ShardSearchFailure[] shardFailures) {
 StringBuilder sb = new StringBuilder();
 sb.append("Failed to execute phase [").append(phaseName).append("], ").append(msg);
@@ -123,7 +134,14 @@ public class SearchPhaseExecutionException extends ElasticsearchException {
 }
 builder.endArray();
 super.innerToXContent(builder, params);
+}
+@Override
+protected void causeToXContent(XContentBuilder builder, Params params) throws IOException {
+if (super.getCause() != null) {
+// if the cause is null we inject a guessed root cause that will then be rendered twice, so we disable it manually
+super.causeToXContent(builder, params);
+}
 }
 @Override

@@ -18,14 +18,12 @@
 */
 package org.elasticsearch.action.support.master.info;
-import com.google.common.collect.ObjectArrays;
 import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder;
-import org.elasticsearch.client.ClusterAdminClient;
 import org.elasticsearch.client.ElasticsearchClient;
-import org.elasticsearch.client.IndicesAdminClient;
+import org.elasticsearch.common.util.ArrayUtils;
 /**
 */
@@ -44,7 +42,7 @@ public abstract class ClusterInfoRequestBuilder<Request extends ClusterInfoReque
 @SuppressWarnings("unchecked")
 public Builder addIndices(String... indices) {
-request.indices(ObjectArrays.concat(request.indices(), indices, String.class));
+request.indices(ArrayUtils.concat(request.indices(), indices));
 return (Builder) this;
 }
@@ -56,7 +54,7 @@ public abstract class ClusterInfoRequestBuilder<Request extends ClusterInfoReque
 @SuppressWarnings("unchecked")
 public Builder addTypes(String... types) {
-request.types(ObjectArrays.concat(request.types(), types, String.class));
+request.types(ArrayUtils.concat(request.types(), types));
 return (Builder) this;
 }

@@ -20,6 +20,7 @@
 package org.elasticsearch.action.termvectors;
 import com.google.common.collect.Iterators;
+import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -40,17 +41,17 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
 private String index;
 private String type;
 private String id;
-private String message;
+private Throwable cause;
 Failure() {
 }
-public Failure(String index, String type, String id, String message) {
+public Failure(String index, String type, String id, Throwable cause) {
 this.index = index;
 this.type = type;
 this.id = id;
-this.message = message;
+this.cause = cause;
 }
 /**
@@ -75,10 +76,10 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
 }
 /**
-* The failure message.
+* The failure cause.
 */
-public String getMessage() {
-return this.message;
+public Throwable getCause() {
+return this.cause;
 }
 public static Failure readFailure(StreamInput in) throws IOException {
@@ -92,7 +93,7 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
 index = in.readString();
 type = in.readOptionalString();
 id = in.readString();
-message = in.readString();
+cause = in.readThrowable();
 }
 @Override
@@ -100,7 +101,7 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
 out.writeString(index);
 out.writeOptionalString(type);
 out.writeString(id);
-out.writeString(message);
+out.writeThrowable(cause);
 }
 }
@@ -132,7 +133,7 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
 builder.field(Fields._INDEX, failure.getIndex());
 builder.field(Fields._TYPE, failure.getType());
 builder.field(Fields._ID, failure.getId());
-builder.field(Fields.ERROR, failure.getMessage());
+ElasticsearchException.renderThrowable(builder, params, failure.getCause());
 builder.endObject();
 } else {
 TermVectorsResponse getResponse = response.getResponse();
@@ -150,7 +151,6 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
 static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
 static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
 static final XContentBuilderString _ID = new XContentBuilderString("_id");
-static final XContentBuilderString ERROR = new XContentBuilderString("error");
 }
 @Override

@@ -18,7 +18,6 @@
 */
 package org.elasticsearch.action.termvectors;
-import com.google.common.util.concurrent.AtomicLongMap;
 import org.apache.lucene.index.*;
 import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.search.similarities.DefaultSimilarity;
@@ -54,7 +53,7 @@ public class TermVectorsFilter {
 private final Set<String> selectedFields;
 private AggregatedDfs dfs;
 private Map<Term, ScoreTerm> scoreTerms;
-private AtomicLongMap<String> sizes;
+private Map<String, Integer> sizes = new HashMap<>();
 private TFIDFSimilarity similarity;
 public TermVectorsFilter(Fields termVectorsByField, Fields topLevelFields, Set<String> selectedFields, @Nullable AggregatedDfs dfs) {
@@ -64,7 +63,6 @@ public class TermVectorsFilter {
 this.dfs = dfs;
 this.scoreTerms = new HashMap<>();
-this.sizes = AtomicLongMap.create();
 this.similarity = new DefaultSimilarity();
 }
@@ -228,10 +226,12 @@ public class TermVectorsFilter {
 // retain the best terms for quick lookups
 ScoreTerm scoreTerm;
+int count = 0;
 while ((scoreTerm = queue.pop()) != null) {
 scoreTerms.put(new Term(scoreTerm.field, scoreTerm.word), scoreTerm);
-sizes.incrementAndGet(scoreTerm.field);
+count++;
 }
+sizes.put(fieldName, count);
 }
 }
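
TermVectorsFilter drops Guava's AtomicLongMap in favor of a plain HashMap<String, Integer>: the retained terms for one field are counted in a local variable and written once per field, so no per-increment map lookups (or atomicity) are needed. A small standalone sketch of that counting pattern, with made-up field and term names:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class FieldTermCountSketch {
        public static void main(String[] args) {
            Map<String, Integer> sizes = new HashMap<>();
            // hypothetical per-field term lists standing in for the popped queue entries
            Map<String, List<String>> termsByField = new HashMap<>();
            termsByField.put("title", Arrays.asList("foo", "bar"));
            termsByField.put("body", Arrays.asList("baz"));
            for (Map.Entry<String, List<String>> field : termsByField.entrySet()) {
                int count = 0;
                for (String term : field.getValue()) {
                    count++; // one increment per retained term, as in the diff
                }
                sizes.put(field.getKey(), count);
            }
            System.out.println(sizes); // e.g. {title=2, body=1}
        }
    }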

@@ -31,6 +31,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
+import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
@@ -69,13 +70,13 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
 termVectorsRequest.routing(clusterState.metaData().resolveIndexRouting(termVectorsRequest.routing(), termVectorsRequest.index()));
 if (!clusterState.metaData().hasConcreteIndex(termVectorsRequest.index())) {
 responses.set(i, new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(termVectorsRequest.index(),
-termVectorsRequest.type(), termVectorsRequest.id(), "[" + termVectorsRequest.index() + "] missing")));
+termVectorsRequest.type(), termVectorsRequest.id(), new IndexNotFoundException(termVectorsRequest.index()))));
 continue;
 }
 String concreteSingleIndex = indexNameExpressionResolver.concreteSingleIndex(clusterState, (DocumentRequest) termVectorsRequest);
 if (termVectorsRequest.routing() == null && clusterState.getMetaData().routingRequired(concreteSingleIndex, termVectorsRequest.type())) {
 responses.set(i, new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(concreteSingleIndex, termVectorsRequest.type(), termVectorsRequest.id(),
-"routing is required for [" + concreteSingleIndex + "]/[" + termVectorsRequest.type() + "]/[" + termVectorsRequest.id() + "]")));
+new IllegalArgumentException("routing is required for [" + concreteSingleIndex + "]/[" + termVectorsRequest.type() + "]/[" + termVectorsRequest.id() + "]"))));
 continue;
 }
 ShardId shardId = clusterService.operationRouting().getShards(clusterState, concreteSingleIndex,
@@ -111,12 +112,11 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
 @Override
 public void onFailure(Throwable e) {
 // create failures for all relevant requests
-String message = ExceptionsHelper.detailedMessage(e);
 for (int i = 0; i < shardRequest.locations.size(); i++) {
 TermVectorsRequest termVectorsRequest = shardRequest.requests.get(i);
 responses.set(shardRequest.locations.get(i), new MultiTermVectorsItemResponse(null,
 new MultiTermVectorsResponse.Failure(shardRequest.index(), termVectorsRequest.type(),
-termVectorsRequest.id(), message)));
+termVectorsRequest.id(), e)));
 }
 if (counter.decrementAndGet() == 0) {
 finishHim();

@@ -90,7 +90,7 @@ public class TransportShardMultiTermsVectorAction extends TransportSingleShardAc
 } else {
 logger.debug("{} failed to execute multi term vectors for [{}]/[{}]", t, shardId, termVectorsRequest.type(), termVectorsRequest.id());
 response.add(request.locations.get(i),
-new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), ExceptionsHelper.detailedMessage(t)));
+new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), t));
 }
 }
 }

@@ -19,6 +19,9 @@
 package org.elasticsearch.bootstrap;
+import java.util.Collections;
+import java.util.Set;
 /**
 * Exposes system startup information
 */
@@ -43,4 +46,14 @@ public final class BootstrapInfo {
 public static boolean isMemoryLocked() {
 return Natives.isMemoryLocked();
 }
+/**
+* Returns set of insecure plugins.
+* <p>
+* These are plugins with unresolved issues in third-party libraries,
+* that require additional privileges as a workaround.
+*/
+public static Set<String> getInsecurePluginList() {
+return Collections.unmodifiableSet(Security.INSECURE_PLUGINS.keySet());
+}
 }
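
getInsecurePluginList() hands callers a read-only view of the keys of Security.INSECURE_PLUGINS via Collections.unmodifiableSet. A minimal sketch of that exposure pattern (names here are placeholders, not the real Security fields):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.Set;

    public class ReadOnlyKeysSketch {
        private static final Map<String, String> PLUGINS = new HashMap<>();
        static {
            PLUGINS.put("example-plugin", "es.example.property"); // placeholder entry
        }

        // callers can read the names but cannot add or remove entries
        public static Set<String> pluginNames() {
            return Collections.unmodifiableSet(PLUGINS.keySet());
        }

        public static void main(String[] args) {
            System.out.println(pluginNames());   // [example-plugin]
            // pluginNames().add("x");           // would throw UnsupportedOperationException
        }
    }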

@@ -20,11 +20,14 @@
 package org.elasticsearch.bootstrap;
 import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.env.Environment;
 import java.io.*;
 import java.net.URL;
 import java.nio.file.AccessMode;
+import java.nio.file.DirectoryStream;
 import java.nio.file.FileAlreadyExistsException;
 import java.nio.file.Files;
 import java.nio.file.NotDirectoryException;
@@ -32,6 +35,7 @@ import java.nio.file.Path;
 import java.security.Permissions;
 import java.security.Policy;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.IdentityHashMap;
 import java.util.Map;
 import java.util.regex.Pattern;
@@ -95,6 +99,8 @@ final class Security {
 static void configure(Environment environment) throws Exception {
 // set properties for jar locations
 setCodebaseProperties();
+// set properties for problematic plugins
+setPluginCodebaseProperties(environment);
 // enable security policy: union of template and environment-based paths.
 Policy.setPolicy(new ESPolicy(createPermissions(environment)));
@@ -121,6 +127,9 @@ final class Security {
 static {
 Map<Pattern,String> m = new IdentityHashMap<>();
 m.put(Pattern.compile(".*lucene-core-.*\\.jar$"), "es.security.jar.lucene.core");
+m.put(Pattern.compile(".*lucene-test-framework-.*\\.jar$"), "es.security.jar.lucene.testframework");
+m.put(Pattern.compile(".*randomizedtesting-runner-.*\\.jar$"), "es.security.jar.randomizedtesting.runner");
+m.put(Pattern.compile(".*junit4-ant-.*\\.jar$"), "es.security.jar.randomizedtesting.junit4");
 m.put(Pattern.compile(".*securemock-.*\\.jar$"), "es.security.jar.elasticsearch.securemock");
 SPECIAL_JARS = Collections.unmodifiableMap(m);
 }
@@ -150,6 +159,46 @@ final class Security {
 }
 }
+// mapping of insecure plugins to codebase properties
+// note that this is only read once, when policy is parsed.
+static final Map<String,String> INSECURE_PLUGINS;
+static {
+Map<String,String> m = new HashMap<>();
+m.put("repository-s3", "es.security.insecure.plugin.repository-s3");
+m.put("discovery-ec2", "es.security.insecure.plugin.discovery-ec2");
+m.put("cloud-gce", "es.security.insecure.plugin.cloud-gce");
+INSECURE_PLUGINS = Collections.unmodifiableMap(m);
+}
+/**
+* Sets properties (codebase URLs) for policy files.
+* we look for matching plugins and set URLs to fit
+*/
+@SuppressForbidden(reason = "proper use of URL")
+static void setPluginCodebaseProperties(Environment environment) throws IOException {
+if (Files.exists(environment.pluginsFile())) {
+try (DirectoryStream<Path> stream = Files.newDirectoryStream(environment.pluginsFile())) {
+for (Path plugin : stream) {
+String prop = INSECURE_PLUGINS.get(plugin.getFileName().toString());
+if (prop != null) {
+if (System.getProperty(prop) != null) {
+throw new IllegalStateException("property: " + prop + " is unexpectedly set: " + System.getProperty(prop));
+}
+System.setProperty(prop, plugin.toUri().toURL().toString() + "*");
+ESLogger logger = Loggers.getLogger(Security.class);
+logger.warn("Adding permissions for insecure plugin [{}]", plugin.getFileName());
+logger.warn("There are unresolved issues with third-party code that may reduce the security of the system");
+}
+}
+}
+}
+for (String prop : INSECURE_PLUGINS.values()) {
+if (System.getProperty(prop) == null) {
+System.setProperty(prop, "file:/dev/null"); // no chance to be interpreted as "all"
+}
+}
+}
 /** returns dynamic Permissions to configured paths */
 static Permissions createPermissions(Environment environment) throws IOException {
 Permissions policy = new Permissions();
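
setPluginCodebaseProperties walks the plugins directory with Files.newDirectoryStream and publishes one codebase system property per known insecure plugin. The standalone sketch below exercises the same java.nio.file pattern against a temporary directory; the property names are invented for the example and are not the real Elasticsearch ones.

    import java.io.IOException;
    import java.nio.file.DirectoryStream;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class PluginScanSketch {
        public static void main(String[] args) throws IOException {
            Path pluginsDir = Files.createTempDirectory("plugins");
            Files.createDirectory(pluginsDir.resolve("repository-s3")); // stand-in plugin folder
            if (Files.exists(pluginsDir)) {
                try (DirectoryStream<Path> stream = Files.newDirectoryStream(pluginsDir)) {
                    for (Path plugin : stream) {
                        // invented property name; the real mapping lives in INSECURE_PLUGINS
                        String prop = "example.codebase." + plugin.getFileName();
                        System.setProperty(prop, plugin.toUri().toURL().toString() + "*");
                        System.out.println(prop + " = " + System.getProperty(prop));
                    }
                }
            }
        }
    }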

@@ -19,7 +19,6 @@
 package org.elasticsearch.cluster.metadata;
-import com.google.common.collect.UnmodifiableIterator;
 import org.elasticsearch.common.collect.Tuple;
 import java.util.ArrayList;
@@ -106,7 +105,7 @@ public interface AliasOrIndex {
 return new Iterable<Tuple<String, AliasMetaData>>() {
 @Override
 public Iterator<Tuple<String, AliasMetaData>> iterator() {
-return new UnmodifiableIterator<Tuple<String,AliasMetaData>>() {
+return new Iterator<Tuple<String,AliasMetaData>>() {
 int index = 0;
@@ -121,6 +120,11 @@ public interface AliasOrIndex {
 return new Tuple<>(indexMetaData.getIndex(), indexMetaData.getAliases().get(aliasName));
 }
+@Override
+public final void remove() {
+throw new UnsupportedOperationException();
+}
 };
 }
 };
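
With Guava's UnmodifiableIterator gone, the anonymous iterator now implements java.util.Iterator directly and overrides remove() to throw. A compact sketch of that read-only iterator shape over a plain array (the String element type is just for illustration):

    import java.util.Iterator;
    import java.util.NoSuchElementException;

    public class ReadOnlyIteratorSketch {
        static Iterator<String> iterate(String[] values) {
            return new Iterator<String>() {
                int index = 0;

                @Override
                public boolean hasNext() {
                    return index < values.length;
                }

                @Override
                public String next() {
                    if (!hasNext()) {
                        throw new NoSuchElementException();
                    }
                    return values[index++];
                }

                @Override
                public void remove() {
                    // same contract as the diff: iteration only, no structural changes
                    throw new UnsupportedOperationException();
                }
            };
        }

        public static void main(String[] args) {
            Iterator<String> it = iterate(new String[] { "alias1", "alias2" });
            while (it.hasNext()) {
                System.out.println(it.next());
            }
        }
    }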

@@ -21,7 +21,6 @@ package org.elasticsearch.cluster.metadata;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import com.google.common.base.Preconditions;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.Diff;
@@ -205,8 +204,12 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
 private final boolean useTypeForRouting;
 private IndexMetaData(String index, long version, State state, Settings settings, ImmutableOpenMap<String, MappingMetaData> mappings, ImmutableOpenMap<String, AliasMetaData> aliases, ImmutableOpenMap<String, Custom> customs) {
-Preconditions.checkArgument(settings.getAsInt(SETTING_NUMBER_OF_SHARDS, null) != null, "must specify numberOfShards for index [" + index + "]");
-Preconditions.checkArgument(settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, null) != null, "must specify numberOfReplicas for index [" + index + "]");
+if (settings.getAsInt(SETTING_NUMBER_OF_SHARDS, null) == null) {
+throw new IllegalArgumentException("must specify numberOfShards for index [" + index + "]");
+}
+if (settings.getAsInt(SETTING_NUMBER_OF_REPLICAS, null) == null) {
+throw new IllegalArgumentException("must specify numberOfReplicas for index [" + index + "]");
+}
 this.index = index;
 this.version = version;
 this.state = state;
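
This commit repeatedly replaces Guava's Preconditions.checkArgument(condition, message) with an explicit check that throws IllegalArgumentException, here and again in CliTool and CopyOnWriteHashMap further down. The two forms are equivalent; a tiny sketch of the replacement pattern with a placeholder setting name:

    public class CheckArgumentSketch {
        // explicit replacement for Preconditions.checkArgument(value != null, message)
        static void requireSetting(Integer numberOfShards) {
            if (numberOfShards == null) {
                throw new IllegalArgumentException("must specify numberOfShards");
            }
        }

        public static void main(String[] args) {
            requireSetting(5);     // passes silently
            requireSetting(null);  // throws IllegalArgumentException
        }
    }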

@@ -587,7 +587,7 @@ public class IndexNameExpressionResolver extends AbstractComponent {
 if (Regex.isMatchAllPattern(expression)) {
 // Can only happen if the expressions was initially: '-*'
 matches = metaData.getAliasAndIndexLookup();
-} else if (expression.endsWith("*")) {
+} else if (expression.indexOf("*") == expression.length() - 1) {
 // Suffix wildcard:
 assert expression.length() >= 2 : "expression [" + expression + "] should have at least a length of 2";
 String fromPrefix = expression.substring(0, expression.length() - 1);
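
The wildcard check is tightened from endsWith("*") to indexOf("*") == expression.length() - 1, which only matches when the first '*' is also the last character, i.e. a pure suffix wildcard with no earlier wildcard. A short illustration of the difference:

    public class SuffixWildcardSketch {
        static boolean isPureSuffixWildcard(String expression) {
            return expression.indexOf("*") == expression.length() - 1;
        }

        public static void main(String[] args) {
            System.out.println("logs-*".endsWith("*"));                // true
            System.out.println(isPureSuffixWildcard("logs-*"));        // true
            System.out.println("logs-*-2015*".endsWith("*"));          // true
            System.out.println(isPureSuffixWildcard("logs-*-2015*"));  // false: first '*' is not the last char
        }
    }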

@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.UnmodifiableIterator;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.Diffable;
@@ -60,18 +59,7 @@ import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.warmer.IndexWarmersMetaData;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
+import java.util.*;
 import java.util.stream.Collectors;
 import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
@@ -569,7 +557,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, Fr
 }
 @Override
-public UnmodifiableIterator<IndexMetaData> iterator() {
+public Iterator<IndexMetaData> iterator() {
 return indices.valuesIt();
 }

@@ -26,7 +26,6 @@ import org.elasticsearch.cluster.routing.DjbHashFunction;
 import org.elasticsearch.cluster.routing.HashFunction;
 import org.elasticsearch.cluster.routing.SimpleHashFunction;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
-import org.elasticsearch.common.Classes;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
@@ -219,7 +218,6 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
 /** All known byte-sized settings for an index. */
 public static final Set<String> INDEX_BYTES_SIZE_SETTINGS = ImmutableSet.of(
-"index.buffer_size",
 "index.merge.policy.floor_segment",
 "index.merge.policy.max_merged_segment",
 "index.merge.policy.max_merge_size",

@@ -22,7 +22,6 @@ package org.elasticsearch.cluster.node;
 import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-import com.google.common.collect.UnmodifiableIterator;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.common.Booleans;
@@ -34,11 +33,7 @@ import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.transport.TransportAddress;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 /**
 * This class holds all {@link DiscoveryNode} in the cluster and provides convenience methods to
@@ -69,7 +64,7 @@ public class DiscoveryNodes extends AbstractDiffable<DiscoveryNodes> implements
 }
 @Override
-public UnmodifiableIterator<DiscoveryNode> iterator() {
+public Iterator<DiscoveryNode> iterator() {
 return nodes.valuesIt();
 }

@@ -22,7 +22,6 @@ package org.elasticsearch.cluster.routing;
 import com.carrotsearch.hppc.IntSet;
 import com.carrotsearch.hppc.cursors.IntCursor;
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
-import com.google.common.collect.UnmodifiableIterator;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.AbstractDiffable;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -33,12 +32,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.index.shard.ShardId;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
 import java.util.concurrent.ThreadLocalRandom;
 /**
@@ -163,7 +157,7 @@ public class IndexRoutingTable extends AbstractDiffable<IndexRoutingTable> imple
 }
 @Override
-public UnmodifiableIterator<IndexShardRoutingTable> iterator() {
+public Iterator<IndexShardRoutingTable> iterator() {
 return shards.valuesIt();
 }

@@ -21,7 +21,6 @@ package org.elasticsearch.cluster.routing;
 import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.google.common.collect.Iterables;
 import com.google.common.collect.Iterators;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.ClusterState;
@@ -31,15 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.index.shard.ShardId;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 import java.util.function.Predicate;
 /**
@@ -308,7 +299,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
 }
 for (ShardRoutingState s : state) {
 if (s == ShardRoutingState.UNASSIGNED) {
-Iterables.addAll(shards, unassigned());
+unassigned().forEach(shards::add);
 break;
 }
 }
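
Guava's Iterables.addAll(shards, unassigned()) becomes unassigned().forEach(shards::add), using the default forEach added to Iterable in Java 8. A standalone sketch of the same copy-into-a-list move:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ForEachAddSketch {
        public static void main(String[] args) {
            // any Iterable works; a fixed list stands in for routingNodes.unassigned()
            Iterable<String> unassigned = Arrays.asList("shard-0", "shard-1");
            List<String> shards = new ArrayList<>();
            unassigned.forEach(shards::add); // same shape as the diff
            System.out.println(shards); // [shard-0, shard-1]
        }
    }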

@@ -21,8 +21,6 @@ package org.elasticsearch.cluster.routing;
 import com.carrotsearch.hppc.IntSet;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.UnmodifiableIterator;
 import org.elasticsearch.cluster.Diff;
 import org.elasticsearch.cluster.Diffable;
 import org.elasticsearch.cluster.DiffableUtils;
@@ -30,14 +28,11 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.index.IndexNotFoundException;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.function.Predicate;
 /**
@@ -72,7 +67,7 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
 }
 @Override
-public UnmodifiableIterator<IndexRoutingTable> iterator() {
+public Iterator<IndexRoutingTable> iterator() {
 return indicesRouting.values().iterator();
 }
@@ -379,7 +374,10 @@ public class RoutingTable implements Iterable<IndexRoutingTable>, Diffable<Routi
 indexBuilder.addShard(refData, shardRoutingEntry);
 }
 }
-for (ShardRouting shardRoutingEntry : Iterables.concat(routingNodes.unassigned(), routingNodes.unassigned().ignored())) {
+Iterable<ShardRouting> shardRoutingEntries = Iterables.concat(routingNodes.unassigned(), routingNodes.unassigned().ignored());
+for (ShardRouting shardRoutingEntry : shardRoutingEntries) {
 String index = shardRoutingEntry.index();
 IndexRoutingTable.Builder indexBuilder = indexRoutingTableBuilders.get(index);
 if (indexBuilder == null) {

@@ -19,7 +19,6 @@
 package org.elasticsearch.cluster.service;
-import com.google.common.collect.Iterables;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.*;
 import org.elasticsearch.cluster.ClusterState.Builder;
@@ -42,6 +41,7 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.text.StringText;
 import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.common.util.concurrent.*;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.discovery.DiscoveryService;
@@ -89,10 +89,7 @@ public class InternalClusterService extends AbstractLifecycleComponent<ClusterSe
 private final Collection<ClusterStateListener> lastClusterStateListeners = new CopyOnWriteArrayList<>();
 // TODO this is rather frequently changing I guess a Synced Set would be better here and a dedicated remove API
 private final Collection<ClusterStateListener> postAppliedListeners = new CopyOnWriteArrayList<>();
-private final Iterable<ClusterStateListener> preAppliedListeners = Iterables.concat(
-priorityClusterStateListeners,
-clusterStateListeners,
-lastClusterStateListeners);
+private final Iterable<ClusterStateListener> preAppliedListeners = Iterables.concat(priorityClusterStateListeners, clusterStateListeners, lastClusterStateListeners);
 private final LocalNodeMasterListeners localNodeMasterListeners;

@@ -20,8 +20,6 @@
 package org.elasticsearch.common;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Iterables;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.FastStringReader;
@@ -32,18 +30,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
 import java.io.BufferedReader;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Properties;
-import java.util.Random;
-import java.util.Set;
-import java.util.StringTokenizer;
-import java.util.TreeSet;
+import java.util.*;
 /**
 *
@@ -840,9 +827,6 @@ public class Strings {
 }
 public static String collectionToDelimitedString(Iterable<?> coll, String delim, String prefix, String suffix, StringBuilder sb) {
-if (Iterables.isEmpty(coll)) {
-return "";
-}
 Iterator<?> it = coll.iterator();
 while (it.hasNext()) {
 sb.append(prefix).append(it.next()).append(suffix);

@@ -19,11 +19,9 @@
 package org.elasticsearch.common.cli;
-import com.google.common.base.Preconditions;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.DefaultParser;
-import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.node.internal.InternalSettingsPreparer;
@@ -101,7 +99,9 @@ public abstract class CliTool {
 }
 protected CliTool(CliToolConfig config, Terminal terminal) {
-Preconditions.checkArgument(config.cmds().size() != 0, "At least one command must be configured");
+if (config.cmds().size() == 0) {
+throw new IllegalArgumentException("At least one command must be configured");
+}
 this.config = config;
 this.terminal = terminal;
 env = InternalSettingsPreparer.prepareEnvironment(EMPTY_SETTINGS, terminal);

@@ -19,13 +19,14 @@
 package org.elasticsearch.common.cli;
-import com.google.common.collect.ImmutableMap;
-import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionGroup;
 import org.apache.commons.cli.Options;
 import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
 /**
 *
@@ -38,18 +39,18 @@ public class CliToolConfig {
 private final Class<? extends CliTool> toolType;
 private final String name;
-private final ImmutableMap<String, Cmd> cmds;
+private final Map<String, Cmd> cmds;
 private static final HelpPrinter helpPrinter = new HelpPrinter();
 private CliToolConfig(String name, Class<? extends CliTool> toolType, Cmd[] cmds) {
 this.name = name;
 this.toolType = toolType;
-ImmutableMap.Builder<String, Cmd> cmdsBuilder = ImmutableMap.builder();
+final Map<String, Cmd> cmdsMapping = new HashMap<>();
 for (int i = 0; i < cmds.length; i++) {
-cmdsBuilder.put(cmds[i].name, cmds[i]);
+cmdsMapping.put(cmds[i].name, cmds[i]);
 }
-this.cmds = cmdsBuilder.build();
+this.cmds = Collections.unmodifiableMap(cmdsMapping);
 }
 public boolean isSingle() {
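
CliToolConfig swaps Guava's ImmutableMap.Builder for a mutable HashMap that is filled and then wrapped with Collections.unmodifiableMap. A brief sketch of that build-then-freeze pattern with placeholder command names:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class FrozenMapSketch {
        public static void main(String[] args) {
            String[] cmds = { "start", "version" }; // placeholder command names
            Map<String, Integer> mapping = new HashMap<>();
            for (int i = 0; i < cmds.length; i++) {
                mapping.put(cmds[i], i);
            }
            Map<String, Integer> frozen = Collections.unmodifiableMap(mapping);
            System.out.println(frozen);   // {start=0, version=1} (order may vary)
            // frozen.put("help", 2);     // would throw UnsupportedOperationException
        }
    }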

View File

@@ -18,26 +18,10 @@
  */
 package org.elasticsearch.common.collect;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Supplier;
-import com.google.common.base.Suppliers;
-import com.google.common.collect.UnmodifiableIterator;
 import org.apache.lucene.util.mutable.MutableValueInt;
 import java.lang.reflect.Array;
-import java.util.AbstractMap;
-import java.util.AbstractSet;
-import java.util.ArrayDeque;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Deque;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.NoSuchElementException;
-import java.util.Set;
-import java.util.function.Consumer;
-import java.util.stream.Collectors;
+import java.util.*;
 import java.util.stream.Stream;
 /**
@@ -434,7 +418,7 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
     }
-    private static class EntryIterator<K, V> extends UnmodifiableIterator<Map.Entry<K, V>> {
+    private static class EntryIterator<K, V> implements Iterator<Map.Entry<K, V>> {
         private final Deque<Map.Entry<K, V>> entries;
         private final Deque<Node<K, V>> nodes;
@@ -462,6 +446,11 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
             return entries.pop();
         }
+        @Override
+        public final void remove() {
+            throw new UnsupportedOperationException();
+        }
     }
     private final InnerNode<K, V> root;
@@ -487,7 +476,9 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
     @Override
     public V get(Object key) {
-        Preconditions.checkArgument(key != null, "Null keys are not supported");
+        if (key == null) {
+            throw new IllegalArgumentException("null keys are not supported");
+        }
         final int hash = key.hashCode();
         return root.get(key, hash);
     }
@@ -503,8 +494,12 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
      * of the hash table. The current hash table is not modified.
      */
     public CopyOnWriteHashMap<K, V> copyAndPut(K key, V value) {
-        Preconditions.checkArgument(key != null, "null keys are not supported");
-        Preconditions.checkArgument(value != null, "null values are not supported");
+        if (key == null) {
+            throw new IllegalArgumentException("null keys are not supported");
+        }
+        if (value == null) {
+            throw new IllegalArgumentException("null values are not supported");
+        }
         final int hash = key.hashCode();
         final MutableValueInt newValue = new MutableValueInt();
         final InnerNode<K, V> newRoot = root.put(key, hash, TOTAL_HASH_BITS, value, newValue);
@@ -535,7 +530,9 @@ public final class CopyOnWriteHashMap<K, V> extends AbstractMap<K, V> {
      * Remove the given key from this map. The current hash table is not modified.
      */
     public CopyOnWriteHashMap<K, V> copyAndRemove(Object key) {
-        Preconditions.checkArgument(key != null, "Null keys are not supported");
+        if (key == null) {
+            throw new IllegalArgumentException("null keys are not supported");
+        }
         final int hash = key.hashCode();
         final InnerNode<K, V> newRoot = root.remove(key, hash);
         if (root == newRoot) {

View File

@@ -26,7 +26,6 @@ import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.predicates.IntObjectPredicate;
 import com.carrotsearch.hppc.predicates.IntPredicate;
 import com.carrotsearch.hppc.procedures.IntObjectProcedure;
-import com.google.common.collect.UnmodifiableIterator;
 import java.util.Iterator;
 import java.util.Map;
@@ -113,9 +112,9 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
     /**
      * Returns a direct iterator over the keys.
      */
-    public UnmodifiableIterator<Integer> keysIt() {
+    public Iterator<Integer> keysIt() {
         final Iterator<IntCursor> iterator = map.keys().iterator();
-        return new UnmodifiableIterator<Integer>() {
+        return new Iterator<Integer>() {
             @Override
             public boolean hasNext() {
                 return iterator.hasNext();
@@ -125,6 +124,11 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
             public Integer next() {
                 return iterator.next().value;
             }
+            @Override
+            public final void remove() {
+                throw new UnsupportedOperationException();
+            }
         };
     }
@@ -138,9 +142,9 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
     /**
      * Returns a direct iterator over the keys.
      */
-    public UnmodifiableIterator<VType> valuesIt() {
+    public Iterator<VType> valuesIt() {
         final Iterator<ObjectCursor<VType>> iterator = map.values().iterator();
-        return new UnmodifiableIterator<VType>() {
+        return new Iterator<VType>() {
             @Override
             public boolean hasNext() {
                 return iterator.hasNext();
@@ -150,6 +154,11 @@ public final class ImmutableOpenIntMap<VType> implements Iterable<IntObjectCurso
             public VType next() {
                 return iterator.next().value;
             }
+            @Override
+            public final void remove() {
+                throw new UnsupportedOperationException();
+            }
         };
     }
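
The two hunks above (and the matching ones in ImmutableOpenMap below) drop Guava's UnmodifiableIterator in favour of an anonymous java.util.Iterator whose remove() throws. A minimal standalone sketch of that pattern, with illustrative names only:

    import java.util.Arrays;
    import java.util.Iterator;

    final class ReadOnlyIterator<T> implements Iterator<T> {
        private final Iterator<T> delegate;

        ReadOnlyIterator(Iterator<T> delegate) {
            this.delegate = delegate;
        }

        @Override
        public boolean hasNext() {
            return delegate.hasNext();
        }

        @Override
        public T next() {
            return delegate.next();
        }

        @Override
        public void remove() {
            // same contract UnmodifiableIterator enforced
            throw new UnsupportedOperationException();
        }

        public static void main(String[] args) {
            Iterator<String> it = new ReadOnlyIterator<>(Arrays.asList("a", "b").iterator());
            while (it.hasNext()) {
                System.out.println(it.next());
            }
        }
    }

On Java 8, java.util.Iterator already provides a default remove() that throws UnsupportedOperationException, so the explicit override mostly documents intent.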

View File

@@ -25,7 +25,6 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import com.carrotsearch.hppc.predicates.ObjectObjectPredicate;
 import com.carrotsearch.hppc.predicates.ObjectPredicate;
 import com.carrotsearch.hppc.procedures.ObjectObjectProcedure;
-import com.google.common.collect.UnmodifiableIterator;
 import java.util.Iterator;
 import java.util.Map;
@@ -120,18 +119,21 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
     /**
      * Returns a direct iterator over the keys.
      */
-    public UnmodifiableIterator<KType> keysIt() {
+    public Iterator<KType> keysIt() {
         final Iterator<ObjectCursor<KType>> iterator = map.keys().iterator();
-        return new UnmodifiableIterator<KType>() {
+        return new Iterator<KType>() {
             @Override
-            public boolean hasNext() {
-                return iterator.hasNext();
-            }
+            public boolean hasNext() { return iterator.hasNext(); }
             @Override
             public KType next() {
                 return iterator.next().value;
             }
+            @Override
+            public final void remove() {
+                throw new UnsupportedOperationException();
+            }
         };
     }
@@ -145,18 +147,21 @@ public final class ImmutableOpenMap<KType, VType> implements Iterable<ObjectObje
     /**
      * Returns a direct iterator over the keys.
      */
-    public UnmodifiableIterator<VType> valuesIt() {
+    public Iterator<VType> valuesIt() {
         final Iterator<ObjectCursor<VType>> iterator = map.values().iterator();
-        return new UnmodifiableIterator<VType>() {
+        return new Iterator<VType>() {
             @Override
-            public boolean hasNext() {
-                return iterator.hasNext();
-            }
+            public boolean hasNext() { return iterator.hasNext(); }
             @Override
             public VType next() {
                 return iterator.next().value;
             }
+            @Override
+            public final void remove() {
+                throw new UnsupportedOperationException();
+            }
         };
     }

View File

@@ -27,8 +27,6 @@ import org.elasticsearch.common.inject.spi.TypeListener;
 import java.lang.annotation.Annotation;
 import java.util.Objects;
-import static com.google.common.base.Preconditions.checkState;
 /**
  * A support class for {@link Module}s which reduces repetition and results in
  * a more readable configuration. Simply extend this class, implement {@link
@@ -54,8 +52,9 @@ public abstract class AbstractModule implements Module {
     @Override
     public final synchronized void configure(Binder builder) {
-        checkState(this.binder == null, "Re-entry is not allowed.");
+        if (this.binder != null) {
+            throw new IllegalStateException("Re-entry is not allowed.");
+        }
         this.binder = Objects.requireNonNull(builder, "builder");
         try {
             configure();

View File

@@ -21,8 +21,7 @@ import org.elasticsearch.common.inject.internal.Errors;
 import org.elasticsearch.common.inject.spi.Message;
 import java.util.Collection;
+import java.util.Locale;
-import static com.google.common.base.Preconditions.checkState;
 /**
  * Thrown when a programming error such as a misplaced annotation, illegal binding, or unsupported
@@ -48,8 +47,10 @@ public final class ConfigurationException extends RuntimeException {
      * Returns a copy of this configuration exception with the specified partial value.
      */
     public ConfigurationException withPartialValue(Object partialValue) {
-        checkState(this.partialValue == null,
-                "Can't clobber existing partial value %s with %s", this.partialValue, partialValue);
+        if (this.partialValue != null) {
+            String message = String.format(Locale.ROOT, "Can't clobber existing partial value %s with %s", this.partialValue, partialValue);
+            throw new IllegalStateException(message);
+        }
         ConfigurationException result = new ConfigurationException(messages);
         result.partialValue = partialValue;
         return result;

View File

@@ -25,8 +25,6 @@ import org.elasticsearch.common.inject.spi.InjectionPoint;
 import java.util.Set;
-import static com.google.common.base.Preconditions.checkState;
 class ConstructorBindingImpl<T> extends BindingImpl<T> implements ConstructorBinding<T> {
     private final Factory<T> factory;
@@ -52,19 +50,25 @@ class ConstructorBindingImpl<T> extends BindingImpl<T> implements ConstructorBin
     @Override
     public <V> V acceptTargetVisitor(BindingTargetVisitor<? super T, V> visitor) {
-        checkState(factory.constructorInjector != null, "not initialized");
+        if (factory.constructorInjector == null) {
+            throw new IllegalStateException("not initialized");
+        }
         return visitor.visit(this);
     }
     @Override
     public InjectionPoint getConstructor() {
-        checkState(factory.constructorInjector != null, "Binding is not ready");
+        if (factory.constructorInjector == null) {
+            throw new IllegalStateException("Binding is not ready");
+        }
         return factory.constructorInjector.getConstructionProxy().getInjectionPoint();
     }
     @Override
     public Set<InjectionPoint> getInjectableMembers() {
-        checkState(factory.constructorInjector != null, "Binding is not ready");
+        if (factory.constructorInjector == null) {
+            throw new IllegalStateException("Binding is not ready");
+        }
         return factory.constructorInjector.getInjectableMembers();
     }
@@ -97,7 +101,9 @@ class ConstructorBindingImpl<T> extends BindingImpl<T> implements ConstructorBin
        @SuppressWarnings("unchecked")
        public T get(Errors errors, InternalContext context, Dependency<?> dependency)
                throws ErrorsException {
-            checkState(constructorInjector != null, "Constructor not ready");
+            if (constructorInjector == null) {
+                throw new IllegalStateException("Constructor not ready");
+            }
            // This may not actually be safe because it could return a super type of T (if that's all the
            // client needs), but it should be OK in practice thanks to the wonders of erasure.

View File

@@ -22,8 +22,6 @@ import org.elasticsearch.common.inject.spi.Message;
 import java.util.Collection;
-import static com.google.common.base.Preconditions.checkArgument;
 /**
  * Thrown when errors occur while creating a {@link Injector}. Includes a list of encountered
  * errors. Clients should catch this exception, log it, and stop execution.
@@ -39,7 +37,9 @@ public class CreationException extends RuntimeException {
      */
     public CreationException(Collection<Message> messages) {
         this.messages = ImmutableSet.copyOf(messages);
-        checkArgument(!this.messages.isEmpty());
+        if (this.messages.isEmpty()) {
+            throw new IllegalArgumentException();
+        }
         initCause(Errors.getOnlyCause(this.messages));
     }

View File

@@ -25,8 +25,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import static com.google.common.base.Preconditions.checkState;
 /**
  * @author jessewilson@google.com (Jesse Wilson)
  */
@@ -61,7 +59,9 @@ final class EncounterImpl<T> implements TypeEncounter<T> {
     @Override
     public void register(MembersInjector<? super T> membersInjector) {
-        checkState(valid, "Encounters may not be used after hear() returns.");
+        if (!valid) {
+            throw new IllegalStateException("Encounters may not be used after hear() returns.");
+        }
         if (membersInjectors == null) {
             membersInjectors = new ArrayList<>();
@@ -72,7 +72,9 @@ final class EncounterImpl<T> implements TypeEncounter<T> {
     @Override
     public void register(InjectionListener<? super T> injectionListener) {
-        checkState(valid, "Encounters may not be used after hear() returns.");
+        if (!valid) {
+            throw new IllegalStateException("Encounters may not be used after hear() returns.");
+        }
         if (injectionListeners == null) {
             injectionListeners = new ArrayList<>();
@@ -83,25 +85,33 @@ final class EncounterImpl<T> implements TypeEncounter<T> {
     @Override
     public void addError(String message, Object... arguments) {
-        checkState(valid, "Encounters may not be used after hear() returns.");
+        if (!valid) {
+            throw new IllegalStateException("Encounters may not be used after hear() returns.");
+        }
         errors.addMessage(message, arguments);
     }
     @Override
     public void addError(Throwable t) {
-        checkState(valid, "Encounters may not be used after hear() returns.");
+        if (!valid) {
+            throw new IllegalStateException("Encounters may not be used after hear() returns.");
+        }
         errors.errorInUserCode(t, "An exception was caught and reported. Message: %s", t.getMessage());
     }
     @Override
     public void addError(Message message) {
-        checkState(valid, "Encounters may not be used after hear() returns.");
+        if (!valid) {
+            throw new IllegalStateException("Encounters may not be used after hear() returns.");
+        }
         errors.addMessage(message);
     }
     @Override
     public <T> Provider<T> getProvider(Key<T> key) {
-        checkState(valid, "Encounters may not be used after hear() returns.");
+        if (!valid) {
+            throw new IllegalStateException("Encounters may not be used after hear() returns.");
+        }
         return lookups.getProvider(key);
     }
@@ -112,7 +122,9 @@ final class EncounterImpl<T> implements TypeEncounter<T> {
     @Override
     public <T> MembersInjector<T> getMembersInjector(TypeLiteral<T> typeLiteral) {
-        checkState(valid, "Encounters may not be used after hear() returns.");
+        if (!valid) {
+            throw new IllegalStateException("Encounters may not be used after hear() returns.");
+        }
         return lookups.getMembersInjector(typeLiteral);
     }

View File

@@ -17,9 +17,9 @@
 package org.elasticsearch.common.inject;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Iterables;
 import org.elasticsearch.common.inject.internal.*;
 import org.elasticsearch.common.inject.spi.Dependency;
+import org.elasticsearch.common.util.iterable.Iterables;
 import java.util.Collection;
 import java.util.List;

View File

@@ -17,28 +17,14 @@
 package org.elasticsearch.common.inject;
 import com.google.common.collect.ImmutableSet;
-import org.elasticsearch.common.inject.internal.Errors;
-import org.elasticsearch.common.inject.internal.ErrorsException;
-import org.elasticsearch.common.inject.internal.InternalContext;
-import org.elasticsearch.common.inject.internal.InternalFactory;
-import org.elasticsearch.common.inject.internal.PrivateElementsImpl;
-import org.elasticsearch.common.inject.internal.ProviderInstanceBindingImpl;
-import org.elasticsearch.common.inject.internal.Scoping;
-import org.elasticsearch.common.inject.internal.SourceProvider;
-import org.elasticsearch.common.inject.internal.Stopwatch;
-import org.elasticsearch.common.inject.spi.Dependency;
-import org.elasticsearch.common.inject.spi.Element;
-import org.elasticsearch.common.inject.spi.Elements;
-import org.elasticsearch.common.inject.spi.InjectionPoint;
-import org.elasticsearch.common.inject.spi.PrivateElements;
-import org.elasticsearch.common.inject.spi.TypeListenerBinding;
+import org.elasticsearch.common.inject.internal.*;
+import org.elasticsearch.common.inject.spi.*;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;
 import java.util.logging.Logger;
-import static com.google.common.base.Preconditions.checkState;
 import static org.elasticsearch.common.inject.Scopes.SINGLETON;
 /**
@@ -125,9 +111,15 @@ class InjectorShell {
      */
     List<InjectorShell> build(Initializer initializer, BindingProcessor bindingProcessor,
                               Stopwatch stopwatch, Errors errors) {
-        checkState(stage != null, "Stage not initialized");
-        checkState(privateElements == null || parent != null, "PrivateElements with no parent");
-        checkState(state != null, "no state. Did you remember to lock() ?");
+        if (stage == null) {
+            throw new IllegalStateException("Stage not initialized");
+        }
+        if (privateElements != null && parent == null) {
+            throw new IllegalStateException("PrivateElements with no parent");
+        }
+        if (state == null) {
+            throw new IllegalStateException("no state. Did you remember to lock() ?");
+        }
         InjectorImpl injector = new InjectorImpl(parent, state, initializer);
         if (privateElements != null) {

View File

@@ -24,8 +24,6 @@ import java.lang.annotation.Annotation;
 import java.lang.reflect.Type;
 import java.util.Objects;
-import static com.google.common.base.Preconditions.checkArgument;
 /**
  * Binding key consisting of an injection type and an optional annotation.
  * Matches the type and annotation at a point of injection.
@@ -367,16 +365,20 @@ public class Key<T> {
     private static void ensureRetainedAtRuntime(
             Class<? extends Annotation> annotationType) {
-        checkArgument(Annotations.isRetainedAtRuntime(annotationType),
-                "%s is not retained at runtime. Please annotate it with @Retention(RUNTIME).",
-                annotationType.getName());
+        if (!Annotations.isRetainedAtRuntime(annotationType)) {
+            throw new IllegalArgumentException(
+                    annotationType.getName() + " is not retained at runtime. Please annotate it with @Retention(RUNTIME)."
+            );
+        }
     }
     private static void ensureIsBindingAnnotation(
             Class<? extends Annotation> annotationType) {
-        checkArgument(isBindingAnnotation(annotationType),
-                "%s is not a binding annotation. Please annotate it with @BindingAnnotation.",
-                annotationType.getName());
+        if (!isBindingAnnotation(annotationType)) {
+            throw new IllegalArgumentException(
+                    annotationType.getName() + " is not a binding annotation. Please annotate it with @BindingAnnotation."
+            );
+        }
     }
     static enum NullAnnotationStrategy implements AnnotationStrategy {

View File

@@ -27,8 +27,6 @@ import org.elasticsearch.common.inject.spi.TypeListener;
 import java.lang.annotation.Annotation;
-import static com.google.common.base.Preconditions.checkState;
 /**
  * A module whose configuration information is hidden from its environment by default. Only bindings
  * that are explicitly exposed will be available to other modules and to the users of the injector.
@@ -93,7 +91,9 @@ public abstract class PrivateModule implements Module {
     @Override
     public final synchronized void configure(Binder binder) {
-        checkState(this.binder == null, "Re-entry is not allowed.");
+        if (this.binder != null) {
+            throw new IllegalStateException("Re-entry is not allowed.");
+        }
         // Guice treats PrivateModules specially and passes in a PrivateBinder automatically.
         this.binder = (PrivateBinder) binder.skipSources(PrivateModule.class);

View File

@@ -23,8 +23,6 @@ import org.elasticsearch.common.inject.spi.Message;
 import java.util.Collection;
 import java.util.Collections;
-import static com.google.common.base.Preconditions.checkArgument;
 /**
  * Indicates that there was a runtime failure while providing an instance.
  *
@@ -41,7 +39,9 @@ public final class ProvisionException extends RuntimeException {
      */
     public ProvisionException(Iterable<Message> messages) {
         this.messages = ImmutableSet.copyOf(messages);
-        checkArgument(!this.messages.isEmpty());
+        if (this.messages.isEmpty()) {
+            throw new IllegalArgumentException();
+        }
         initCause(Errors.getOnlyCause(this.messages));
     }

View File

@@ -24,7 +24,6 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Objects;
-import static com.google.common.base.Preconditions.checkArgument;
 import static org.elasticsearch.common.inject.internal.MoreTypes.canonicalize;
 /**
@@ -257,8 +256,9 @@ public class TypeLiteral<T> {
      * @since 2.0
      */
     public TypeLiteral<?> getSupertype(Class<?> supertype) {
-        checkArgument(supertype.isAssignableFrom(rawType),
-                "%s is not a supertype of %s", supertype, this.type);
+        if (!supertype.isAssignableFrom(rawType)) {
+            throw new IllegalArgumentException(supertype + " is not a supertype of " + type);
+        }
         return resolve(MoreTypes.getGenericSupertype(type, rawType, supertype));
     }
@@ -269,8 +269,9 @@ public class TypeLiteral<T> {
      * @since 2.0
      */
     public TypeLiteral<?> getFieldType(Field field) {
-        checkArgument(field.getDeclaringClass().isAssignableFrom(rawType),
-                "%s is not defined by a supertype of %s", field, type);
+        if (!field.getDeclaringClass().isAssignableFrom(rawType)) {
+            throw new IllegalArgumentException(field + " is not defined by a supertype of " + type);
+        }
         return resolve(field.getGenericType());
     }
@@ -285,14 +286,17 @@ public class TypeLiteral<T> {
         if (methodOrConstructor instanceof Method) {
             Method method = (Method) methodOrConstructor;
-            checkArgument(method.getDeclaringClass().isAssignableFrom(rawType),
-                    "%s is not defined by a supertype of %s", method, type);
+            if (!method.getDeclaringClass().isAssignableFrom(rawType)) {
+                throw new IllegalArgumentException(method + " is not defined by a supertype of " + type);
+            }
             genericParameterTypes = method.getGenericParameterTypes();
         } else if (methodOrConstructor instanceof Constructor) {
             Constructor constructor = (Constructor) methodOrConstructor;
-            checkArgument(constructor.getDeclaringClass().isAssignableFrom(rawType),
-                    "%s does not construct a supertype of %s", constructor, type);
+            if (!constructor.getDeclaringClass().isAssignableFrom(rawType)) {
+                throw new IllegalArgumentException(constructor + " does not construct a supertype of " + type);
+            }
             genericParameterTypes = constructor.getGenericParameterTypes();
         } else {
@@ -313,14 +317,17 @@ public class TypeLiteral<T> {
         if (methodOrConstructor instanceof Method) {
             Method method = (Method) methodOrConstructor;
-            checkArgument(method.getDeclaringClass().isAssignableFrom(rawType),
-                    "%s is not defined by a supertype of %s", method, type);
+            if (!method.getDeclaringClass().isAssignableFrom(rawType)) {
+                throw new IllegalArgumentException(method + " is not defined by a supertype of " + type);
+            }
             genericExceptionTypes = method.getGenericExceptionTypes();
         } else if (methodOrConstructor instanceof Constructor) {
             Constructor<?> constructor = (Constructor<?>) methodOrConstructor;
-            checkArgument(constructor.getDeclaringClass().isAssignableFrom(rawType),
-                    "%s does not construct a supertype of %s", constructor, type);
+            if (!constructor.getDeclaringClass().isAssignableFrom(rawType)) {
+                throw new IllegalArgumentException(constructor + " does not construct a supertype of " + type);
+            }
             genericExceptionTypes = constructor.getGenericExceptionTypes();
         } else {
@@ -337,8 +344,10 @@ public class TypeLiteral<T> {
      * @since 2.0
      */
     public TypeLiteral<?> getReturnType(Method method) {
-        checkArgument(method.getDeclaringClass().isAssignableFrom(rawType),
-                "%s is not defined by a supertype of %s", method, type);
+        if (!method.getDeclaringClass().isAssignableFrom(rawType)) {
+            throw new IllegalArgumentException(method + " is not defined by a supertype of " + type);
+        }
         return resolve(method.getGenericReturnType());
     }
 }

View File

@@ -17,18 +17,7 @@
 package org.elasticsearch.common.inject.assistedinject;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterables;
-import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.inject.Binder;
-import org.elasticsearch.common.inject.Binding;
-import org.elasticsearch.common.inject.ConfigurationException;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.inject.Injector;
-import org.elasticsearch.common.inject.Key;
-import org.elasticsearch.common.inject.Module;
-import org.elasticsearch.common.inject.Provider;
-import org.elasticsearch.common.inject.ProvisionException;
-import org.elasticsearch.common.inject.TypeLiteral;
+import org.elasticsearch.common.inject.*;
 import org.elasticsearch.common.inject.internal.Errors;
 import org.elasticsearch.common.inject.internal.ErrorsException;
 import org.elasticsearch.common.inject.spi.Message;
@@ -43,7 +32,6 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
-import static com.google.common.base.Preconditions.checkState;
 import static org.elasticsearch.common.inject.internal.Annotations.getKey;
 /**
@@ -192,8 +180,9 @@ public final class FactoryProvider2<F> implements InvocationHandler, Provider<F>
      * Creates a child injector that binds the args, and returns the binding for the method's result.
      */
     public Binding<?> getBindingFromNewInjector(final Method method, final Object[] args) {
-        checkState(injector != null,
-                "Factories.create() factories cannot be used until they're initialized by Guice.");
+        if (injector == null) {
+            throw new IllegalStateException("Factories.create() factories cannot be used until they're initialized by Guice.");
+        }
         final Key<?> returnType = returnTypesByMethod.get(method);
@@ -237,7 +226,7 @@ public final class FactoryProvider2<F> implements InvocationHandler, Provider<F>
         } catch (ProvisionException e) {
             // if this is an exception declared by the factory method, throw it as-is
             if (e.getErrorMessages().size() == 1) {
-                Message onlyError = Iterables.getOnlyElement(e.getErrorMessages());
+                Message onlyError = e.getErrorMessages().iterator().next();
                 Throwable cause = onlyError.getCause();
                 if (cause != null && canRethrow(method, cause)) {
                     throw cause;

View File

@@ -22,8 +22,6 @@ import java.lang.annotation.Annotation;
 import java.lang.reflect.ParameterizedType;
 import java.lang.reflect.Type;
-import static com.google.common.base.Preconditions.checkArgument;
 /**
  * Models a method or constructor parameter.
  *
@@ -144,8 +142,9 @@ class Parameter {
         Annotation bindingAnnotation = null;
         for (Annotation a : annotations) {
             if (a.annotationType().getAnnotation(BindingAnnotation.class) != null) {
-                checkArgument(bindingAnnotation == null,
-                        "Parameter has multiple binding annotations: %s and %s", bindingAnnotation, a);
+                if (bindingAnnotation != null) {
+                    throw new IllegalArgumentException("Parameter has multiple binding annotations: " + bindingAnnotation + " and " + a);
+                }
                 bindingAnnotation = a;
             }
         }

View File

@@ -30,8 +30,6 @@ import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Objects;
-import static com.google.common.base.Preconditions.checkArgument;
 /**
  * Static methods for working with types that we aren't publishing in the
  * public {@code Types} API.
@@ -152,8 +150,11 @@ public class MoreTypes {
             // Neal isn't either but suspects some pathological case related
             // to nested classes exists.
             Type rawType = parameterizedType.getRawType();
-            checkArgument(rawType instanceof Class,
-                    "Expected a Class, but <%s> is of type %s", type, type.getClass().getName());
+            if (!(rawType instanceof Class)) {
+                throw new IllegalArgumentException(
+                        "Expected a Class, but <" + type +"> is of type " + type.getClass().getName()
+                );
+            }
             return (Class<?>) rawType;
         } else if (type instanceof GenericArrayType) {
@@ -445,10 +446,13 @@ public class MoreTypes {
             // require an owner type if the raw type needs it
             if (rawType instanceof Class<?>) {
                 Class rawTypeAsClass = (Class) rawType;
-                checkArgument(ownerType != null || rawTypeAsClass.getEnclosingClass() == null,
-                        "No owner type for enclosed %s", rawType);
-                checkArgument(ownerType == null || rawTypeAsClass.getEnclosingClass() != null,
-                        "Owner type for unenclosed %s", rawType);
+                if (ownerType == null && rawTypeAsClass.getEnclosingClass() != null) {
+                    throw new IllegalArgumentException("No owner type for enclosed " + rawType);
+                }
+                if (ownerType != null && rawTypeAsClass.getEnclosingClass() == null) {
+                    throw new IllegalArgumentException("Owner type for unenclosed " + rawType);
+                }
             }
             this.ownerType = ownerType == null ? null : canonicalize(ownerType);
@@ -561,13 +565,18 @@ public class MoreTypes {
         private final Type lowerBound;
         public WildcardTypeImpl(Type[] upperBounds, Type[] lowerBounds) {
-            checkArgument(lowerBounds.length <= 1, "Must have at most one lower bound.");
-            checkArgument(upperBounds.length == 1, "Must have exactly one upper bound.");
+            if (lowerBounds.length > 1) {
+                throw new IllegalArgumentException("Must have at most one lower bound.");
+            }
+            if (upperBounds.length != 1) {
+                throw new IllegalArgumentException("Must have exactly one upper bound.");
+            }
             if (lowerBounds.length == 1) {
                 Objects.requireNonNull(lowerBounds[0], "lowerBound");
                 checkNotPrimitive(lowerBounds[0], "wildcard bounds");
-                checkArgument(upperBounds[0] == Object.class, "bounded both ways");
+                if (upperBounds[0] != Object.class) {
+                    throw new IllegalArgumentException("bounded both ways");
+                }
                 this.lowerBound = canonicalize(lowerBounds[0]);
                 this.upperBound = Object.class;
@@ -615,8 +624,9 @@ public class MoreTypes {
         }
         private static void checkNotPrimitive(Type type, String use) {
-            checkArgument(!(type instanceof Class<?>) || !((Class) type).isPrimitive(),
-                    "Primitive types are not allowed in %s: %s", use, type);
+            if (type instanceof Class<?> && ((Class) type).isPrimitive()) {
+                throw new IllegalArgumentException("Primitive types are not allowed in " + use + ": " + type);
+            }
         }
         /**

View File

@@ -25,16 +25,7 @@ import org.elasticsearch.common.inject.spi.Element;
 import org.elasticsearch.common.inject.spi.ElementVisitor;
 import org.elasticsearch.common.inject.spi.PrivateElements;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkState;
+import java.util.*;
 /**
  * @author jessewilson@google.com (Jesse Wilson)
@@ -88,7 +79,9 @@ public final class PrivateElementsImpl implements PrivateElements {
     }
     public void initInjector(Injector injector) {
-        checkState(this.injector == null, "injector already initialized");
+        if (this.injector != null) {
+            throw new IllegalStateException("injector already initialized");
+        }
         this.injector = Objects.requireNonNull(injector, "injector");
     }
@@ -137,7 +130,9 @@ public final class PrivateElementsImpl implements PrivateElements {
     public Object getExposedSource(Key<?> key) {
         getExposedKeys(); // ensure exposedKeysToSources is populated
         Object source = exposedKeysToSources.get(key);
-        checkArgument(source != null, "%s not exposed by %s.", key, this);
+        if (source == null) {
+            throw new IllegalArgumentException(key + " not exposed by " + ".");
+        }
         return source;
     }

View File

@@ -17,7 +17,7 @@
 package org.elasticsearch.common.inject.internal;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Iterables;
+import org.elasticsearch.common.util.iterable.Iterables;
 import java.util.ArrayList;
 import java.util.List;

View File

@@ -24,8 +24,6 @@ import java.lang.reflect.AnnotatedElement;
 import java.lang.reflect.Method;
 import java.util.Objects;
-import static com.google.common.base.Preconditions.checkArgument;
 /**
  * Matcher implementations. Supports matching classes and methods.
  *
@@ -103,8 +101,9 @@ public class Matchers {
     private static void checkForRuntimeRetention(
             Class<? extends Annotation> annotationType) {
         Retention retention = annotationType.getAnnotation(Retention.class);
-        checkArgument(retention != null && retention.value() == RetentionPolicy.RUNTIME,
-                "Annotation " + annotationType.getSimpleName() + " is missing RUNTIME retention");
+        if (retention == null || retention.value() != RetentionPolicy.RUNTIME) {
+            throw new IllegalArgumentException("Annotation " + annotationType.getSimpleName() + " is missing RUNTIME retention");
+        }
     }
     /**

View File

@@ -16,43 +16,17 @@
 package org.elasticsearch.common.inject.spi;
-import org.elasticsearch.common.inject.AbstractModule;
-import org.elasticsearch.common.inject.Binder;
-import org.elasticsearch.common.inject.Binding;
-import org.elasticsearch.common.inject.Key;
-import org.elasticsearch.common.inject.MembersInjector;
-import org.elasticsearch.common.inject.Module;
-import org.elasticsearch.common.inject.PrivateBinder;
-import org.elasticsearch.common.inject.PrivateModule;
-import org.elasticsearch.common.inject.Provider;
-import org.elasticsearch.common.inject.Scope;
-import org.elasticsearch.common.inject.Stage;
-import org.elasticsearch.common.inject.TypeLiteral;
+import org.elasticsearch.common.inject.*;
 import org.elasticsearch.common.inject.binder.AnnotatedBindingBuilder;
 import org.elasticsearch.common.inject.binder.AnnotatedConstantBindingBuilder;
 import org.elasticsearch.common.inject.binder.AnnotatedElementBuilder;
-import org.elasticsearch.common.inject.internal.AbstractBindingBuilder;
-import org.elasticsearch.common.inject.internal.BindingBuilder;
-import org.elasticsearch.common.inject.internal.ConstantBindingBuilderImpl;
-import org.elasticsearch.common.inject.internal.Errors;
-import org.elasticsearch.common.inject.internal.ExposureBuilder;
-import org.elasticsearch.common.inject.internal.PrivateElementsImpl;
-import org.elasticsearch.common.inject.internal.ProviderMethodsModule;
-import org.elasticsearch.common.inject.internal.SourceProvider;
+import org.elasticsearch.common.inject.internal.*;
 import org.elasticsearch.common.inject.matcher.Matcher;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import java.lang.annotation.Annotation;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
-import static com.google.common.base.Preconditions.checkArgument;
 /**
  * Exposes elements of a module so they can be inspected, validated or {@link
@@ -156,7 +130,9 @@ public final class Elements {
      */
     private RecordingBinder(
             RecordingBinder prototype, Object source, SourceProvider sourceProvider) {
-        checkArgument(source == null ^ sourceProvider == null);
+        if (!(source == null ^ sourceProvider == null)) {
+            throw new IllegalArgumentException();
+        }
         this.stage = prototype.stage;
         this.modules = prototype.modules;

View File

@@ -22,8 +22,6 @@ import org.elasticsearch.common.inject.TypeLiteral;
 import java.util.Objects;
-import static com.google.common.base.Preconditions.checkState;
 /**
  * A lookup of the members injector for a type. Lookups are created explicitly in a module using
  * {@link org.elasticsearch.common.inject.Binder#getMembersInjector(Class) getMembersInjector()} statements:
@@ -68,7 +66,9 @@ public final class MembersInjectorLookup<T> implements Element {
      * @throws IllegalStateException if the delegate is already set
      */
     public void initializeDelegate(MembersInjector<T> delegate) {
-        checkState(this.delegate == null, "delegate already initialized");
+        if (this.delegate != null) {
+            throw new IllegalStateException("delegate already initialized");
+        }
         this.delegate = Objects.requireNonNull(delegate, "delegate");
     }
@@ -95,8 +95,9 @@ public final class MembersInjectorLookup<T> implements Element {
         return new MembersInjector<T>() {
             @Override
             public void injectMembers(T instance) {
-                checkState(delegate != null,
-                        "This MembersInjector cannot be used until the Injector has been created.");
+                if (delegate == null) {
+                    throw new IllegalStateException("This MembersInjector cannot be used until the Injector has been created.");
+                }
                 delegate.injectMembers(instance);
             }

View File

@@ -22,8 +22,6 @@ import org.elasticsearch.common.inject.Provider;
 import java.util.Objects;
-import static com.google.common.base.Preconditions.checkState;
 /**
  * A lookup of the provider for a type. Lookups are created explicitly in a module using
  * {@link org.elasticsearch.common.inject.Binder#getProvider(Class) getProvider()} statements:
@@ -46,8 +44,9 @@ public final class ProviderLookup<T> implements Element {
         @Override
         public T get() {
-            checkState(lookup.delegate != null,
-                    "This Provider cannot be used until the Injector has been created.");
+            if (lookup.delegate == null) {
+                throw new IllegalStateException( "This Provider cannot be used until the Injector has been created.");
+            }
             return lookup.delegate.get();
         }
@@ -89,7 +88,9 @@ public final class ProviderLookup<T> implements Element {
      * @throws IllegalStateException if the delegate is already set
      */
     public void initializeDelegate(Provider<T> delegate) {
-        checkState(this.delegate == null, "delegate already initialized");
+        if (this.delegate != null) {
+            throw new IllegalStateException("delegate already initialized");
+        }
         this.delegate = Objects.requireNonNull(delegate, "delegate");
     }

View File

@@ -19,7 +19,6 @@
 package org.elasticsearch.common.lucene;
-import com.google.common.collect.Iterables;
 import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.codecs.CodecUtil;
@@ -27,7 +26,10 @@ import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.index.*;
 import org.apache.lucene.search.*;
-import org.apache.lucene.store.*;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.Lock;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Counter;
@@ -40,6 +42,7 @@ import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.index.analysis.AnalyzerScope;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -107,7 +110,7 @@ public class Lucene {
         for (SegmentCommitInfo info : infos) {
             list.add(info.files());
         }
-        return Iterables.concat(list);
+        return Iterables.flatten(list);
     }
     /**
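
Iterables.flatten here comes from the new org.elasticsearch.common.util.iterable.Iterables helper that this merge swaps in for Guava's Iterables.concat. Purely as an illustration of the behaviour being relied on (not the actual helper's implementation, and with made-up names), a JDK-only equivalent could look like:

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.List;
    import java.util.stream.Collectors;

    class FlattenSketch {
        // Concatenate a collection of collections into a single list.
        static <T> List<T> flatten(Collection<? extends Collection<T>> nested) {
            return nested.stream()
                    .flatMap(Collection::stream)
                    .collect(Collectors.toList());
        }

        public static void main(String[] args) {
            List<List<String>> segmentFiles = Arrays.asList(
                    Arrays.asList("_0.cfs", "_0.cfe"),
                    Arrays.asList("_1.cfs"));
            System.out.println(flatten(segmentFiles)); // [_0.cfs, _0.cfe, _1.cfs]
        }
    }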

View File

@@ -19,19 +19,11 @@
 package org.elasticsearch.common.lucene;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Multimap;
 import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.LeafReader.CoreClosedListener;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.shard.ShardUtils;
-import java.io.IOException;
-import java.util.IdentityHashMap;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
 /**
  * A map between segment core cache keys and the shard that these segments
@@ -47,16 +39,16 @@ import java.util.Set;
 public final class ShardCoreKeyMap {
     private final Map<Object, ShardId> coreKeyToShard;
-    private final Multimap<String, Object> indexToCoreKey;
+    private final Map<String, Set<Object>> indexToCoreKey;
     public ShardCoreKeyMap() {
         coreKeyToShard = new IdentityHashMap<>();
-        indexToCoreKey = HashMultimap.create();
+        indexToCoreKey = new HashMap<>();
     }
     /**
      * Register a {@link LeafReader}. This is necessary so that the core cache
-     * key of this reader can be found later using {@link #getCoreCacheKeys(ShardId)}.
+     * key of this reader can be found later using {@link #getCoreKeysForIndex(String)}.
      */
     public void add(LeafReader reader) {
         final ShardId shardId = ShardUtils.extractShardId(reader);
@@ -67,15 +59,22 @@ public final class ShardCoreKeyMap {
         final String index = shardId.getIndex();
         synchronized (this) {
             if (coreKeyToShard.put(coreKey, shardId) == null) {
-                final boolean added = indexToCoreKey.put(index, coreKey);
+                Set<Object> objects = indexToCoreKey.get(index);
+                if (objects == null) {
+                    objects = new HashSet<>();
+                    indexToCoreKey.put(index, objects);
+                }
+                final boolean added = objects.add(coreKey);
                 assert added;
-                reader.addCoreClosedListener(new CoreClosedListener() {
-                    @Override
-                    public void onClose(Object ownerCoreCacheKey) throws IOException {
+                reader.addCoreClosedListener(ownerCoreCacheKey -> {
                     assert coreKey == ownerCoreCacheKey;
                     synchronized (ShardCoreKeyMap.this) {
                         coreKeyToShard.remove(ownerCoreCacheKey);
-                        indexToCoreKey.remove(index, coreKey);
+                        final Set<Object> coreKeys = indexToCoreKey.get(index);
+                        final boolean removed = coreKeys.remove(coreKey);
+                        assert removed;
+                        if (coreKeys.isEmpty()) {
+                            indexToCoreKey.remove(index);
+                        }
                     }
                 });
@@ -95,15 +94,35 @@ public final class ShardCoreKeyMap {
      * Get the set of core cache keys associated with the given index.
      */
     public synchronized Set<Object> getCoreKeysForIndex(String index) {
-        return ImmutableSet.copyOf(indexToCoreKey.get(index));
+        final Set<Object> objects = indexToCoreKey.get(index);
+        if (objects == null) {
+            return Collections.emptySet();
+        }
+        // we have to copy otherwise we risk ConcurrentModificationException
+        return Collections.unmodifiableSet(new HashSet<>(objects));
     }
     /**
     * Return the number of tracked segments.
     */
     public synchronized int size() {
-        assert indexToCoreKey.size() == coreKeyToShard.size();
+        assert assertSize();
         return coreKeyToShard.size();
     }
+    private synchronized boolean assertSize() {
+        // this is heavy and should only used in assertions
+        boolean assertionsEnabled = false;
+        assert assertionsEnabled = true;
+        if (assertionsEnabled == false) {
+            throw new AssertionError("only run this if assertions are enabled");
+        }
+        Collection<Set<Object>> values = indexToCoreKey.values();
+        int size = 0;
+        for (Set<Object> value : values) {
+            size += value.size();
+        }
+        return size == coreKeyToShard.size();
+    }
 }
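
The ShardCoreKeyMap rewrite above is a good example of replacing a Guava Multimap with a plain Map of Sets. A compact, self-contained sketch of that bookkeeping (illustrative only, not the Elasticsearch class):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class SetMultimapSketch {
        private final Map<String, Set<Object>> byIndex = new HashMap<>();

        void put(String index, Object key) {
            Set<Object> keys = byIndex.get(index);
            if (keys == null) {
                keys = new HashSet<>();
                byIndex.put(index, keys);
            }
            keys.add(key);
        }

        void remove(String index, Object key) {
            Set<Object> keys = byIndex.get(index);
            if (keys != null && keys.remove(key) && keys.isEmpty()) {
                byIndex.remove(index); // drop empty buckets so the map does not leak index names
            }
        }

        Set<Object> get(String index) {
            Set<Object> keys = byIndex.get(index);
            // copy before exposing, mirroring the ConcurrentModificationException note in the diff
            return keys == null ? Collections.emptySet() : Collections.unmodifiableSet(new HashSet<>(keys));
        }
    }

The empty-bucket cleanup in remove() is what keeps the assertSize() consistency check above meaningful.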

View File

@@ -18,8 +18,6 @@
  */
 package org.elasticsearch.common.unit;
-import com.google.common.base.Preconditions;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -54,7 +52,9 @@ public final class Fuzziness implements ToXContent, Writeable<Fuzziness> {
     static final Fuzziness PROTOTYPE = AUTO;
     private Fuzziness(int fuzziness) {
-        Preconditions.checkArgument(fuzziness >= 0 && fuzziness <= 2, "Valid edit distances are [0, 1, 2] but was [" + fuzziness + "]");
+        if (fuzziness != 0 && fuzziness != 1 && fuzziness != 2) {
+            throw new IllegalArgumentException("Valid edit distances are [0, 1, 2] but was [" + fuzziness + "]");
+        }
         this.fuzziness = Integer.toString(fuzziness);
     }

View File

@@ -19,7 +19,6 @@
 package org.elasticsearch.common.unit;
-import com.google.common.base.Preconditions;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -46,7 +45,9 @@ public class SizeValue implements Streamable {
     }
     public SizeValue(long size, SizeUnit sizeUnit) {
-        Preconditions.checkArgument(size >= 0, "size in SizeValue may not be negative");
+        if (size < 0) {
+            throw new IllegalArgumentException("size in SizeValue may not be negative");
+        }
         this.size = size;
         this.sizeUnit = sizeUnit;
     }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import com.google.common.base.Preconditions;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cache.recycler.PageCacheRecycler;
@ -42,8 +41,12 @@ abstract class AbstractBigArray extends AbstractArray {
protected AbstractBigArray(int pageSize, BigArrays bigArrays, boolean clearOnResize) { protected AbstractBigArray(int pageSize, BigArrays bigArrays, boolean clearOnResize) {
super(bigArrays, clearOnResize); super(bigArrays, clearOnResize);
this.recycler = bigArrays.recycler; this.recycler = bigArrays.recycler;
Preconditions.checkArgument(pageSize >= 128, "pageSize must be >= 128"); if (pageSize < 128) {
Preconditions.checkArgument((pageSize & (pageSize - 1)) == 0, "pageSize must be a power of two"); throw new IllegalArgumentException("pageSize must be >= 128");
}
if ((pageSize & (pageSize - 1)) != 0) {
throw new IllegalArgumentException("pageSize must be a power of two");
}
this.pageShift = Integer.numberOfTrailingZeros(pageSize); this.pageShift = Integer.numberOfTrailingZeros(pageSize);
this.pageMask = pageSize - 1; this.pageMask = pageSize - 1;
size = 0; size = 0;
@ -56,7 +59,9 @@ abstract class AbstractBigArray extends AbstractArray {
final int numPages(long capacity) { final int numPages(long capacity) {
final long numPages = (capacity + pageMask) >>> pageShift; final long numPages = (capacity + pageMask) >>> pageShift;
Preconditions.checkArgument(numPages <= Integer.MAX_VALUE, "pageSize=" + (pageMask + 1) + " is too small for such as capacity: " + capacity); if (numPages > Integer.MAX_VALUE) {
throw new IllegalArgumentException("pageSize=" + (pageMask + 1) + " is too small for such as capacity: " + capacity);
}
return (int) numPages; return (int) numPages;
} }
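A standalone worked example of the page arithmetic guarded above, assuming pageSize = 1024 (class name hypothetical): the power-of-two test, the derived shift and mask, and the ceiling division performed by numPages.

    public class PageMathSketch {
        public static void main(String[] args) {
            int pageSize = 1024;
            boolean powerOfTwo = (pageSize & (pageSize - 1)) == 0;    // true for 1024
            int pageShift = Integer.numberOfTrailingZeros(pageSize);  // 10
            int pageMask = pageSize - 1;                              // 1023
            long capacity = 3000;
            long numPages = (capacity + pageMask) >>> pageShift;      // ceil(3000 / 1024) = 3
            System.out.println(powerOfTwo + " " + pageShift + " " + pageMask + " " + numPages);
        }
    }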

View File

@ -20,7 +20,6 @@
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import com.carrotsearch.hppc.BitMixer; import com.carrotsearch.hppc.BitMixer;
import com.google.common.base.Preconditions;
import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasable;
/** /**
@ -48,8 +47,12 @@ abstract class AbstractPagedHashMap implements Releasable {
long mask; long mask;
AbstractPagedHashMap(long capacity, float maxLoadFactor, BigArrays bigArrays) { AbstractPagedHashMap(long capacity, float maxLoadFactor, BigArrays bigArrays) {
Preconditions.checkArgument(capacity >= 0, "capacity must be >= 0"); if (capacity < 0) {
Preconditions.checkArgument(maxLoadFactor > 0 && maxLoadFactor < 1, "maxLoadFactor must be > 0 and < 1"); throw new IllegalArgumentException("capacity must be >= 0");
}
if (maxLoadFactor <= 0 || maxLoadFactor >= 1) {
throw new IllegalArgumentException("maxLoadFactor must be > 0 and < 1");
}
this.bigArrays = bigArrays; this.bigArrays = bigArrays;
this.maxLoadFactor = maxLoadFactor; this.maxLoadFactor = maxLoadFactor;
long buckets = 1L + (long) (capacity / maxLoadFactor); long buckets = 1L + (long) (capacity / maxLoadFactor);

View File

@ -19,6 +19,7 @@
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import java.lang.reflect.Array;
import java.util.Arrays; import java.util.Arrays;
/** /**
@ -69,4 +70,22 @@ public class ArrayUtils {
} }
return index; return index;
} }
/**
* Concatenates 2 arrays
*/
public static String[] concat(String[] one, String[] other) {
return concat(one, other, String.class);
}
/**
* Concatenates 2 arrays
*/
public static <T> T[] concat(T[] one, T[] other, Class<T> clazz) {
T[] target = (T[]) Array.newInstance(clazz, one.length + other.length);
System.arraycopy(one, 0, target, 0, one.length);
System.arraycopy(other, 0, target, one.length, other.length);
return target;
}
} }
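A short usage sketch for the two concat helpers added above; it assumes org.elasticsearch.common.util.ArrayUtils as defined in this hunk is on the classpath (the example class name is hypothetical):

    import org.elasticsearch.common.util.ArrayUtils;

    public class ConcatExample {
        public static void main(String[] args) {
            String[] merged = ArrayUtils.concat(new String[] {"a", "b"}, new String[] {"c"});
            // merged -> ["a", "b", "c"]
            Integer[] numbers = ArrayUtils.concat(new Integer[] {1, 2}, new Integer[] {3}, Integer.class);
            // numbers -> [1, 2, 3]
            System.out.println(merged.length + " " + numbers.length); // 3 3
        }
    }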

View File

@ -19,7 +19,6 @@
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import com.google.common.base.Preconditions;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
@ -55,9 +54,15 @@ public class BigArrays {
/** Return the next size to grow to that is &gt;= <code>minTargetSize</code>. /** Return the next size to grow to that is &gt;= <code>minTargetSize</code>.
* Inspired from {@link ArrayUtil#oversize(int, int)} and adapted to play nicely with paging. */ * Inspired from {@link ArrayUtil#oversize(int, int)} and adapted to play nicely with paging. */
public static long overSize(long minTargetSize, int pageSize, int bytesPerElement) { public static long overSize(long minTargetSize, int pageSize, int bytesPerElement) {
Preconditions.checkArgument(minTargetSize >= 0, "minTargetSize must be >= 0"); if (minTargetSize < 0) {
Preconditions.checkArgument(pageSize >= 0, "pageSize must be > 0"); throw new IllegalArgumentException("minTargetSize must be >= 0");
Preconditions.checkArgument(bytesPerElement > 0, "bytesPerElement must be > 0"); }
if (pageSize < 0) {
throw new IllegalArgumentException("pageSize must be > 0");
}
if (bytesPerElement <= 0) {
throw new IllegalArgumentException("bytesPerElement must be > 0");
}
long newSize; long newSize;
if (minTargetSize < pageSize) { if (minTargetSize < pageSize) {

View File

@ -19,7 +19,6 @@
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import com.google.common.base.Preconditions;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
@ -110,7 +109,9 @@ final class BigByteArray extends AbstractBigArray implements ByteArray {
@Override @Override
public void fill(long fromIndex, long toIndex, byte value) { public void fill(long fromIndex, long toIndex, byte value) {
Preconditions.checkArgument(fromIndex <= toIndex); if (fromIndex > toIndex) {
throw new IllegalArgumentException();
}
final int fromPage = pageIndex(fromIndex); final int fromPage = pageIndex(fromIndex);
final int toPage = pageIndex(toIndex - 1); final int toPage = pageIndex(toIndex - 1);
if (fromPage == toPage) { if (fromPage == toPage) {

View File

@ -19,8 +19,6 @@
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import com.google.common.base.Preconditions;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
@ -95,7 +93,9 @@ final class BigDoubleArray extends AbstractBigArray implements DoubleArray {
@Override @Override
public void fill(long fromIndex, long toIndex, double value) { public void fill(long fromIndex, long toIndex, double value) {
Preconditions.checkArgument(fromIndex <= toIndex); if (fromIndex > toIndex) {
throw new IllegalArgumentException();
}
final long longBits = Double.doubleToRawLongBits(value); final long longBits = Double.doubleToRawLongBits(value);
final int fromPage = pageIndex(fromIndex); final int fromPage = pageIndex(fromIndex);
final int toPage = pageIndex(toIndex - 1); final int toPage = pageIndex(toIndex - 1);

View File

@ -19,8 +19,6 @@
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import com.google.common.base.Preconditions;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
@ -95,7 +93,9 @@ final class BigFloatArray extends AbstractBigArray implements FloatArray {
@Override @Override
public void fill(long fromIndex, long toIndex, float value) { public void fill(long fromIndex, long toIndex, float value) {
Preconditions.checkArgument(fromIndex <= toIndex); if (fromIndex > toIndex) {
throw new IllegalArgumentException();
}
final int intBits = Float.floatToRawIntBits(value); final int intBits = Float.floatToRawIntBits(value);
final int fromPage = pageIndex(fromIndex); final int fromPage = pageIndex(fromIndex);
final int toPage = pageIndex(toIndex - 1); final int toPage = pageIndex(toIndex - 1);

View File

@ -19,7 +19,6 @@
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import com.google.common.base.Preconditions;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
@ -71,7 +70,9 @@ final class BigIntArray extends AbstractBigArray implements IntArray {
@Override @Override
public void fill(long fromIndex, long toIndex, int value) { public void fill(long fromIndex, long toIndex, int value) {
Preconditions.checkArgument(fromIndex <= toIndex); if (fromIndex > toIndex) {
throw new IllegalArgumentException();
}
final int fromPage = pageIndex(fromIndex); final int fromPage = pageIndex(fromIndex);
final int toPage = pageIndex(toIndex - 1); final int toPage = pageIndex(toIndex - 1);
if (fromPage == toPage) { if (fromPage == toPage) {

View File

@ -19,7 +19,6 @@
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import com.google.common.base.Preconditions;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
@ -93,7 +92,9 @@ final class BigLongArray extends AbstractBigArray implements LongArray {
@Override @Override
public void fill(long fromIndex, long toIndex, long value) { public void fill(long fromIndex, long toIndex, long value) {
Preconditions.checkArgument(fromIndex <= toIndex); if (fromIndex > toIndex) {
throw new IllegalArgumentException();
}
if (fromIndex == toIndex) { if (fromIndex == toIndex) {
return; // empty range return; // empty range
} }

View File

@ -18,7 +18,6 @@
*/ */
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import com.google.common.math.LongMath;
import com.google.common.primitives.Ints; import com.google.common.primitives.Ints;
import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.DataOutput;
@ -33,7 +32,6 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.SizeValue; import org.elasticsearch.common.unit.SizeValue;
import java.io.IOException; import java.io.IOException;
import java.math.RoundingMode;
import java.util.Arrays; import java.util.Arrays;
import java.util.Comparator; import java.util.Comparator;
@ -321,7 +319,13 @@ public class BloomFilter {
long bitCount; long bitCount;
BitArray(long bits) { BitArray(long bits) {
this(new long[Ints.checkedCast(LongMath.divide(bits, 64, RoundingMode.CEILING))]); this(new long[size(bits)]);
}
private static int size(long bits) {
long quotient = bits / 64;
long remainder = bits - quotient * 64;
return Ints.checkedCast(remainder == 0 ? quotient : 1 + quotient);
} }
// Used by serialization // Used by serialization
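The new size(bits) computes the same ceiling division that LongMath.divide(bits, 64, RoundingMode.CEILING) performed, just by hand. A standalone check of a few boundary values (class name hypothetical; the narrowing via Ints.checkedCast is omitted here):

    public class CeilingDivSketch {
        static long size(long bits) {
            long quotient = bits / 64;
            long remainder = bits - quotient * 64;
            return remainder == 0 ? quotient : 1 + quotient;
        }

        public static void main(String[] args) {
            System.out.println(size(64));  // 1
            System.out.println(size(65));  // 2
            System.out.println(size(128)); // 2
            System.out.println(size(129)); // 3
        }
    }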

View File

@ -23,24 +23,10 @@ import com.carrotsearch.hppc.DoubleArrayList;
import com.carrotsearch.hppc.FloatArrayList; import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.LongArrayList; import com.carrotsearch.hppc.LongArrayList;
import com.carrotsearch.hppc.ObjectArrayList; import com.carrotsearch.hppc.ObjectArrayList;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators; import com.google.common.collect.Iterators;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.*;
import org.apache.lucene.util.BytesRefArray;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.IntroSorter;
import java.util.AbstractList; import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.RandomAccess;
/** Collections-related utility methods. */ /** Collections-related utility methods. */
public enum CollectionUtils { public enum CollectionUtils {
@ -297,8 +283,12 @@ public enum CollectionUtils {
private final int distance; private final int distance;
public RotatedList(List<T> list, int distance) { public RotatedList(List<T> list, int distance) {
Preconditions.checkArgument(distance >= 0 && distance < list.size()); if (distance < 0 || distance >= list.size()) {
Preconditions.checkArgument(list instanceof RandomAccess); throw new IllegalArgumentException();
}
if (!(list instanceof RandomAccess)) {
throw new IllegalArgumentException();
}
this.in = list; this.in = list;
this.distance = distance; this.distance = distance;
} }
@ -462,4 +452,5 @@ public enum CollectionUtils {
return result; return result;
} }
} }

View File

@ -20,7 +20,6 @@
package org.elasticsearch.common.util; package org.elasticsearch.common.util;
import com.google.common.collect.UnmodifiableIterator;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lease.Releasables;
@ -125,7 +124,7 @@ public class LongObjectPagedHashMap<T> extends AbstractPagedHashMap implements I
@Override @Override
public Iterator<Cursor<T>> iterator() { public Iterator<Cursor<T>> iterator() {
return new UnmodifiableIterator<Cursor<T>>() { return new Iterator<Cursor<T>>() {
boolean cached; boolean cached;
final Cursor<T> cursor; final Cursor<T> cursor;
@ -162,6 +161,11 @@ public class LongObjectPagedHashMap<T> extends AbstractPagedHashMap implements I
return cursor; return cursor;
} }
@Override
public final void remove() {
throw new UnsupportedOperationException();
}
}; };
} }
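Guava's UnmodifiableIterator base class is replaced here (and again in FieldNamesFieldMapper further down) by a plain anonymous Iterator whose remove() throws. A hedged, generic sketch of that pattern, not taken from this commit (names are hypothetical):

    import java.util.Arrays;
    import java.util.Iterator;

    public class UnmodifiableIteratorSketch {
        static <T> Iterator<T> unmodifiable(Iterator<T> delegate) {
            return new Iterator<T>() {
                @Override
                public boolean hasNext() {
                    return delegate.hasNext();
                }

                @Override
                public T next() {
                    return delegate.next();
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException();
                }
            };
        }

        public static void main(String[] args) {
            Iterator<String> it = unmodifiable(Arrays.asList("a", "b").iterator());
            while (it.hasNext()) {
                System.out.println(it.next());
            }
        }
    }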

View File

@ -19,11 +19,12 @@
package org.elasticsearch.common.util.concurrent; package org.elasticsearch.common.util.concurrent;
import com.google.common.base.Joiner;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import java.util.Arrays;
import java.util.concurrent.*; import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
/** /**
* *
@ -81,7 +82,12 @@ public class EsExecutors {
} }
public static String threadName(Settings settings, String ... names) { public static String threadName(Settings settings, String ... names) {
return threadName(settings, "[" + Joiner.on(".").skipNulls().join(names) + "]"); String namePrefix =
Arrays
.stream(names)
.filter(name -> name != null)
.collect(Collectors.joining(".", "[", "]"));
return threadName(settings, namePrefix);
} }
public static String threadName(Settings settings, String namePrefix) { public static String threadName(Settings settings, String namePrefix) {
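A standalone check of the stream pipeline above (class name hypothetical): null names are filtered out and the rest are joined with '.' inside brackets, matching the output that Joiner.on(".").skipNulls() wrapped in "[" and "]" used to produce:

    import java.util.Arrays;
    import java.util.stream.Collectors;

    public class ThreadNameSketch {
        public static void main(String[] args) {
            String[] names = {"elasticsearch", null, "search"};
            String namePrefix = Arrays
                .stream(names)
                .filter(name -> name != null)
                .collect(Collectors.joining(".", "[", "]"));
            System.out.println(namePrefix); // prints [elasticsearch.search]
        }
    }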

View File

@ -0,0 +1,140 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.util.iterable;
import java.util.*;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
public class Iterables {
public Iterables() {
}
public static <T> Iterable<T> concat(Iterable<T>... inputs) {
Objects.requireNonNull(inputs);
return new ConcatenatedIterable(inputs);
}
static class ConcatenatedIterable<T> implements Iterable<T> {
private final Iterable<T>[] inputs;
ConcatenatedIterable(Iterable<T>[] inputs) {
this.inputs = Arrays.copyOf(inputs, inputs.length);
}
@Override
public Iterator<T> iterator() {
return Stream
.of(inputs)
.map(it -> StreamSupport.stream(it.spliterator(), false))
.reduce(Stream::concat)
.orElseGet(Stream::empty).iterator();
}
}
public static <T> Iterable<T> flatten(Iterable<? extends Iterable<T>> inputs) {
Objects.requireNonNull(inputs);
return new FlattenedIterables<>(inputs);
}
static class FlattenedIterables<T> implements Iterable<T> {
private final Iterable<? extends Iterable<T>> inputs;
FlattenedIterables(Iterable<? extends Iterable<T>> inputs) {
List<Iterable<T>> list = new ArrayList<>();
for (Iterable<T> iterable : inputs) {
list.add(iterable);
}
this.inputs = list;
}
@Override
public Iterator<T> iterator() {
return StreamSupport
.stream(inputs.spliterator(), false)
.flatMap(s -> StreamSupport.stream(s.spliterator(), false)).iterator();
}
}
public static boolean allElementsAreEqual(Iterable<?> left, Iterable<?> right) {
Objects.requireNonNull(left);
Objects.requireNonNull(right);
if (left instanceof Collection && right instanceof Collection) {
Collection collection1 = (Collection) left;
Collection collection2 = (Collection) right;
if (collection1.size() != collection2.size()) {
return false;
}
}
Iterator<?> leftIt = left.iterator();
Iterator<?> rightIt = right.iterator();
while (true) {
if (leftIt.hasNext()) {
if (!rightIt.hasNext()) {
return false;
}
Object o1 = leftIt.next();
Object o2 = rightIt.next();
if (Objects.equals(o1, o2)) {
continue;
}
return false;
}
return !rightIt.hasNext();
}
}
public static <T> T getFirst(Iterable<T> collection, T defaultValue) {
Objects.requireNonNull(collection);
Iterator<T> iterator = collection.iterator();
return iterator.hasNext() ? iterator.next() : defaultValue;
}
public static <T> T get(Iterable<T> iterable, int position) {
Objects.requireNonNull(iterable);
if (position < 0) {
throw new IllegalArgumentException("position >= 0");
}
if (iterable instanceof List) {
List<T> list = (List<T>)iterable;
if (position >= list.size()) {
throw new IndexOutOfBoundsException(Integer.toString(position));
}
return list.get(position);
} else {
Iterator<T> it = iterable.iterator();
for (int index = 0; index < position; index++) {
if (!it.hasNext()) {
throw new IndexOutOfBoundsException(Integer.toString(position));
}
it.next();
}
if (!it.hasNext()) {
throw new IndexOutOfBoundsException(Integer.toString(position));
}
return it.next();
}
}
}
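A usage sketch for the new helpers; it assumes org.elasticsearch.common.util.iterable.Iterables as defined above is on the classpath (the example class name is hypothetical):

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    import org.elasticsearch.common.util.iterable.Iterables;

    public class IterablesExample {
        public static void main(String[] args) {
            Iterable<Integer> joined = Iterables.concat(Arrays.asList(1, 2), Arrays.asList(3));
            for (int value : joined) {
                System.out.print(value + " "); // 1 2 3
            }
            System.out.println();

            List<List<String>> nested = Arrays.asList(Arrays.asList("a"), Arrays.asList("b", "c"));
            System.out.println(Iterables.get(Iterables.flatten(nested), 2));                      // c
            System.out.println(Iterables.getFirst(Collections.<String>emptyList(), "fallback"));  // fallback
        }
    }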

View File

@ -18,28 +18,18 @@
*/ */
package org.elasticsearch.gateway; package org.elasticsearch.gateway;
import com.google.common.base.Preconditions;
import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.*;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.OutputStreamIndexOutput;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.*;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
@ -99,8 +89,12 @@ public abstract class MetaDataStateFormat<T> {
* @throws IOException if an IOException occurs * @throws IOException if an IOException occurs
*/ */
public final void write(final T state, final long version, final Path... locations) throws IOException { public final void write(final T state, final long version, final Path... locations) throws IOException {
Preconditions.checkArgument(locations != null, "Locations must not be null"); if (locations == null) {
Preconditions.checkArgument(locations.length > 0, "One or more locations required"); throw new IllegalArgumentException("Locations must not be null");
}
if (locations.length <= 0) {
throw new IllegalArgumentException("One or more locations required");
}
final long maxStateId = findMaxStateId(prefix, locations)+1; final long maxStateId = findMaxStateId(prefix, locations)+1;
assert maxStateId >= 0 : "maxStateId must be positive but was: [" + maxStateId + "]"; assert maxStateId >= 0 : "maxStateId must be positive but was: [" + maxStateId + "]";
final String fileName = prefix + maxStateId + STATE_FILE_EXTENSION; final String fileName = prefix + maxStateId + STATE_FILE_EXTENSION;

View File

@ -20,16 +20,17 @@
package org.elasticsearch.http; package org.elasticsearch.http;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.io.ByteStreams;
import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.node.service.NodeService; import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.rest.*; import org.elasticsearch.rest.*;
import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.nio.file.*; import java.nio.file.*;
@ -141,8 +142,9 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
if (request.method() == RestRequest.Method.GET) { if (request.method() == RestRequest.Method.GET) {
try { try {
try (InputStream stream = getClass().getResourceAsStream("/config/favicon.ico")) { try (InputStream stream = getClass().getResourceAsStream("/config/favicon.ico")) {
byte[] content = ByteStreams.toByteArray(stream); ByteArrayOutputStream out = new ByteArrayOutputStream();
BytesRestResponse restResponse = new BytesRestResponse(RestStatus.OK, "image/x-icon", content); Streams.copy(stream, out);
BytesRestResponse restResponse = new BytesRestResponse(RestStatus.OK, "image/x-icon", out.toByteArray());
channel.sendResponse(restResponse); channel.sendResponse(restResponse);
} }
} catch (IOException e) { } catch (IOException e) {
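The favicon handler now drains the resource stream into a ByteArrayOutputStream via the project's Streams.copy instead of Guava's ByteStreams.toByteArray. A plain-JDK sketch of the same idea, independent of either helper (names are hypothetical):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class ToByteArraySketch {
        static byte[] toByteArray(InputStream stream) throws IOException {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            byte[] buffer = new byte[8192];
            int read;
            while ((read = stream.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
            return out.toByteArray();
        }

        public static void main(String[] args) throws IOException {
            byte[] content = toByteArray(new ByteArrayInputStream(new byte[] {1, 2, 3}));
            System.out.println(content.length); // 3
        }
    }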

View File

@ -93,12 +93,6 @@ public final class EngineConfig {
*/ */
public static final String INDEX_GC_DELETES_SETTING = "index.gc_deletes"; public static final String INDEX_GC_DELETES_SETTING = "index.gc_deletes";
/**
* Index setting to control the initial index buffer size.
* This setting is <b>not</b> realtime updateable.
*/
public static final String INDEX_BUFFER_SIZE_SETTING = "index.buffer_size";
/** /**
* Index setting to change the low level lucene codec used for writing new segments. * Index setting to change the low level lucene codec used for writing new segments.
* This setting is <b>not</b> realtime updateable. * This setting is <b>not</b> realtime updateable.
@ -152,7 +146,7 @@ public final class EngineConfig {
this.optimizeAutoGenerateId = indexSettings.getAsBoolean(EngineConfig.INDEX_OPTIMIZE_AUTOGENERATED_ID_SETTING, false); this.optimizeAutoGenerateId = indexSettings.getAsBoolean(EngineConfig.INDEX_OPTIMIZE_AUTOGENERATED_ID_SETTING, false);
this.compoundOnFlush = indexSettings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, compoundOnFlush); this.compoundOnFlush = indexSettings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, compoundOnFlush);
codecName = indexSettings.get(EngineConfig.INDEX_CODEC_SETTING, EngineConfig.DEFAULT_CODEC_NAME); codecName = indexSettings.get(EngineConfig.INDEX_CODEC_SETTING, EngineConfig.DEFAULT_CODEC_NAME);
indexingBufferSize = indexSettings.getAsBytesSize(INDEX_BUFFER_SIZE_SETTING, DEFAULT_INDEX_BUFFER_SIZE); indexingBufferSize = DEFAULT_INDEX_BUFFER_SIZE;
gcDeletesInMillis = indexSettings.getAsTime(INDEX_GC_DELETES_SETTING, EngineConfig.DEFAULT_GC_DELETES).millis(); gcDeletesInMillis = indexSettings.getAsTime(INDEX_GC_DELETES_SETTING, EngineConfig.DEFAULT_GC_DELETES).millis();
versionMapSizeSetting = indexSettings.get(INDEX_VERSION_MAP_SIZE, DEFAULT_VERSION_MAP_SIZE); versionMapSizeSetting = indexSettings.get(INDEX_VERSION_MAP_SIZE, DEFAULT_VERSION_MAP_SIZE);
updateVersionMapSize(); updateVersionMapSize();
@ -167,7 +161,7 @@ public final class EngineConfig {
private void updateVersionMapSize() { private void updateVersionMapSize() {
if (versionMapSizeSetting.endsWith("%")) { if (versionMapSizeSetting.endsWith("%")) {
double percent = Double.parseDouble(versionMapSizeSetting.substring(0, versionMapSizeSetting.length() - 1)); double percent = Double.parseDouble(versionMapSizeSetting.substring(0, versionMapSizeSetting.length() - 1));
versionMapSize = new ByteSizeValue((long) (((double) indexingBufferSize.bytes() * (percent / 100)))); versionMapSize = new ByteSizeValue((long) ((double) indexingBufferSize.bytes() * (percent / 100)));
} else { } else {
versionMapSize = ByteSizeValue.parseBytesSizeValue(versionMapSizeSetting, INDEX_VERSION_MAP_SIZE); versionMapSize = ByteSizeValue.parseBytesSizeValue(versionMapSizeSetting, INDEX_VERSION_MAP_SIZE);
} }

View File

@ -666,7 +666,6 @@ public class InternalEngine extends Engine {
// since it flushes the index as well (though, in terms of concurrency, we are allowed to do it) // since it flushes the index as well (though, in terms of concurrency, we are allowed to do it)
try (ReleasableLock lock = readLock.acquire()) { try (ReleasableLock lock = readLock.acquire()) {
ensureOpen(); ensureOpen();
updateIndexWriterSettings();
searcherManager.maybeRefreshBlocking(); searcherManager.maybeRefreshBlocking();
} catch (AlreadyClosedException e) { } catch (AlreadyClosedException e) {
ensureOpen(); ensureOpen();
@ -736,7 +735,6 @@ public class InternalEngine extends Engine {
*/ */
try (ReleasableLock lock = readLock.acquire()) { try (ReleasableLock lock = readLock.acquire()) {
ensureOpen(); ensureOpen();
updateIndexWriterSettings();
if (flushLock.tryLock() == false) { if (flushLock.tryLock() == false) {
// if we can't get the lock right away we block if needed otherwise barf // if we can't get the lock right away we block if needed otherwise barf
if (waitIfOngoing) { if (waitIfOngoing) {
@ -954,7 +952,6 @@ public class InternalEngine extends Engine {
} }
} }
/** /**
* Closes the engine without acquiring the write lock. This should only be * Closes the engine without acquiring the write lock. This should only be
* called while the write lock is hold or in a disaster condition ie. if the engine * called while the write lock is hold or in a disaster condition ie. if the engine
@ -1168,8 +1165,6 @@ public class InternalEngine extends Engine {
return indexWriter.getConfig(); return indexWriter.getConfig();
} }
private final class EngineMergeScheduler extends ElasticsearchConcurrentMergeScheduler { private final class EngineMergeScheduler extends ElasticsearchConcurrentMergeScheduler {
private final AtomicInteger numMergesInFlight = new AtomicInteger(0); private final AtomicInteger numMergesInFlight = new AtomicInteger(0);
private final AtomicBoolean isThrottling = new AtomicBoolean(); private final AtomicBoolean isThrottling = new AtomicBoolean();
@ -1245,11 +1240,14 @@ public class InternalEngine extends Engine {
public void onSettingsChanged() { public void onSettingsChanged() {
mergeScheduler.refreshConfig(); mergeScheduler.refreshConfig();
updateIndexWriterSettings();
// config().getVersionMapSize() may have changed:
checkVersionMapRefresh();
// config().isEnableGcDeletes() or config.getGcDeletesInMillis() may have changed:
maybePruneDeletedTombstones();
} }
public MergeStats getMergeStats() { public MergeStats getMergeStats() {
return mergeScheduler.stats(); return mergeScheduler.stats();
} }
} }

View File

@ -19,8 +19,6 @@
package org.elasticsearch.index.fielddata.plain; package org.elasticsearch.index.fielddata.plain;
import com.google.common.base.Preconditions;
import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
@ -52,7 +50,9 @@ public class BinaryDVNumericIndexFieldData extends DocValuesIndexFieldData imple
public BinaryDVNumericIndexFieldData(Index index, Names fieldNames, NumericType numericType, FieldDataType fieldDataType) { public BinaryDVNumericIndexFieldData(Index index, Names fieldNames, NumericType numericType, FieldDataType fieldDataType) {
super(index, fieldNames, fieldDataType); super(index, fieldNames, fieldDataType);
Preconditions.checkArgument(numericType != null, "numericType must be non-null"); if (numericType == null) {
throw new IllegalArgumentException("numericType must be non-null");
}
this.numericType = numericType; this.numericType = numericType;
} }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.fielddata.plain; package org.elasticsearch.index.fielddata.plain;
import com.google.common.base.Preconditions;
import org.apache.lucene.index.*; import org.apache.lucene.index.*;
import org.apache.lucene.util.*; import org.apache.lucene.util.*;
import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSet;
@ -73,7 +72,9 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
CircuitBreakerService breakerService) { CircuitBreakerService breakerService) {
super(index, indexSettings, fieldNames, fieldDataType, cache); super(index, indexSettings, fieldNames, fieldDataType, cache);
Objects.requireNonNull(numericType); Objects.requireNonNull(numericType);
Preconditions.checkArgument(EnumSet.of(NumericType.BOOLEAN, NumericType.BYTE, NumericType.SHORT, NumericType.INT, NumericType.LONG).contains(numericType), getClass().getSimpleName() + " only supports integer types, not " + numericType); if (!EnumSet.of(NumericType.BOOLEAN, NumericType.BYTE, NumericType.SHORT, NumericType.INT, NumericType.LONG).contains(numericType)) {
throw new IllegalArgumentException(getClass().getSimpleName() + " only supports integer types, not " + numericType);
}
this.numericType = numericType; this.numericType = numericType;
this.breakerService = breakerService; this.breakerService = breakerService;
} }

View File

@ -19,24 +19,12 @@
package org.elasticsearch.index.fielddata.plain; package org.elasticsearch.index.fielddata.plain;
import com.google.common.base.Preconditions; import org.apache.lucene.index.*;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource; import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
@ -56,7 +44,9 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple
public SortedNumericDVIndexFieldData(Index index, Names fieldNames, NumericType numericType, FieldDataType fieldDataType) { public SortedNumericDVIndexFieldData(Index index, Names fieldNames, NumericType numericType, FieldDataType fieldDataType) {
super(index, fieldNames, fieldDataType); super(index, fieldNames, fieldDataType);
Preconditions.checkArgument(numericType != null, "numericType must be non-null"); if (numericType == null) {
throw new IllegalArgumentException("numericType must be non-null");
}
this.numericType = numericType; this.numericType = numericType;
} }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.mapper.internal; package org.elasticsearch.index.mapper.internal;
import com.google.common.collect.UnmodifiableIterator;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
@ -240,7 +239,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
return new Iterable<String>() { return new Iterable<String>() {
@Override @Override
public Iterator<String> iterator() { public Iterator<String> iterator() {
return new UnmodifiableIterator<String>() { return new Iterator<String>() {
int endIndex = nextEndIndex(0); int endIndex = nextEndIndex(0);
@ -263,6 +262,11 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
return result; return result;
} }
@Override
public final void remove() {
throw new UnsupportedOperationException();
}
}; };
} }
}; };

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.mapper.internal; package org.elasticsearch.index.mapper.internal;
import com.google.common.collect.Iterables;
import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
@ -38,6 +37,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType; import org.elasticsearch.index.fielddata.FieldDataType;

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.mapper.object; package org.elasticsearch.index.mapper.object;
import com.google.common.collect.Iterables;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter; import org.apache.lucene.search.Filter;
import org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.QueryWrapperFilter;
@ -33,30 +32,13 @@ import org.elasticsearch.common.collect.CopyOnWriteHashMap;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperUtils;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.settings.IndexSettings; import org.elasticsearch.index.settings.IndexSettings;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.*;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.index.mapper.MapperBuilders.object; import static org.elasticsearch.index.mapper.MapperBuilders.object;
@ -583,7 +565,7 @@ public class ObjectMapper extends Mapper implements AllFieldMapper.IncludeInAll,
doXContent(builder, params); doXContent(builder, params);
// sort the mappers so we get consistent serialization format // sort the mappers so we get consistent serialization format
Mapper[] sortedMappers = Iterables.toArray(mappers.values(), Mapper.class); Mapper[] sortedMappers = mappers.values().stream().toArray(size -> new Mapper[size]);
Arrays.sort(sortedMappers, new Comparator<Mapper>() { Arrays.sort(sortedMappers, new Comparator<Mapper>() {
@Override @Override
public int compare(Mapper o1, Mapper o2) { public int compare(Mapper o1, Mapper o2) {
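Guava's Iterables.toArray is replaced above by Stream.toArray with an array-allocating function. A small standalone check of that idiom (class name hypothetical); size -> new Mapper[size] could equally be written as the method reference Mapper[]::new:

    import java.util.Arrays;
    import java.util.List;

    public class StreamToArraySketch {
        public static void main(String[] args) {
            List<String> values = Arrays.asList("b", "a", "c");
            String[] sorted = values.stream().toArray(size -> new String[size]);
            Arrays.sort(sorted);
            System.out.println(Arrays.toString(sorted)); // [a, b, c]
        }
    }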

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException; import java.io.IOException;

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.query.functionscore.random; package org.elasticsearch.index.query.functionscore.random;
import com.google.common.primitives.Longs;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
@ -66,7 +65,7 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
if (parser.numberType() == XContentParser.NumberType.INT) { if (parser.numberType() == XContentParser.NumberType.INT) {
seed = parser.intValue(); seed = parser.intValue();
} else if (parser.numberType() == XContentParser.NumberType.LONG) { } else if (parser.numberType() == XContentParser.NumberType.LONG) {
seed = Longs.hashCode(parser.longValue()); seed = hash(parser.longValue());
} else { } else {
throw new QueryParsingException(parseContext, "random_score seed must be an int, long or string, not '" throw new QueryParsingException(parseContext, "random_score seed must be an int, long or string, not '"
+ token.toString() + "'"); + token.toString() + "'");
@ -90,7 +89,7 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
} }
if (seed == -1) { if (seed == -1) {
seed = Longs.hashCode(context.nowInMillis()); seed = hash(context.nowInMillis());
} }
final ShardId shardId = SearchContext.current().indexShard().shardId(); final ShardId shardId = SearchContext.current().indexShard().shardId();
final int salt = (shardId.index().name().hashCode() << 10) | shardId.id(); final int salt = (shardId.index().name().hashCode() << 10) | shardId.id();
@ -98,4 +97,8 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
return new RandomScoreFunction(seed, salt, uidFieldData); return new RandomScoreFunction(seed, salt, uidFieldData);
} }
private static final int hash(long value) {
return (int) (value ^ (value >>> 32));
}
} }
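The new private hash(long) folds a long into an int the same way Guava's Longs.hashCode (and Long.hashCode since Java 8) does: XOR of the upper and lower 32 bits. A standalone check (class name hypothetical):

    public class LongHashSketch {
        static int hash(long value) {
            return (int) (value ^ (value >>> 32));
        }

        public static void main(String[] args) {
            long now = 1442397714000L; // an arbitrary millisecond timestamp
            System.out.println(hash(now) == Long.valueOf(now).hashCode()); // true
        }
    }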

View File

@ -116,9 +116,6 @@ import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
/**
*
*/
public class IndexShard extends AbstractIndexShardComponent { public class IndexShard extends AbstractIndexShardComponent {
private final ThreadPool threadPool; private final ThreadPool threadPool;
@ -986,15 +983,27 @@ public class IndexShard extends AbstractIndexShardComponent {
} }
public void updateBufferSize(ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) { public void updateBufferSize(ByteSizeValue shardIndexingBufferSize, ByteSizeValue shardTranslogBufferSize) {
final EngineConfig config = engineConfig; final EngineConfig config = engineConfig;
final ByteSizeValue preValue = config.getIndexingBufferSize(); final ByteSizeValue preValue = config.getIndexingBufferSize();
config.setIndexingBufferSize(shardIndexingBufferSize); config.setIndexingBufferSize(shardIndexingBufferSize);
Engine engine = engineUnsafe();
if (engine == null) {
logger.debug("updateBufferSize: engine is closed; skipping");
return;
}
// update engine if it is already started. // update engine if it is already started.
if (preValue.bytes() != shardIndexingBufferSize.bytes() && engineUnsafe() != null) { if (preValue.bytes() != shardIndexingBufferSize.bytes()) {
// its inactive, make sure we do a refresh / full IW flush in this case, since the memory // so we push these changes down to IndexWriter:
engine.onSettingsChanged();
if (shardIndexingBufferSize == EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER) {
// it's inactive: make sure we do a refresh / full IW flush in this case, since the memory
// changes only after a "data" change has happened to the writer // changes only after a "data" change has happened to the writer
// the index writer lazily allocates memory and a refresh will clean it all up. // the index writer lazily allocates memory and a refresh will clean it all up.
if (shardIndexingBufferSize == EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER && preValue != EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER) {
logger.debug("updating index_buffer_size from [{}] to (inactive) [{}]", preValue, shardIndexingBufferSize); logger.debug("updating index_buffer_size from [{}] to (inactive) [{}]", preValue, shardIndexingBufferSize);
try { try {
refresh("update index buffer"); refresh("update index buffer");
@ -1005,11 +1014,9 @@ public class IndexShard extends AbstractIndexShardComponent {
logger.debug("updating index_buffer_size from [{}] to [{}]", preValue, shardIndexingBufferSize); logger.debug("updating index_buffer_size from [{}] to [{}]", preValue, shardIndexingBufferSize);
} }
} }
Engine engine = engineUnsafe();
if (engine != null) {
engine.getTranslog().updateBuffer(shardTranslogBufferSize); engine.getTranslog().updateBuffer(shardTranslogBufferSize);
} }
}
public void markAsInactive() { public void markAsInactive() {
updateBufferSize(EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER, TranslogConfig.INACTIVE_SHARD_TRANSLOG_BUFFER); updateBufferSize(EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER, TranslogConfig.INACTIVE_SHARD_TRANSLOG_BUFFER);
@ -1130,7 +1137,7 @@ public class IndexShard extends AbstractIndexShardComponent {
searchService.onRefreshSettings(settings); searchService.onRefreshSettings(settings);
indexingService.onRefreshSettings(settings); indexingService.onRefreshSettings(settings);
if (change) { if (change) {
refresh("apply settings"); engine().onSettingsChanged();
} }
} }
} }
@ -1268,6 +1275,8 @@ public class IndexShard extends AbstractIndexShardComponent {
return engine; return engine;
} }
/** NOTE: returns null if engine is not yet started (e.g. recovery phase 1, copying over index files, is still running), or if engine is
* closed. */
protected Engine engineUnsafe() { protected Engine engineUnsafe() {
return this.currentEngineReference.get(); return this.currentEngineReference.get();
} }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.index.snapshots.blobstore; package org.elasticsearch.index.snapshots.blobstore;
import com.google.common.collect.Iterables;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.index.IndexFormatTooOldException;
@ -48,14 +47,11 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit; import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.index.snapshots.*;
import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException;
import org.elasticsearch.index.snapshots.IndexShardSnapshotException;
import org.elasticsearch.index.snapshots.IndexShardSnapshotFailedException;
import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo;
import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.index.store.StoreFileMetaData;
@ -71,11 +67,7 @@ import org.elasticsearch.repositories.blobstore.LegacyBlobStoreFormat;
import java.io.FilterInputStream; import java.io.FilterInputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.util.ArrayList; import java.util.*;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.testBlobPrefix; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.testBlobPrefix;

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.store; package org.elasticsearch.index.store;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.*; import org.apache.lucene.index.*;
import org.apache.lucene.store.*; import org.apache.lucene.store.*;
@ -29,9 +28,11 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
import org.apache.lucene.util.Version; import org.apache.lucene.util.Version;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
@ -42,6 +43,7 @@ import org.elasticsearch.common.lucene.store.InputStreamIndexInput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.Callback; import org.elasticsearch.common.util.Callback;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.util.SingleObjectCache; import org.elasticsearch.common.util.SingleObjectCache;
import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
import org.elasticsearch.common.util.concurrent.RefCounted; import org.elasticsearch.common.util.concurrent.RefCounted;
@ -83,11 +85,12 @@ import java.util.zip.Checksum;
*/ */
public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted { public class Store extends AbstractIndexShardComponent implements Closeable, RefCounted {
private static final String CODEC = "store"; static final String CODEC = "store";
private static final int VERSION_STACK_TRACE = 1; // we write the stack trace too since 1.4.0 static final int VERSION_WRITE_THROWABLE= 2; // we write throwable since 2.0
private static final int VERSION_START = 0; static final int VERSION_STACK_TRACE = 1; // we write the stack trace too since 1.4.0
private static final int VERSION = VERSION_STACK_TRACE; static final int VERSION_START = 0;
private static final String CORRUPTED = "corrupted_"; static final int VERSION = VERSION_WRITE_THROWABLE;
static final String CORRUPTED = "corrupted_";
public static final String INDEX_STORE_STATS_REFRESH_INTERVAL = "index.store.stats_refresh_interval"; public static final String INDEX_STORE_STATS_REFRESH_INTERVAL = "index.store.stats_refresh_interval";
private final AtomicBoolean isClosed = new AtomicBoolean(false); private final AtomicBoolean isClosed = new AtomicBoolean(false);
@ -562,6 +565,20 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
if (file.startsWith(CORRUPTED)) { if (file.startsWith(CORRUPTED)) {
try (ChecksumIndexInput input = directory.openChecksumInput(file, IOContext.READONCE)) { try (ChecksumIndexInput input = directory.openChecksumInput(file, IOContext.READONCE)) {
int version = CodecUtil.checkHeader(input, CODEC, VERSION_START, VERSION); int version = CodecUtil.checkHeader(input, CODEC, VERSION_START, VERSION);
if (version == VERSION_WRITE_THROWABLE) {
final int size = input.readVInt();
final byte[] buffer = new byte[size];
input.readBytes(buffer, 0, buffer.length);
StreamInput in = StreamInput.wrap(buffer);
Throwable t = in.readThrowable();
if (t instanceof CorruptIndexException) {
ex.add((CorruptIndexException) t);
} else {
ex.add(new CorruptIndexException(t.getMessage(), "preexisting_corruption", t));
}
} else {
assert version == VERSION_START || version == VERSION_STACK_TRACE;
String msg = input.readString(); String msg = input.readString();
StringBuilder builder = new StringBuilder(shardId.toString()); StringBuilder builder = new StringBuilder(shardId.toString());
builder.append(" Preexisting corrupted index ["); builder.append(" Preexisting corrupted index [");
@ -572,6 +589,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
builder.append(input.readString()); builder.append(input.readString());
} }
ex.add(new CorruptIndexException(builder.toString(), "preexisting_corruption")); ex.add(new CorruptIndexException(builder.toString(), "preexisting_corruption"));
}
CodecUtil.checkFooter(input); CodecUtil.checkFooter(input);
} }
} }
@ -1446,8 +1464,11 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
String uuid = CORRUPTED + Strings.randomBase64UUID(); String uuid = CORRUPTED + Strings.randomBase64UUID();
try (IndexOutput output = this.directory().createOutput(uuid, IOContext.DEFAULT)) { try (IndexOutput output = this.directory().createOutput(uuid, IOContext.DEFAULT)) {
CodecUtil.writeHeader(output, CODEC, VERSION); CodecUtil.writeHeader(output, CODEC, VERSION);
output.writeString(ExceptionsHelper.detailedMessage(exception, true, 0)); // handles null exception BytesStreamOutput out = new BytesStreamOutput();
output.writeString(ExceptionsHelper.stackTrace(exception)); out.writeThrowable(exception);
BytesReference bytes = out.bytes();
output.writeVInt(bytes.length());
output.writeBytes(bytes.array(), bytes.arrayOffset(), bytes.length());
CodecUtil.writeFooter(output); CodecUtil.writeFooter(output);
} catch (IOException ex) { } catch (IOException ex) {
logger.warn("Can't mark store as corrupted", ex); logger.warn("Can't mark store as corrupted", ex);

View File

@ -19,8 +19,8 @@
package org.elasticsearch.indices.flush; package org.elasticsearch.indices.flush;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString; import org.elasticsearch.common.xcontent.XContentBuilderString;
@ -41,7 +41,7 @@ public class IndicesSyncedFlushResult implements ToXContent {
public IndicesSyncedFlushResult(Map<String, List<ShardsSyncedFlushResult>> shardsResultPerIndex) { public IndicesSyncedFlushResult(Map<String, List<ShardsSyncedFlushResult>> shardsResultPerIndex) {
this.shardsResultPerIndex = ImmutableMap.copyOf(shardsResultPerIndex); this.shardsResultPerIndex = ImmutableMap.copyOf(shardsResultPerIndex);
this.shardCounts = calculateShardCounts(Iterables.concat(shardsResultPerIndex.values())); this.shardCounts = calculateShardCounts(Iterables.flatten(shardsResultPerIndex.values()));
} }
/** total number shards, including replicas, both assigned and unassigned */ /** total number shards, including replicas, both assigned and unassigned */
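The call changes from concat to flatten because the new Iterables.concat (introduced earlier in this diff) is a varargs method, while Guava's concat also offered an overload taking an Iterable of Iterables; flatten is the replacement for that overload. A small sketch with the same shape of input as shardsResultPerIndex.values() (class name hypothetical; assumes the new Iterables class is on the classpath):

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.List;

    import org.elasticsearch.common.util.iterable.Iterables;

    public class FlattenVsConcat {
        public static void main(String[] args) {
            Collection<List<String>> perIndex = Arrays.asList(
                    Arrays.asList("shard-0", "shard-1"),
                    Arrays.asList("shard-2"));
            int count = 0;
            for (String shard : Iterables.flatten(perIndex)) {
                count++;
            }
            System.out.println(count); // 3, one entry per shard result
        }
    }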

View File

@ -51,6 +51,42 @@ import java.util.concurrent.ScheduledFuture;
*/ */
public class IndexingMemoryController extends AbstractLifecycleComponent<IndexingMemoryController> { public class IndexingMemoryController extends AbstractLifecycleComponent<IndexingMemoryController> {
/** How much heap (% or bytes) we will share across all actively indexing shards on this node (default: 10%). */
public static final String INDEX_BUFFER_SIZE_SETTING = "indices.memory.index_buffer_size";
/** Only applies when <code>indices.memory.index_buffer_size</code> is a %, to set a floor on the actual size in bytes (default: 48 MB). */
public static final String MIN_INDEX_BUFFER_SIZE_SETTING = "indices.memory.min_index_buffer_size";
/** Only applies when <code>indices.memory.index_buffer_size</code> is a %, to set a ceiling on the actual size in bytes (default: not set). */
public static final String MAX_INDEX_BUFFER_SIZE_SETTING = "indices.memory.max_index_buffer_size";
/** Sets a floor on the per-shard index buffer size (default: 4 MB). */
public static final String MIN_SHARD_INDEX_BUFFER_SIZE_SETTING = "indices.memory.min_shard_index_buffer_size";
/** Sets a ceiling on the per-shard index buffer size (default: 512 MB). */
public static final String MAX_SHARD_INDEX_BUFFER_SIZE_SETTING = "indices.memory.max_shard_index_buffer_size";
/** How much heap (% or bytes) we will share across all actively indexing shards for the translog buffer (default: 1%). */
public static final String TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.translog_buffer_size";
/** Only applies when <code>indices.memory.translog_buffer_size</code> is a %, to set a floor on the actual size in bytes (default: 256 KB). */
public static final String MIN_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.min_translog_buffer_size";
/** Only applies when <code>indices.memory.translog_buffer_size</code> is a %, to set a ceiling on the actual size in bytes (default: not set). */
public static final String MAX_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.max_translog_buffer_size";
/** Sets a floor on the per-shard translog buffer size (default: 2 KB). */
public static final String MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.min_shard_translog_buffer_size";
/** Sets a ceiling on the per-shard translog buffer size (default: 64 KB). */
public static final String MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING = "indices.memory.max_shard_translog_buffer_size";
/** If we see no indexing operations after this much time for a given shard, we consider that shard inactive (default: 5 minutes). */
public static final String SHARD_INACTIVE_TIME_SETTING = "indices.memory.shard_inactive_time";
/** How frequently we check shards to find inactive ones (default: 30 seconds). */
public static final String SHARD_INACTIVE_INTERVAL_TIME_SETTING = "indices.memory.interval";
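
The constants above only give names to the existing indices.memory.* settings; the defaults and behaviour are unchanged. A minimal sketch of overriding a few of them at node construction time, assuming the Settings.settingsBuilder() entry point used elsewhere in this code base (the values are illustrative, and the same keys can equally be set in elasticsearch.yml):

import org.elasticsearch.common.settings.Settings;

// Illustrative only: the keys come from the constants above, the values are made up.
Settings settings = Settings.settingsBuilder()
        .put(IndexingMemoryController.INDEX_BUFFER_SIZE_SETTING, "15%")            // heap share for indexing buffers
        .put(IndexingMemoryController.MIN_INDEX_BUFFER_SIZE_SETTING, "96mb")       // floor when the size is a percentage
        .put(IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING, "10m")          // idle time before a shard is marked inactive
        .put(IndexingMemoryController.SHARD_INACTIVE_INTERVAL_TIME_SETTING, "60s") // how often idle shards are checked for
        .build();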
private final ThreadPool threadPool; private final ThreadPool threadPool;
private final IndicesService indicesService; private final IndicesService indicesService;
@ -77,12 +113,12 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
this.indicesService = indicesService; this.indicesService = indicesService;
ByteSizeValue indexingBuffer; ByteSizeValue indexingBuffer;
String indexingBufferSetting = this.settings.get("indices.memory.index_buffer_size", "10%"); String indexingBufferSetting = this.settings.get(INDEX_BUFFER_SIZE_SETTING, "10%");
if (indexingBufferSetting.endsWith("%")) { if (indexingBufferSetting.endsWith("%")) {
double percent = Double.parseDouble(indexingBufferSetting.substring(0, indexingBufferSetting.length() - 1)); double percent = Double.parseDouble(indexingBufferSetting.substring(0, indexingBufferSetting.length() - 1));
indexingBuffer = new ByteSizeValue((long) (((double) JvmInfo.jvmInfo().getMem().getHeapMax().bytes()) * (percent / 100))); indexingBuffer = new ByteSizeValue((long) (((double) JvmInfo.jvmInfo().getMem().getHeapMax().bytes()) * (percent / 100)));
ByteSizeValue minIndexingBuffer = this.settings.getAsBytesSize("indices.memory.min_index_buffer_size", new ByteSizeValue(48, ByteSizeUnit.MB)); ByteSizeValue minIndexingBuffer = this.settings.getAsBytesSize(MIN_INDEX_BUFFER_SIZE_SETTING, new ByteSizeValue(48, ByteSizeUnit.MB));
ByteSizeValue maxIndexingBuffer = this.settings.getAsBytesSize("indices.memory.max_index_buffer_size", null); ByteSizeValue maxIndexingBuffer = this.settings.getAsBytesSize(MAX_INDEX_BUFFER_SIZE_SETTING, null);
if (indexingBuffer.bytes() < minIndexingBuffer.bytes()) { if (indexingBuffer.bytes() < minIndexingBuffer.bytes()) {
indexingBuffer = minIndexingBuffer; indexingBuffer = minIndexingBuffer;
@ -91,20 +127,20 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
indexingBuffer = maxIndexingBuffer; indexingBuffer = maxIndexingBuffer;
} }
} else { } else {
indexingBuffer = ByteSizeValue.parseBytesSizeValue(indexingBufferSetting, null); indexingBuffer = ByteSizeValue.parseBytesSizeValue(indexingBufferSetting, INDEX_BUFFER_SIZE_SETTING);
} }
this.indexingBuffer = indexingBuffer; this.indexingBuffer = indexingBuffer;
this.minShardIndexBufferSize = this.settings.getAsBytesSize("indices.memory.min_shard_index_buffer_size", new ByteSizeValue(4, ByteSizeUnit.MB)); this.minShardIndexBufferSize = this.settings.getAsBytesSize(MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, new ByteSizeValue(4, ByteSizeUnit.MB));
// LUCENE MONITOR: Based on this thread, currently (based on Mike), having a large buffer does not make a lot of sense: https://issues.apache.org/jira/browse/LUCENE-2324?focusedCommentId=13005155&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-13005155 // LUCENE MONITOR: Based on this thread, currently (based on Mike), having a large buffer does not make a lot of sense: https://issues.apache.org/jira/browse/LUCENE-2324?focusedCommentId=13005155&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-13005155
this.maxShardIndexBufferSize = this.settings.getAsBytesSize("indices.memory.max_shard_index_buffer_size", new ByteSizeValue(512, ByteSizeUnit.MB)); this.maxShardIndexBufferSize = this.settings.getAsBytesSize(MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, new ByteSizeValue(512, ByteSizeUnit.MB));
ByteSizeValue translogBuffer; ByteSizeValue translogBuffer;
String translogBufferSetting = this.settings.get("indices.memory.translog_buffer_size", "1%"); String translogBufferSetting = this.settings.get(TRANSLOG_BUFFER_SIZE_SETTING, "1%");
if (translogBufferSetting.endsWith("%")) { if (translogBufferSetting.endsWith("%")) {
double percent = Double.parseDouble(translogBufferSetting.substring(0, translogBufferSetting.length() - 1)); double percent = Double.parseDouble(translogBufferSetting.substring(0, translogBufferSetting.length() - 1));
translogBuffer = new ByteSizeValue((long) (((double) JvmInfo.jvmInfo().getMem().getHeapMax().bytes()) * (percent / 100))); translogBuffer = new ByteSizeValue((long) (((double) JvmInfo.jvmInfo().getMem().getHeapMax().bytes()) * (percent / 100)));
ByteSizeValue minTranslogBuffer = this.settings.getAsBytesSize("indices.memory.min_translog_buffer_size", new ByteSizeValue(256, ByteSizeUnit.KB)); ByteSizeValue minTranslogBuffer = this.settings.getAsBytesSize(MIN_TRANSLOG_BUFFER_SIZE_SETTING, new ByteSizeValue(256, ByteSizeUnit.KB));
ByteSizeValue maxTranslogBuffer = this.settings.getAsBytesSize("indices.memory.max_translog_buffer_size", null); ByteSizeValue maxTranslogBuffer = this.settings.getAsBytesSize(MAX_TRANSLOG_BUFFER_SIZE_SETTING, null);
if (translogBuffer.bytes() < minTranslogBuffer.bytes()) { if (translogBuffer.bytes() < minTranslogBuffer.bytes()) {
translogBuffer = minTranslogBuffer; translogBuffer = minTranslogBuffer;
@ -116,15 +152,19 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
translogBuffer = ByteSizeValue.parseBytesSizeValue(translogBufferSetting, null); translogBuffer = ByteSizeValue.parseBytesSizeValue(translogBufferSetting, null);
} }
this.translogBuffer = translogBuffer; this.translogBuffer = translogBuffer;
this.minShardTranslogBufferSize = this.settings.getAsBytesSize("indices.memory.min_shard_translog_buffer_size", new ByteSizeValue(2, ByteSizeUnit.KB)); this.minShardTranslogBufferSize = this.settings.getAsBytesSize(MIN_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, new ByteSizeValue(2, ByteSizeUnit.KB));
this.maxShardTranslogBufferSize = this.settings.getAsBytesSize("indices.memory.max_shard_translog_buffer_size", new ByteSizeValue(64, ByteSizeUnit.KB)); this.maxShardTranslogBufferSize = this.settings.getAsBytesSize(MAX_SHARD_TRANSLOG_BUFFER_SIZE_SETTING, new ByteSizeValue(64, ByteSizeUnit.KB));
this.inactiveTime = this.settings.getAsTime("indices.memory.shard_inactive_time", TimeValue.timeValueMinutes(5)); this.inactiveTime = this.settings.getAsTime(SHARD_INACTIVE_TIME_SETTING, TimeValue.timeValueMinutes(5));
// we need to have this relatively small to move a shard from inactive to active fast (enough) // we need to have this relatively small to move a shard from inactive to active fast (enough)
this.interval = this.settings.getAsTime("indices.memory.interval", TimeValue.timeValueSeconds(30)); this.interval = this.settings.getAsTime(SHARD_INACTIVE_INTERVAL_TIME_SETTING, TimeValue.timeValueSeconds(30));
logger.debug("using index_buffer_size [{}], with min_shard_index_buffer_size [{}], max_shard_index_buffer_size [{}], shard_inactive_time [{}]", this.indexingBuffer, this.minShardIndexBufferSize, this.maxShardIndexBufferSize, this.inactiveTime);
logger.debug("using indexing buffer size [{}], with {} [{}], {} [{}], {} [{}], {} [{}]",
this.indexingBuffer,
MIN_SHARD_INDEX_BUFFER_SIZE_SETTING, this.minShardIndexBufferSize,
MAX_SHARD_INDEX_BUFFER_SIZE_SETTING, this.maxShardIndexBufferSize,
SHARD_INACTIVE_TIME_SETTING, this.inactiveTime,
SHARD_INACTIVE_INTERVAL_TIME_SETTING, this.interval);
} }
@Override @Override
@ -155,12 +195,9 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
private final Map<ShardId, ShardIndexingStatus> shardsIndicesStatus = new HashMap<>(); private final Map<ShardId, ShardIndexingStatus> shardsIndicesStatus = new HashMap<>();
@Override @Override
public void run() { public void run() {
EnumSet<ShardStatusChangeType> changes = EnumSet.noneOf(ShardStatusChangeType.class); EnumSet<ShardStatusChangeType> changes = purgeDeletedAndClosedShards();
changes.addAll(purgeDeletedAndClosedShards());
final List<IndexShard> activeToInactiveIndexingShards = new ArrayList<>(); final List<IndexShard> activeToInactiveIndexingShards = new ArrayList<>();
final int activeShards = updateShardStatuses(changes, activeToInactiveIndexingShards); final int activeShards = updateShardStatuses(changes, activeToInactiveIndexingShards);
@ -170,11 +207,15 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
indexShard.markAsInactive(); indexShard.markAsInactive();
} catch (EngineClosedException e) { } catch (EngineClosedException e) {
// ignore // ignore
logger.trace("ignore EngineClosedException while marking shard [{}][{}] as inactive", indexShard.shardId().index().name(), indexShard.shardId().id());
} catch (FlushNotAllowedEngineException e) { } catch (FlushNotAllowedEngineException e) {
// ignore // ignore
logger.trace("ignore FlushNotAllowedException while marking shard [{}][{}] as inactive", indexShard.shardId().index().name(), indexShard.shardId().id());
} }
} }
if (!changes.isEmpty()) {
if (changes.isEmpty() == false) {
// Something changed: recompute indexing buffers:
calcAndSetShardBuffers(activeShards, "[" + changes + "]"); calcAndSetShardBuffers(activeShards, "[" + changes + "]");
} }
} }
@ -190,23 +231,24 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
for (IndexShard indexShard : indexService) { for (IndexShard indexShard : indexService) {
if (!CAN_UPDATE_INDEX_BUFFER_STATES.contains(indexShard.state())) { if (!CAN_UPDATE_INDEX_BUFFER_STATES.contains(indexShard.state())) {
// not ready to be updated yet. // not ready to be updated yet
continue; continue;
} }
if (indexShard.canIndex() == false) { if (indexShard.canIndex() == false) {
// not relevant for memory related issues. // shadow replica doesn't have an indexing buffer
continue; continue;
} }
final Translog translog; final Translog translog;
try { try {
translog = indexShard.engine().getTranslog(); translog = indexShard.engine().getTranslog();
} catch (EngineClosedException e) { } catch (EngineClosedException e) {
// not ready yet to be checked for in activity // not ready yet to be checked for activity
continue; continue;
} }
final long time = threadPool.estimatedTimeInMillis(); final long timeMS = threadPool.estimatedTimeInMillis();
ShardIndexingStatus status = shardsIndicesStatus.get(indexShard.shardId()); ShardIndexingStatus status = shardsIndicesStatus.get(indexShard.shardId());
if (status == null) { if (status == null) {
@ -214,21 +256,22 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
shardsIndicesStatus.put(indexShard.shardId(), status); shardsIndicesStatus.put(indexShard.shardId(), status);
changes.add(ShardStatusChangeType.ADDED); changes.add(ShardStatusChangeType.ADDED);
} }
// check if it is deemed to be inactive (sam translogFileGeneration and numberOfOperations over a long period of time)
// consider shard inactive if it has same translogFileGeneration and no operations for a long time
if (status.translogId == translog.currentFileGeneration() && translog.totalOperations() == 0) { if (status.translogId == translog.currentFileGeneration() && translog.totalOperations() == 0) {
if (status.time == -1) { // first time if (status.timeMS == -1) {
status.time = time; // first time we noticed the shard become idle
status.timeMS = timeMS;
} }
// inactive? // mark it as inactive only if enough time has passed
if (status.activeIndexing) { if (status.activeIndexing && (timeMS - status.timeMS) > inactiveTime.millis()) {
// mark it as inactive only if enough time has passed and there are no ongoing merges going on...
if ((time - status.time) > inactiveTime.millis() && indexShard.mergeStats().getCurrent() == 0) {
// inactive for this amount of time, mark it // inactive for this amount of time, mark it
activeToInactiveIndexingShards.add(indexShard); activeToInactiveIndexingShards.add(indexShard);
status.activeIndexing = false; status.activeIndexing = false;
changes.add(ShardStatusChangeType.BECAME_INACTIVE); changes.add(ShardStatusChangeType.BECAME_INACTIVE);
logger.debug("marking shard [{}][{}] as inactive (inactive_time[{}]) indexing wise, setting size to [{}]", indexShard.shardId().index().name(), indexShard.shardId().id(), inactiveTime, EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER); logger.debug("marking shard [{}][{}] as inactive (inactive_time[{}]) indexing wise, setting size to [{}]",
} indexShard.shardId().index().name(), indexShard.shardId().id(),
inactiveTime, EngineConfig.INACTIVE_SHARD_INDEXING_BUFFER);
} }
} else { } else {
if (!status.activeIndexing) { if (!status.activeIndexing) {
@ -236,10 +279,9 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
changes.add(ShardStatusChangeType.BECAME_ACTIVE); changes.add(ShardStatusChangeType.BECAME_ACTIVE);
logger.debug("marking shard [{}][{}] as active indexing wise", indexShard.shardId().index().name(), indexShard.shardId().id()); logger.debug("marking shard [{}][{}] as active indexing wise", indexShard.shardId().index().name(), indexShard.shardId().id());
} }
status.time = -1; status.timeMS = -1;
} }
status.translogId = translog.currentFileGeneration(); status.translogId = translog.currentFileGeneration();
status.translogNumberOfOperations = translog.totalOperations();
if (status.activeIndexing) { if (status.activeIndexing) {
activeShards++; activeShards++;
@ -261,31 +303,28 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
while (statusShardIdIterator.hasNext()) { while (statusShardIdIterator.hasNext()) {
ShardId statusShardId = statusShardIdIterator.next(); ShardId statusShardId = statusShardIdIterator.next();
IndexService indexService = indicesService.indexService(statusShardId.getIndex()); IndexService indexService = indicesService.indexService(statusShardId.getIndex());
boolean remove = false; boolean remove;
try {
if (indexService == null) { if (indexService == null) {
remove = true; remove = true;
continue; } else {
}
IndexShard indexShard = indexService.shard(statusShardId.id()); IndexShard indexShard = indexService.shard(statusShardId.id());
if (indexShard == null) { if (indexShard == null) {
remove = true; remove = true;
continue; } else {
}
remove = !CAN_UPDATE_INDEX_BUFFER_STATES.contains(indexShard.state()); remove = !CAN_UPDATE_INDEX_BUFFER_STATES.contains(indexShard.state());
}
} finally { }
if (remove) { if (remove) {
changes.add(ShardStatusChangeType.DELETED); changes.add(ShardStatusChangeType.DELETED);
statusShardIdIterator.remove(); statusShardIdIterator.remove();
} }
} }
}
return changes; return changes;
} }
private void calcAndSetShardBuffers(int activeShards, String reason) { private void calcAndSetShardBuffers(int activeShards, String reason) {
if (activeShards == 0) { if (activeShards == 0) {
logger.debug("no active shards (reason={})", reason);
return; return;
} }
ByteSizeValue shardIndexingBufferSize = new ByteSizeValue(indexingBuffer.bytes() / activeShards); ByteSizeValue shardIndexingBufferSize = new ByteSizeValue(indexingBuffer.bytes() / activeShards);
@ -335,11 +374,9 @@ public class IndexingMemoryController extends AbstractLifecycleComponent<Indexin
ADDED, DELETED, BECAME_ACTIVE, BECAME_INACTIVE ADDED, DELETED, BECAME_ACTIVE, BECAME_INACTIVE
} }
private static class ShardIndexingStatus {
static class ShardIndexingStatus {
long translogId = -1; long translogId = -1;
int translogNumberOfOperations = -1;
boolean activeIndexing = true; boolean activeIndexing = true;
long time = -1; // contains the first time we saw this shard with no operations done on it long timeMS = -1; // contains the first time we saw this shard with no operations done on it
} }
} }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.indices.recovery; package org.elasticsearch.indices.recovery;
import com.google.common.collect.Iterables;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.index.IndexFormatTooOldException;
@ -39,6 +38,7 @@ import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.CancellableThreads; import org.elasticsearch.common.util.CancellableThreads;
import org.elasticsearch.common.util.CancellableThreads.Interruptable; import org.elasticsearch.common.util.CancellableThreads.Interruptable;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit; import org.elasticsearch.index.deletionpolicy.SnapshotIndexCommit;
import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.Engine;
@ -64,6 +64,7 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.StreamSupport;
/** /**
* RecoverySourceHandler handles the three phases of shard recovery, which is * RecoverySourceHandler handles the three phases of shard recovery, which is
@ -411,7 +412,8 @@ public class RecoverySourceHandler {
if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(remoteException)) != null) { if ((corruptIndexException = ExceptionsHelper.unwrapCorruption(remoteException)) != null) {
try { try {
final Store.MetadataSnapshot recoverySourceMetadata = store.getMetadata(snapshot); final Store.MetadataSnapshot recoverySourceMetadata = store.getMetadata(snapshot);
StoreFileMetaData[] metadata = Iterables.toArray(recoverySourceMetadata, StoreFileMetaData.class); StoreFileMetaData[] metadata =
StreamSupport.stream(recoverySourceMetadata.spliterator(), false).toArray(size -> new StoreFileMetaData[size]);
ArrayUtil.timSort(metadata, new Comparator<StoreFileMetaData>() { ArrayUtil.timSort(metadata, new Comparator<StoreFileMetaData>() {
@Override @Override
public int compare(StoreFileMetaData o1, StoreFileMetaData o2) { public int compare(StoreFileMetaData o1, StoreFileMetaData o2) {
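
The removed Guava Iterables.toArray call is replaced with plain JDK 8 streams. The same pattern converts any Iterable into a typed array; a standalone sketch with made-up file names:

import java.util.Arrays;
import java.util.stream.StreamSupport;

public class IterableToArrayExample {
    public static void main(String[] args) {
        Iterable<String> files = Arrays.asList("segments_1", "_0.cfs", "_0.cfe"); // illustrative Iterable
        // Equivalent of Guava's Iterables.toArray(files, String.class):
        String[] array = StreamSupport.stream(files.spliterator(), false)
                .toArray(String[]::new);
        System.out.println(array.length); // 3
    }
}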

View File

@ -19,14 +19,9 @@
package org.elasticsearch.repositories; package org.elasticsearch.repositories;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.ack.ClusterStateUpdateRequest; import org.elasticsearch.cluster.ack.ClusterStateUpdateRequest;
import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse; import org.elasticsearch.cluster.ack.ClusterStateUpdateResponse;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
@ -45,10 +40,8 @@ import org.elasticsearch.snapshots.SnapshotsService;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.*;
import java.util.HashMap; import java.util.stream.Collectors;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
@ -571,9 +564,10 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta
} }
public String failureDescription() { public String failureDescription() {
StringBuilder builder = new StringBuilder('['); return Arrays
Joiner.on(", ").appendTo(builder, failures); .stream(failures)
return builder.append(']').toString(); .map(failure -> failure.toString())
.collect(Collectors.joining(", ", "[", "]"));
} }
} }
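
The hand-rolled StringBuilder/Guava Joiner version is replaced with Collectors.joining, which also quietly fixes a bug: new StringBuilder('[') passes the char to the int-capacity constructor, so the old description never actually started with '['. A standalone sketch of the new formatting, with made-up failures:

import java.util.Arrays;
import java.util.stream.Collectors;

public class FailureDescriptionExample {
    public static void main(String[] args) {
        Object[] failures = { "node_1: verification timed out", "node_2: access denied" }; // illustrative
        String description = Arrays.stream(failures)
                .map(failure -> failure.toString())
                .collect(Collectors.joining(", ", "[", "]"));
        System.out.println(description); // [node_1: verification timed out, node_2: access denied]
    }
}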

View File

@ -31,13 +31,13 @@ public class VerificationFailure implements Streamable {
private String nodeId; private String nodeId;
private String cause; private Throwable cause;
VerificationFailure() { VerificationFailure() {
} }
public VerificationFailure(String nodeId, String cause) { public VerificationFailure(String nodeId, Throwable cause) {
this.nodeId = nodeId; this.nodeId = nodeId;
this.cause = cause; this.cause = cause;
} }
@ -46,20 +46,20 @@ public class VerificationFailure implements Streamable {
return nodeId; return nodeId;
} }
public String cause() { public Throwable cause() {
return cause; return cause;
} }
@Override @Override
public void readFrom(StreamInput in) throws IOException { public void readFrom(StreamInput in) throws IOException {
nodeId = in.readOptionalString(); nodeId = in.readOptionalString();
cause = in.readOptionalString(); cause = in.readThrowable();
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(nodeId); out.writeOptionalString(nodeId);
out.writeOptionalString(cause); out.writeThrowable(cause);
} }
public static VerificationFailure readNode(StreamInput in) throws IOException { public static VerificationFailure readNode(StreamInput in) throws IOException {

View File

@ -86,7 +86,7 @@ public class VerifyNodeRepositoryAction extends AbstractComponent {
doVerify(repository, verificationToken); doVerify(repository, verificationToken);
} catch (Throwable t) { } catch (Throwable t) {
logger.warn("[{}] failed to verify repository", t, repository); logger.warn("[{}] failed to verify repository", t, repository);
errors.add(new VerificationFailure(node.id(), ExceptionsHelper.detailedMessage(t))); errors.add(new VerificationFailure(node.id(), t));
} }
if (counter.decrementAndGet() == 0) { if (counter.decrementAndGet() == 0) {
finishVerification(listener, nodes, errors); finishVerification(listener, nodes, errors);
@ -102,7 +102,7 @@ public class VerifyNodeRepositoryAction extends AbstractComponent {
@Override @Override
public void handleException(TransportException exp) { public void handleException(TransportException exp) {
errors.add(new VerificationFailure(node.id(), ExceptionsHelper.detailedMessage(exp))); errors.add(new VerificationFailure(node.id(), exp));
if (counter.decrementAndGet() == 0) { if (counter.decrementAndGet() == 0) {
finishVerification(listener, nodes, errors); finishVerification(listener, nodes, errors);
} }

View File

@ -19,7 +19,6 @@
package org.elasticsearch.repositories.blobstore; package org.elasticsearch.repositories.blobstore;
import com.google.common.io.ByteStreams;
import org.apache.lucene.store.RateLimiter; import org.apache.lucene.store.RateLimiter;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version; import org.elasticsearch.Version;
@ -36,6 +35,7 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.compress.NotXContentException; import org.elasticsearch.common.compress.NotXContentException;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -66,6 +66,7 @@ import java.io.FileNotFoundException;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.OutputStream; import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException; import java.nio.file.NoSuchFileException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
@ -590,9 +591,10 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent<Rep
*/ */
protected List<SnapshotId> readSnapshotList() throws IOException { protected List<SnapshotId> readSnapshotList() throws IOException {
try (InputStream blob = snapshotsBlobContainer.readBlob(SNAPSHOTS_FILE)) { try (InputStream blob = snapshotsBlobContainer.readBlob(SNAPSHOTS_FILE)) {
final byte[] data = ByteStreams.toByteArray(blob); BytesStreamOutput out = new BytesStreamOutput();
Streams.copy(blob, out);
ArrayList<SnapshotId> snapshots = new ArrayList<>(); ArrayList<SnapshotId> snapshots = new ArrayList<>();
try (XContentParser parser = XContentHelper.createParser(new BytesArray(data))) { try (XContentParser parser = XContentHelper.createParser(out.bytes())) {
if (parser.nextToken() == XContentParser.Token.START_OBJECT) { if (parser.nextToken() == XContentParser.Token.START_OBJECT) {
if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { if (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
String currentFieldName = parser.currentName(); String currentFieldName = parser.currentName();

View File

@ -18,7 +18,6 @@
*/ */
package org.elasticsearch.repositories.blobstore; package org.elasticsearch.repositories.blobstore;
import com.google.common.io.ByteStreams;
import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooNewException;
@ -29,6 +28,7 @@ import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.store.ByteArrayIndexInput; import org.elasticsearch.common.lucene.store.ByteArrayIndexInput;
@ -93,7 +93,9 @@ public class ChecksumBlobStoreFormat<T extends ToXContent> extends BlobStoreForm
*/ */
public T readBlob(BlobContainer blobContainer, String blobName) throws IOException { public T readBlob(BlobContainer blobContainer, String blobName) throws IOException {
try (InputStream inputStream = blobContainer.readBlob(blobName)) { try (InputStream inputStream = blobContainer.readBlob(blobName)) {
byte[] bytes = ByteStreams.toByteArray(inputStream); ByteArrayOutputStream out = new ByteArrayOutputStream();
Streams.copy(inputStream, out);
final byte[] bytes = out.toByteArray();
final String resourceDesc = "ChecksumBlobStoreFormat.readBlob(blob=\"" + blobName + "\")"; final String resourceDesc = "ChecksumBlobStoreFormat.readBlob(blob=\"" + blobName + "\")";
try (ByteArrayIndexInput indexInput = new ByteArrayIndexInput(resourceDesc, bytes)) { try (ByteArrayIndexInput indexInput = new ByteArrayIndexInput(resourceDesc, bytes)) {
CodecUtil.checksumEntireFile(indexInput); CodecUtil.checksumEntireFile(indexInput);
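
Guava's ByteStreams.toByteArray is replaced by copying the blob into a ByteArrayOutputStream via the project's Streams.copy. For readers unfamiliar with that helper, a JDK-only sketch of the same read-fully pattern (this is an illustration, not the class above):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

public class ReadFullyExample {
    // Copy an InputStream into a byte[] without Guava.
    static byte[] readFully(InputStream in) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] buffer = new byte[8 * 1024];
        int read;
        while ((read = in.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
        return out.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        byte[] bytes = readFully(new ByteArrayInputStream("blob content".getBytes("UTF-8")));
        System.out.println(bytes.length); // 12
    }
}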

View File

@ -18,10 +18,10 @@
*/ */
package org.elasticsearch.repositories.blobstore; package org.elasticsearch.repositories.blobstore;
import com.google.common.io.ByteStreams;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.xcontent.FromXContentBuilder; import org.elasticsearch.common.xcontent.FromXContentBuilder;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
@ -53,7 +53,9 @@ public class LegacyBlobStoreFormat<T extends ToXContent> extends BlobStoreFormat
*/ */
public T readBlob(BlobContainer blobContainer, String blobName) throws IOException { public T readBlob(BlobContainer blobContainer, String blobName) throws IOException {
try (InputStream inputStream = blobContainer.readBlob(blobName)) { try (InputStream inputStream = blobContainer.readBlob(blobName)) {
return read(new BytesArray(ByteStreams.toByteArray(inputStream))); BytesStreamOutput out = new BytesStreamOutput();
Streams.copy(inputStream, out);
return read(out.bytes());
} }
} }
} }

View File

@ -30,8 +30,8 @@ import java.io.IOException;
* correctly serialized between nodes. * correctly serialized between nodes.
*/ */
public class GroovyScriptCompilationException extends ElasticsearchException { public class GroovyScriptCompilationException extends ElasticsearchException {
public GroovyScriptCompilationException(String message) { public GroovyScriptCompilationException(String message, Throwable cause) {
super(message); super(message, cause);
} }
public GroovyScriptCompilationException(StreamInput in) throws IOException{ public GroovyScriptCompilationException(StreamInput in) throws IOException{

Some files were not shown because too many files have changed in this diff.