Reduce number of raw types warnings (#31523)
A first attempt at reducing the number of raw type warnings, mostly by replacing raw types with the unbounded wildcard (<?>).
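For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of the warning being silenced (the class and variable names are illustrative, not taken from the change itself):

import java.util.ArrayList;
import java.util.List;

public class RawTypeExample {
    public static void main(String[] args) {
        // Raw type: javac (with -Xlint:rawtypes) warns on this declaration,
        // and every element access is an unchecked operation.
        List raw = new ArrayList();

        // Unbounded wildcard: no rawtypes warning. Elements read back as
        // Object, and nothing but null may be added, so the compiler keeps
        // the list type-safe without committing to an element type.
        List<?> wildcard = new ArrayList<String>();
        Object first = wildcard.isEmpty() ? null : wildcard.get(0);
        System.out.println(first);
    }
}

This is why most signatures below change from, e.g., DocWriteRequest to DocWriteRequest<?>: callers keep compiling, but the rawtypes warning disappears.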
This commit is contained in:
parent 8e4768890a
commit 86ab3a2d1a
@@ -329,7 +329,7 @@ public class NoopSearchRequestBuilder extends ActionRequestBuilder<SearchRequest
      *
      * @see org.elasticsearch.search.sort.SortBuilders
      */
-    public NoopSearchRequestBuilder addSort(SortBuilder sort) {
+    public NoopSearchRequestBuilder addSort(SortBuilder<?> sort) {
         sourceBuilder().sort(sort);
         return this;
     }
@@ -415,7 +415,7 @@ public class NoopSearchRequestBuilder extends ActionRequestBuilder<SearchRequest
      * @param window rescore window
      * @return this for chaining
      */
-    public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer, int window) {
+    public NoopSearchRequestBuilder setRescorer(RescorerBuilder<?> rescorer, int window) {
         sourceBuilder().clearRescorers();
         return addRescorer(rescorer.windowSize(window));
     }
@@ -91,7 +91,7 @@ public class AppendProcessorTests extends ESTestCase {
             appendProcessor = createAppendProcessor(field, values);
         }
         appendProcessor.execute(ingestDocument);
-        List list = ingestDocument.getFieldValue(field, List.class);
+        List<?> list = ingestDocument.getFieldValue(field, List.class);
         assertThat(list, not(sameInstance(values)));
         assertThat(list, equalTo(values));
     }
@@ -115,7 +115,7 @@ public class AppendProcessorTests extends ESTestCase {
             appendProcessor = createAppendProcessor(field, values);
         }
         appendProcessor.execute(ingestDocument);
-        List fieldValue = ingestDocument.getFieldValue(field, List.class);
+        List<?> fieldValue = ingestDocument.getFieldValue(field, List.class);
         assertThat(fieldValue.size(), equalTo(values.size() + 1));
         assertThat(fieldValue.get(0), equalTo(initialValue));
         for (int i = 1; i < values.size() + 1; i++) {
@@ -144,7 +144,7 @@ public class AppendProcessorTests extends ESTestCase {
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
         Object initialValue = ingestDocument.getSourceAndMetadata().get(randomMetaData.getFieldName());
         appendProcessor.execute(ingestDocument);
-        List list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class);
+        List<?> list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class);
         if (initialValue == null) {
             assertThat(list, equalTo(values));
         } else {
@@ -57,7 +57,7 @@ public abstract class Action<Response extends ActionResponse> {

     @Override
     public boolean equals(Object o) {
-        return o instanceof Action && name.equals(((Action) o).name());
+        return o instanceof Action && name.equals(((Action<?>) o).name());
     }

     @Override
@@ -31,7 +31,7 @@ import java.util.Objects;
 public class BulkItemRequest implements Streamable {

     private int id;
-    private DocWriteRequest request;
+    private DocWriteRequest<?> request;
     private volatile BulkItemResponse primaryResponse;

     BulkItemRequest() {
@@ -39,7 +39,7 @@ public class BulkItemRequest implements Streamable {
     }

     // NOTE: public for testing only
-    public BulkItemRequest(int id, DocWriteRequest request) {
+    public BulkItemRequest(int id, DocWriteRequest<?> request) {
         this.id = id;
         this.request = request;
     }
@@ -48,7 +48,7 @@ public class BulkItemRequest implements Streamable {
         return id;
     }

-    public DocWriteRequest request() {
+    public DocWriteRequest<?> request() {
         return request;
     }

@@ -248,24 +248,24 @@ public class BulkProcessor implements Closeable {
      * (for example, if no id is provided, one will be generated, or usage of the create flag).
      */
     public BulkProcessor add(IndexRequest request) {
-        return add((DocWriteRequest) request);
+        return add((DocWriteRequest<?>) request);
     }

     /**
      * Adds an {@link DeleteRequest} to the list of actions to execute.
      */
     public BulkProcessor add(DeleteRequest request) {
-        return add((DocWriteRequest) request);
+        return add((DocWriteRequest<?>) request);
     }

     /**
      * Adds either a delete or an index request.
      */
-    public BulkProcessor add(DocWriteRequest request) {
+    public BulkProcessor add(DocWriteRequest<?> request) {
         return add(request, null);
     }

-    public BulkProcessor add(DocWriteRequest request, @Nullable Object payload) {
+    public BulkProcessor add(DocWriteRequest<?> request, @Nullable Object payload) {
         internalAdd(request, payload);
         return this;
     }
@@ -280,7 +280,7 @@ public class BulkProcessor implements Closeable {
         }
     }

-    private synchronized void internalAdd(DocWriteRequest request, @Nullable Object payload) {
+    private synchronized void internalAdd(DocWriteRequest<?> request, @Nullable Object payload) {
         ensureOpen();
         bulkRequest.add(request, payload);
         executeIfNeeded();
@@ -83,7 +83,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
      * {@link WriteRequest}s to this but java doesn't support syntax to declare that everything in the array has both types so we declare
      * the one with the least casts.
      */
-    final List<DocWriteRequest> requests = new ArrayList<>();
+    final List<DocWriteRequest<?>> requests = new ArrayList<>();
     private final Set<String> indices = new HashSet<>();
     List<Object> payloads = null;

@@ -99,14 +99,14 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
     /**
      * Adds a list of requests to be executed. Either index or delete requests.
      */
-    public BulkRequest add(DocWriteRequest... requests) {
-        for (DocWriteRequest request : requests) {
+    public BulkRequest add(DocWriteRequest<?>... requests) {
+        for (DocWriteRequest<?> request : requests) {
             add(request, null);
         }
         return this;
     }

-    public BulkRequest add(DocWriteRequest request) {
+    public BulkRequest add(DocWriteRequest<?> request) {
         return add(request, null);
     }

@@ -116,7 +116,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
      * @param payload Optional payload
      * @return the current bulk request
      */
-    public BulkRequest add(DocWriteRequest request, @Nullable Object payload) {
+    public BulkRequest add(DocWriteRequest<?> request, @Nullable Object payload) {
         if (request instanceof IndexRequest) {
             add((IndexRequest) request, payload);
         } else if (request instanceof DeleteRequest) {
@@ -133,8 +133,8 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
     /**
      * Adds a list of requests to be executed. Either index or delete requests.
      */
-    public BulkRequest add(Iterable<DocWriteRequest> requests) {
-        for (DocWriteRequest request : requests) {
+    public BulkRequest add(Iterable<DocWriteRequest<?>> requests) {
+        for (DocWriteRequest<?> request : requests) {
             add(request);
         }
         return this;
@@ -223,7 +223,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
     /**
      * The list of requests in this bulk request.
      */
-    public List<DocWriteRequest> requests() {
+    public List<DocWriteRequest<?>> requests() {
         return this.requests;
     }

@@ -527,7 +527,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
      * @return Whether this bulk request contains index request with an ingest pipeline enabled.
      */
     public boolean hasIndexRequestsWithPipelines() {
-        for (DocWriteRequest actionRequest : requests) {
+        for (DocWriteRequest<?> actionRequest : requests) {
             if (actionRequest instanceof IndexRequest) {
                 IndexRequest indexRequest = (IndexRequest) actionRequest;
                 if (Strings.hasText(indexRequest.getPipeline())) {
@@ -545,7 +545,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
         if (requests.isEmpty()) {
             validationException = addValidationError("no requests added", validationException);
         }
-        for (DocWriteRequest request : requests) {
+        for (DocWriteRequest<?> request : requests) {
             // We first check if refresh has been set
             if (((WriteRequest<?>) request).getRefreshPolicy() != RefreshPolicy.NONE) {
                 validationException = addValidationError(
@@ -580,7 +580,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
         super.writeTo(out);
         waitForActiveShards.writeTo(out);
         out.writeVInt(requests.size());
-        for (DocWriteRequest request : requests) {
+        for (DocWriteRequest<?> request : requests) {
             DocWriteRequest.writeDocumentRequest(out, request);
         }
         refreshPolicy.writeTo(out);
@@ -143,8 +143,8 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
         final Set<String> indices = bulkRequest.requests.stream()
             // delete requests should not attempt to create the index (if the index does not
             // exists), unless an external versioning is used
-            .filter(request -> request.opType() != DocWriteRequest.OpType.DELETE
-                || request.versionType() == VersionType.EXTERNAL
+            .filter(request -> request.opType() != DocWriteRequest.OpType.DELETE
+                || request.versionType() == VersionType.EXTERNAL
                 || request.versionType() == VersionType.EXTERNAL_GTE)
             .map(DocWriteRequest::index)
             .collect(Collectors.toSet());
@@ -184,7 +184,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
         if (!(ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException)) {
             // fail all requests involving this index, if create didn't work
             for (int i = 0; i < bulkRequest.requests.size(); i++) {
-                DocWriteRequest request = bulkRequest.requests.get(i);
+                DocWriteRequest<?> request = bulkRequest.requests.get(i);
                 if (request != null && setResponseFailureIfIndexMatches(responses, i, request, index, e)) {
                     bulkRequest.requests.set(i, null);
                 }
@@ -221,7 +221,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
         client.admin().indices().create(createIndexRequest, listener);
     }

-    private boolean setResponseFailureIfIndexMatches(AtomicArray<BulkItemResponse> responses, int idx, DocWriteRequest request, String index, Exception e) {
+    private boolean setResponseFailureIfIndexMatches(AtomicArray<BulkItemResponse> responses, int idx, DocWriteRequest<?> request, String index, Exception e) {
         if (index.equals(request.index())) {
             responses.set(idx, new BulkItemResponse(idx, request.opType(), new BulkItemResponse.Failure(request.index(), request.type(), request.id(), e)));
             return true;
@@ -271,7 +271,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
         final ConcreteIndices concreteIndices = new ConcreteIndices(clusterState, indexNameExpressionResolver);
         MetaData metaData = clusterState.metaData();
         for (int i = 0; i < bulkRequest.requests.size(); i++) {
-            DocWriteRequest docWriteRequest = bulkRequest.requests.get(i);
+            DocWriteRequest<?> docWriteRequest = bulkRequest.requests.get(i);
             //the request can only be null because we set it to null in the previous step, so it gets ignored
             if (docWriteRequest == null) {
                 continue;
@@ -315,7 +315,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
         // first, go over all the requests and create a ShardId -> Operations mapping
         Map<ShardId, List<BulkItemRequest>> requestsByShard = new HashMap<>();
         for (int i = 0; i < bulkRequest.requests.size(); i++) {
-            DocWriteRequest request = bulkRequest.requests.get(i);
+            DocWriteRequest<?> request = bulkRequest.requests.get(i);
             if (request == null) {
                 continue;
             }
@@ -362,7 +362,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
             // create failures for all relevant requests
             for (BulkItemRequest request : requests) {
                 final String indexName = concreteIndices.getConcreteIndex(request.index()).getName();
-                DocWriteRequest docWriteRequest = request.request();
+                DocWriteRequest<?> docWriteRequest = request.request();
                 responses.set(request.id(), new BulkItemResponse(request.id(), docWriteRequest.opType(),
                     new BulkItemResponse.Failure(indexName, docWriteRequest.type(), docWriteRequest.id(), e)));
             }
@@ -418,7 +418,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
         });
     }

-    private boolean addFailureIfIndexIsUnavailable(DocWriteRequest request, int idx, final ConcreteIndices concreteIndices,
+    private boolean addFailureIfIndexIsUnavailable(DocWriteRequest<?> request, int idx, final ConcreteIndices concreteIndices,
                                                    final MetaData metaData) {
         IndexNotFoundException cannotCreate = indicesThatCannotBeCreated.get(request.index());
         if (cannotCreate != null) {
@@ -442,7 +442,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
         return false;
     }

-    private void addFailure(DocWriteRequest request, int idx, Exception unavailableException) {
+    private void addFailure(DocWriteRequest<?> request, int idx, Exception unavailableException) {
         BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.type(), request.id(),
             unavailableException);
         BulkItemResponse bulkItemResponse = new BulkItemResponse(idx, request.opType(), failure);
@@ -471,7 +471,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
             return indices.get(indexOrAlias);
         }

-        Index resolveIfAbsent(DocWriteRequest request) {
+        Index resolveIfAbsent(DocWriteRequest<?> request) {
             Index concreteIndex = indices.get(request.index());
             if (concreteIndex == null) {
                 concreteIndex = indexNameExpressionResolver.concreteSingleIndex(state, request);
@@ -512,7 +512,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
         });
     }

-    static final class BulkRequestModifier implements Iterator<DocWriteRequest> {
+    static final class BulkRequestModifier implements Iterator<DocWriteRequest<?>> {

         final BulkRequest bulkRequest;
         final SparseFixedBitSet failedSlots;
@@ -528,7 +528,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
         }

         @Override
-        public DocWriteRequest next() {
+        public DocWriteRequest<?> next() {
             return bulkRequest.requests().get(++currentSlot);
         }

@@ -547,10 +547,10 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
             modifiedBulkRequest.timeout(bulkRequest.timeout());

             int slot = 0;
-            List<DocWriteRequest> requests = bulkRequest.requests();
+            List<DocWriteRequest<?>> requests = bulkRequest.requests();
             originalSlots = new int[requests.size()]; // oversize, but that's ok
             for (int i = 0; i < requests.size(); i++) {
-                DocWriteRequest request = requests.get(i);
+                DocWriteRequest<?> request = requests.get(i);
                 if (failedSlots.get(i) == false) {
                     modifiedBulkRequest.add(request);
                     originalSlots[slot++] = i;
@@ -20,6 +20,7 @@
 package org.elasticsearch.action.support.replication;

+import com.carrotsearch.hppc.cursors.IntObjectCursor;

 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.support.ActionFilters;
@@ -39,7 +39,7 @@ public abstract class AbstractComponent {
         this.settings = settings;
     }

-    public AbstractComponent(Settings settings, Class customClass) {
+    public AbstractComponent(Settings settings, Class<?> customClass) {
         this.logger = LogManager.getLogger(customClass);
         this.deprecationLogger = new DeprecationLogger(logger);
         this.settings = settings;
@@ -35,7 +35,7 @@ public abstract class AbstractLifecycleComponent extends AbstractComponent imple
         super(settings);
     }

-    protected AbstractLifecycleComponent(Settings settings, Class customClass) {
+    protected AbstractLifecycleComponent(Settings settings, Class<?> customClass) {
         super(settings, customClass);
     }

@@ -19,23 +19,22 @@

 package org.elasticsearch.common.geo.builders;

-import org.elasticsearch.common.geo.GeoShapeType;
-import org.elasticsearch.common.geo.parsers.ShapeParser;
-import org.elasticsearch.common.geo.parsers.GeoWKTParser;
-import org.locationtech.spatial4j.shape.Shape;
-
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.common.geo.GeoShapeType;
 import org.elasticsearch.common.geo.XShapeCollection;
+import org.elasticsearch.common.geo.parsers.GeoWKTParser;
+import org.elasticsearch.common.geo.parsers.ShapeParser;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.locationtech.spatial4j.shape.Shape;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;

-public class GeometryCollectionBuilder extends ShapeBuilder {
+public class GeometryCollectionBuilder extends ShapeBuilder<Shape, GeometryCollectionBuilder> {

     public static final GeoShapeType TYPE = GeoShapeType.GEOMETRYCOLLECTION;

@@ -20,15 +20,14 @@
 package org.elasticsearch.common.geo.builders;

 import org.elasticsearch.common.geo.GeoShapeType;
-import org.elasticsearch.common.geo.parsers.ShapeParser;
-import org.elasticsearch.common.geo.parsers.GeoWKTParser;
-import org.locationtech.spatial4j.shape.Shape;
-import org.locationtech.jts.geom.Coordinate;
-
 import org.elasticsearch.common.geo.XShapeCollection;
+import org.elasticsearch.common.geo.parsers.GeoWKTParser;
+import org.elasticsearch.common.geo.parsers.ShapeParser;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.locationtech.jts.geom.Coordinate;
+import org.locationtech.spatial4j.shape.Shape;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -36,7 +35,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Objects;

-public class MultiPolygonBuilder extends ShapeBuilder {
+public class MultiPolygonBuilder extends ShapeBuilder<Shape, MultiPolygonBuilder> {

     public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON;

@@ -85,7 +85,7 @@ abstract class AbstractProcessor implements ElementVisitor<Boolean> {
     }

     @Override
-    public Boolean visit(InjectionRequest injectionRequest) {
+    public Boolean visit(InjectionRequest<?> injectionRequest) {
         return false;
     }

@@ -360,7 +360,7 @@ public interface Binder {
      * @return a binder that shares its configuration with this binder.
      * @since 2.0
      */
-    Binder skipSources(Class... classesToSkip);
+    Binder skipSources(Class<?>... classesToSkip);

     /**
      * Creates a new private child environment for bindings and other configuration. The returned
@@ -72,7 +72,7 @@ class BindingProcessor extends AbstractProcessor {

         if (Void.class.equals(command.getKey().getRawType())) {
             if (command instanceof ProviderInstanceBinding
-                && ((ProviderInstanceBinding) command).getProviderInstance() instanceof ProviderMethod) {
+                && ((ProviderInstanceBinding<?>) command).getProviderInstance() instanceof ProviderMethod) {
                 errors.voidProviderMethod();
             } else {
                 errors.missingConstantValues();
@@ -274,7 +274,7 @@ class BindingProcessor extends AbstractProcessor {
      */
     private boolean isOkayDuplicate(Binding<?> original, BindingImpl<?> binding) {
         if (original instanceof ExposedBindingImpl) {
-            ExposedBindingImpl exposed = (ExposedBindingImpl) original;
+            ExposedBindingImpl<?> exposed = (ExposedBindingImpl<?>) original;
             InjectorImpl exposedFrom = (InjectorImpl) exposed.getPrivateElements().getInjector();
             return (exposedFrom == binding.getInjector());
         }
@@ -46,7 +46,7 @@ public interface ElementVisitor<V> {
     /**
      * Visit a request to inject the instance fields and methods of an instance.
      */
-    V visit(InjectionRequest request);
+    V visit(InjectionRequest<?> request);

     /**
      * Visit a request to inject the static fields and methods of type.
@@ -83,7 +83,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
         this.keySettings = Collections.unmodifiableMap(keySettings);
     }

-    protected void validateSettingKey(Setting setting) {
+    protected void validateSettingKey(Setting<?> setting) {
         if (isValidKey(setting.getKey()) == false && (setting.isGroupSetting() && isValidGroupKey(setting.getKey())
             || isValidAffixKey(setting.getKey())) == false || setting.getKey().endsWith(".0")) {
             throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]");
@@ -366,7 +366,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
      * @throws IllegalArgumentException if the setting is invalid
      */
     void validate(final String key, final Settings settings, final boolean validateDependencies, final boolean validateInternalIndex) {
-        Setting setting = getRaw(key);
+        Setting<?> setting = getRaw(key);
         if (setting == null) {
             LevensteinDistance ld = new LevensteinDistance();
             List<Tuple<Float, String>> scoredKeys = new ArrayList<>();
@@ -32,7 +32,7 @@ import static org.elasticsearch.common.util.BigArrays.OBJECT_PAGE_SIZE;
  */
 final class BigObjectArray<T> extends AbstractBigArray implements ObjectArray<T> {

-    private static final BigObjectArray ESTIMATOR = new BigObjectArray(0, BigArrays.NON_RECYCLING_INSTANCE);
+    private static final BigObjectArray<Long> ESTIMATOR = new BigObjectArray<Long>(0, BigArrays.NON_RECYCLING_INSTANCE);

     private Object[][] pages;

@@ -48,7 +48,7 @@ abstract class AtomicDoubleFieldData implements AtomicNumericFieldData {
     }

     @Override
-    public final ScriptDocValues getScriptValues() {
+    public final ScriptDocValues<Double> getScriptValues() {
         return new ScriptDocValues.Doubles(getDoubleValues());
     }

@@ -69,7 +69,7 @@ abstract class AtomicDoubleFieldData implements AtomicNumericFieldData {
             public SortedNumericDoubleValues getDoubleValues() {
                 return FieldData.emptySortedNumericDoubles();
             }
-
+
             @Override
             public Collection<Accountable> getChildResources() {
                 return Collections.emptyList();
@@ -80,7 +80,8 @@ public class BinaryFieldMapper extends FieldMapper {

     public static class TypeParser implements Mapper.TypeParser {
         @Override
-        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
+        public BinaryFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext)
+                throws MapperParsingException {
             BinaryFieldMapper.Builder builder = new BinaryFieldMapper.Builder(name);
             parseField(builder, name, node, parserContext);
             return builder;
@@ -94,7 +94,8 @@ public class BooleanFieldMapper extends FieldMapper {

     public static class TypeParser implements Mapper.TypeParser {
         @Override
-        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
+        public BooleanFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext)
+                throws MapperParsingException {
             BooleanFieldMapper.Builder builder = new BooleanFieldMapper.Builder(name);
             parseField(builder, name, node, parserContext);
             for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
@@ -163,7 +163,7 @@ public abstract class BaseTermQueryBuilder<QB extends BaseTermQueryBuilder<QB>>
     }

     @Override
-    protected final boolean doEquals(BaseTermQueryBuilder other) {
+    protected final boolean doEquals(QB other) {
         return Objects.equals(fieldName, other.fieldName) &&
                Objects.equals(value, other.value);
     }
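The BaseTermQueryBuilder hunk above is the one place where the fix is the recursive type parameter QB rather than a wildcard: since the class is declared as BaseTermQueryBuilder<QB extends BaseTermQueryBuilder<QB>>, doEquals can take the concrete builder type directly. A minimal sketch of that idiom, with hypothetical names:

abstract class Builder<B extends Builder<B>> {
    // Each concrete subclass compares against its own type, not the raw base.
    protected abstract boolean doEquals(B other);
}

final class TermBuilder extends Builder<TermBuilder> {
    private final String field = "field";

    @Override
    protected boolean doEquals(TermBuilder other) {
        // Fields of the concrete type are accessible without casts.
        return field.equals(other.field);
    }

    public static void main(String[] args) {
        System.out.println(new TermBuilder().doEquals(new TermBuilder())); // true
    }
}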
@@ -53,7 +53,7 @@ public class PipelineExecutionService implements ClusterStateApplier {
         this.threadPool = threadPool;
     }

-    public void executeBulkRequest(Iterable<DocWriteRequest> actionRequests,
+    public void executeBulkRequest(Iterable<DocWriteRequest<?>> actionRequests,
                                    BiConsumer<IndexRequest, Exception> itemFailureHandler,
                                    Consumer<Exception> completionHandler) {
         threadPool.executor(ThreadPool.Names.WRITE).execute(new AbstractRunnable() {
@@ -65,7 +65,7 @@ public class PipelineExecutionService implements ClusterStateApplier {

             @Override
             protected void doRun() throws Exception {
-                for (DocWriteRequest actionRequest : actionRequests) {
+                for (DocWriteRequest<?> actionRequest : actionRequests) {
                     IndexRequest indexRequest = null;
                     if (actionRequest instanceof IndexRequest) {
                         indexRequest = (IndexRequest) actionRequest;
@@ -19,9 +19,9 @@

 package org.elasticsearch.plugins;

+import org.elasticsearch.action.Action;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.action.Action;
 import org.elasticsearch.action.support.ActionFilter;
 import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.action.support.TransportActions;
@@ -68,7 +68,7 @@ public interface ActionPlugin {
      * Client actions added by this plugin. This defaults to all of the {@linkplain Action} in
      * {@linkplain ActionPlugin#getActions()}.
      */
-    default List<Action> getClientActions() {
+    default List<Action<? extends ActionResponse>> getClientActions() {
         return getActions().stream().map(a -> a.action).collect(Collectors.toList());
     }

@@ -115,7 +115,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
     public AbstractHighlighterBuilder() {
     }

-    protected AbstractHighlighterBuilder(AbstractHighlighterBuilder template, QueryBuilder queryBuilder) {
+    protected AbstractHighlighterBuilder(AbstractHighlighterBuilder<?> template, QueryBuilder queryBuilder) {
         preTags = template.preTags;
         postTags = template.postTags;
         fragmentSize = template.fragmentSize;
@@ -22,7 +22,7 @@ package org.elasticsearch.search.suggest.completion.context;
 /**
  * Builder for {@link ContextMapping}
  */
-public abstract class ContextBuilder<E extends ContextMapping> {
+public abstract class ContextBuilder<E extends ContextMapping<?>> {

     protected String name;

@@ -143,7 +143,7 @@ public abstract class ContextMapping<T extends ToXContent> implements ToXContent
     public boolean equals(Object o) {
         if (this == o) return true;
         if (o == null || getClass() != o.getClass()) return false;
-        ContextMapping that = (ContextMapping) o;
+        ContextMapping<?> that = (ContextMapping<?>) o;
         if (type != that.type) return false;
         return name.equals(that.name);
     }
@@ -57,10 +57,10 @@ public class ContextMappings implements ToXContent {
     private static final DeprecationLogger DEPRECATION_LOGGER =
         new DeprecationLogger(Loggers.getLogger(ContextMappings.class));

-    private final List<ContextMapping> contextMappings;
-    private final Map<String, ContextMapping> contextNameMap;
+    private final List<ContextMapping<?>> contextMappings;
+    private final Map<String, ContextMapping<?>> contextNameMap;

-    public ContextMappings(List<ContextMapping> contextMappings) {
+    public ContextMappings(List<ContextMapping<?>> contextMappings) {
         if (contextMappings.size() > 255) {
             // we can support more, but max of 255 (1 byte) unique context types per suggest field
             // seems reasonable?
@@ -68,7 +68,7 @@ public class ContextMappings implements ToXContent {
         }
         this.contextMappings = contextMappings;
         contextNameMap = new HashMap<>(contextMappings.size());
-        for (ContextMapping mapping : contextMappings) {
+        for (ContextMapping<?> mapping : contextMappings) {
             contextNameMap.put(mapping.name(), mapping);
         }
     }
@@ -84,8 +84,8 @@ public class ContextMappings implements ToXContent {
     /**
      * Returns a context mapping by its name
      */
-    public ContextMapping get(String name) {
-        ContextMapping contextMapping = contextNameMap.get(name);
+    public ContextMapping<?> get(String name) {
+        ContextMapping<?> contextMapping = contextNameMap.get(name);
         if (contextMapping == null) {
             List<String> keys = new ArrayList<>(contextNameMap.keySet());
             Collections.sort(keys);
@@ -138,7 +138,7 @@ public class ContextMappings implements ToXContent {
         for (int typeId = 0; typeId < contextMappings.size(); typeId++) {
             scratch.setCharAt(0, (char) typeId);
             scratch.setLength(1);
-            ContextMapping mapping = contextMappings.get(typeId);
+            ContextMapping<?> mapping = contextMappings.get(typeId);
             Set<CharSequence> contexts = new HashSet<>(mapping.parseContext(document));
             if (this.contexts.get(mapping.name()) != null) {
                 contexts.addAll(this.contexts.get(mapping.name()));
@@ -173,7 +173,7 @@ public class ContextMappings implements ToXContent {
         for (int typeId = 0; typeId < contextMappings.size(); typeId++) {
             scratch.setCharAt(0, (char) typeId);
             scratch.setLength(1);
-            ContextMapping mapping = contextMappings.get(typeId);
+            ContextMapping<?> mapping = contextMappings.get(typeId);
             List<ContextMapping.InternalQueryContext> internalQueryContext = queryContexts.get(mapping.name());
             if (internalQueryContext != null) {
                 for (ContextMapping.InternalQueryContext context : internalQueryContext) {
@@ -204,7 +204,7 @@ public class ContextMappings implements ToXContent {
         for (CharSequence typedContext : contexts) {
             int typeId = typedContext.charAt(0);
             assert typeId < contextMappings.size() : "Returned context has invalid type";
-            ContextMapping mapping = contextMappings.get(typeId);
+            ContextMapping<?> mapping = contextMappings.get(typeId);
             Set<CharSequence> contextEntries = contextMap.get(mapping.name());
             if (contextEntries == null) {
                 contextEntries = new HashSet<>();
@@ -224,10 +224,10 @@ public class ContextMappings implements ToXContent {
      *
      */
     public static ContextMappings load(Object configuration, Version indexVersionCreated) throws ElasticsearchParseException {
-        final List<ContextMapping> contextMappings;
+        final List<ContextMapping<?>> contextMappings;
         if (configuration instanceof List) {
             contextMappings = new ArrayList<>();
-            List<Object> configurations = (List<Object>)configuration;
+            List<Object> configurations = (List<Object>) configuration;
             for (Object contextConfig : configurations) {
                 contextMappings.add(load((Map<String, Object>) contextConfig, indexVersionCreated));
             }
@@ -242,10 +242,10 @@ public class ContextMappings implements ToXContent {
         return new ContextMappings(contextMappings);
     }

-    private static ContextMapping load(Map<String, Object> contextConfig, Version indexVersionCreated) {
+    private static ContextMapping<?> load(Map<String, Object> contextConfig, Version indexVersionCreated) {
         String name = extractRequiredValue(contextConfig, FIELD_NAME);
         String type = extractRequiredValue(contextConfig, FIELD_TYPE);
-        final ContextMapping contextMapping;
+        final ContextMapping<?> contextMapping;
         switch (Type.fromString(type)) {
             case CATEGORY:
                 contextMapping = CategoryContextMapping.load(name, contextConfig);
@@ -276,7 +276,7 @@ public class ContextMappings implements ToXContent {
      */
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        for (ContextMapping contextMapping : contextMappings) {
+        for (ContextMapping<?> contextMapping : contextMappings) {
             builder.startObject();
             contextMapping.toXContent(builder, params);
             builder.endObject();
@@ -75,7 +75,7 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {
         }
     }

-    interface CollapsingDocValuesProducer<T extends Comparable> {
+    interface CollapsingDocValuesProducer<T extends Comparable<?>> {
         T randomGroup(int maxGroup);

         void add(Document doc, T value, boolean multivalued);
@@ -83,14 +83,14 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {
         SortField sortField(boolean multivalued);
     }

-    <T extends Comparable> void assertSearchCollapse(CollapsingDocValuesProducer<T> dvProducers, boolean numeric) throws IOException {
+    <T extends Comparable<T>> void assertSearchCollapse(CollapsingDocValuesProducer<T> dvProducers, boolean numeric) throws IOException {
         assertSearchCollapse(dvProducers, numeric, true, true);
         assertSearchCollapse(dvProducers, numeric, true, false);
         assertSearchCollapse(dvProducers, numeric, false, true);
         assertSearchCollapse(dvProducers, numeric, false, false);
     }

-    private <T extends Comparable> void assertSearchCollapse(CollapsingDocValuesProducer<T> dvProducers,
+    private <T extends Comparable<T>> void assertSearchCollapse(CollapsingDocValuesProducer<T> dvProducers,
                                                               boolean numeric, boolean multivalued,
                                                               boolean trackMaxScores) throws IOException {
         final int numDocs = randomIntBetween(1000, 2000);
@@ -120,7 +120,7 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {

         int expectedNumGroups = values.size();

-        final CollapsingTopDocsCollector collapsingCollector;
+        final CollapsingTopDocsCollector<?> collapsingCollector;
         if (numeric) {
             collapsingCollector =
                 CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups, trackMaxScores);
@@ -199,7 +199,7 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {
         final Weight weight = searcher.createNormalizedWeight(new MatchAllDocsQuery(), true);
         for (int shardIDX = 0; shardIDX < subSearchers.length; shardIDX++) {
             final SegmentSearcher subSearcher = subSearchers[shardIDX];
-            final CollapsingTopDocsCollector c;
+            final CollapsingTopDocsCollector<?> c;
             if (numeric) {
                 c = CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups, trackMaxScores);
             } else {
@@ -221,7 +221,7 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {
     }

     public void testCollapseLong() throws Exception {
-        CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer<Long>() {
+        CollapsingDocValuesProducer<Long> producer = new CollapsingDocValuesProducer<Long>() {
             @Override
             public Long randomGroup(int maxGroup) {
                 return randomNonNegativeLong() % maxGroup;
@@ -249,7 +249,7 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {
     }

     public void testCollapseInt() throws Exception {
-        CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer<Integer>() {
+        CollapsingDocValuesProducer<Integer> producer = new CollapsingDocValuesProducer<Integer>() {
             @Override
             public Integer randomGroup(int maxGroup) {
                 return randomIntBetween(0, maxGroup - 1);
@@ -277,10 +277,10 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {
     }

     public void testCollapseFloat() throws Exception {
-        CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer<Float>() {
+        CollapsingDocValuesProducer<Float> producer = new CollapsingDocValuesProducer<Float>() {
             @Override
             public Float randomGroup(int maxGroup) {
-                return new Float(randomIntBetween(0, maxGroup - 1));
+                return Float.valueOf(randomIntBetween(0, maxGroup - 1));
             }

             @Override
@@ -305,10 +305,10 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {
     }

     public void testCollapseDouble() throws Exception {
-        CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer<Double>() {
+        CollapsingDocValuesProducer<Double> producer = new CollapsingDocValuesProducer<Double>() {
             @Override
             public Double randomGroup(int maxGroup) {
-                return new Double(randomIntBetween(0, maxGroup - 1));
+                return Double.valueOf(randomIntBetween(0, maxGroup - 1));
             }

             @Override
@@ -333,7 +333,7 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {
     }

     public void testCollapseString() throws Exception {
-        CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer<BytesRef>() {
+        CollapsingDocValuesProducer<BytesRef> producer = new CollapsingDocValuesProducer<BytesRef>() {
             @Override
             public BytesRef randomGroup(int maxGroup) {
                 return new BytesRef(Integer.toString(randomIntBetween(0, maxGroup - 1)));
@@ -383,7 +383,7 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {
         SortField sortField = new SortField("group", SortField.Type.LONG);
         sortField.setMissingValue(Long.MAX_VALUE);
         Sort sort = new Sort(sortField);
-        final CollapsingTopDocsCollector collapsingCollector =
+        final CollapsingTopDocsCollector<?> collapsingCollector =
             CollapsingTopDocsCollector.createNumeric("group", sort, 10, false);
         searcher.search(new MatchAllDocsQuery(), collapsingCollector);
         CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs();
@@ -419,7 +419,7 @@ public class CollapsingTopDocsCollectorTests extends ESTestCase {
         final IndexReader reader = w.getReader();
         final IndexSearcher searcher = newSearcher(reader);
         Sort sort = new Sort(new SortField("group", SortField.Type.STRING_VAL));
-        final CollapsingTopDocsCollector collapsingCollector =
+        final CollapsingTopDocsCollector<?> collapsingCollector =
             CollapsingTopDocsCollector.createKeyword("group", sort, 10, false);
         searcher.search(new MatchAllDocsQuery(), collapsingCollector);
         CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs();
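Alongside the generics cleanup, the collector tests above also swap the boxed-primitive constructors for the valueOf factories. new Float(...) and new Double(...) always allocate a fresh box (and were deprecated in JDK 9), while valueOf may return a cached instance. A small sketch of the difference:

public class BoxingExample {
    public static void main(String[] args) {
        Float viaConstructor = new Float(3);  // always allocates; deprecated style
        Float viaFactory = Float.valueOf(3);  // preferred factory, same value
        System.out.println(viaConstructor.equals(viaFactory)); // true
    }
}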
@@ -24,7 +24,7 @@ import org.elasticsearch.test.ESTestCase;
 public class ActionTests extends ESTestCase {

     public void testEquals() {
-        class FakeAction extends Action {
+        class FakeAction extends Action<ActionResponse> {
             protected FakeAction(String name) {
                 super(name);
             }
@@ -124,7 +124,7 @@ public class BulkRequestTests extends ESTestCase {

     public void testBulkAddIterable() {
         BulkRequest bulkRequest = Requests.bulkRequest();
-        List<DocWriteRequest> requests = new ArrayList<>();
+        List<DocWriteRequest<?>> requests = new ArrayList<>();
         requests.add(new IndexRequest("test", "test", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"));
         requests.add(new UpdateRequest("test", "test", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"));
         requests.add(new DeleteRequest("test", "test", "id"));
@@ -279,7 +279,7 @@ public class BulkRequestTests extends ESTestCase {
         BulkRequest bulkRequest = new BulkRequest();
         bulkRequest.add(data, null, null, xContentType);
         assertEquals(1, bulkRequest.requests().size());
-        DocWriteRequest docWriteRequest = bulkRequest.requests().get(0);
+        DocWriteRequest<?> docWriteRequest = bulkRequest.requests().get(0);
         assertEquals(DocWriteRequest.OpType.INDEX, docWriteRequest.opType());
         assertEquals("index", docWriteRequest.index());
         assertEquals("type", docWriteRequest.type());
@@ -84,7 +84,7 @@ public class TransportBulkActionIngestTests extends ESTestCase {
     @Captor
     ArgumentCaptor<TransportResponseHandler<BulkResponse>> remoteResponseHandler;
     @Captor
-    ArgumentCaptor<Iterable<DocWriteRequest>> bulkDocsItr;
+    ArgumentCaptor<Iterable<DocWriteRequest<?>>> bulkDocsItr;

     /** The actual action we want to test, with real indexing mocked */
     TestTransportBulkAction action;
@@ -225,7 +225,7 @@ public class TransportBulkActionIngestTests extends ESTestCase {
         assertTrue(failureCalled.get());

         // now check success
-        Iterator<DocWriteRequest> req = bulkDocsItr.getValue().iterator();
+        Iterator<DocWriteRequest<?>> req = bulkDocsItr.getValue().iterator();
         failureHandler.getValue().accept((IndexRequest)req.next(), exception); // have an exception for our one index request
         indexRequest2.setPipeline(null); // this is done by the real pipeline execution service when processing
         completionHandler.getValue().accept(null);
@@ -18,9 +18,6 @@
  */
 package org.elasticsearch.action.support.replication;

-import org.elasticsearch.action.support.ActionTestUtils;
-import org.elasticsearch.action.support.PlainActionFuture;
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.NoShardAvailableActionException;
 import org.elasticsearch.action.UnavailableShardsException;
@@ -28,7 +25,9 @@ import org.elasticsearch.action.admin.indices.flush.FlushRequest;
 import org.elasticsearch.action.admin.indices.flush.FlushResponse;
 import org.elasticsearch.action.admin.indices.flush.TransportFlushAction;
 import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.ActionTestUtils;
 import org.elasticsearch.action.support.DefaultShardOperationFailedException;
+import org.elasticsearch.action.support.PlainActionFuture;
 import org.elasticsearch.action.support.broadcast.BroadcastRequest;
 import org.elasticsearch.action.support.broadcast.BroadcastResponse;
 import org.elasticsearch.cluster.ClusterState;
@@ -41,6 +40,7 @@ import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
@@ -104,6 +104,7 @@ public class BroadcastReplicationTests extends ESTestCase {
             new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), null);
     }

+    @Override
     @After
     public void tearDown() throws Exception {
         super.tearDown();
@@ -244,13 +245,15 @@ public class BroadcastReplicationTests extends ESTestCase {
         return flushResponse;
     }

-    public BroadcastResponse executeAndAssertImmediateResponse(TransportBroadcastReplicationAction broadcastAction, DummyBroadcastRequest request) {
+    public BroadcastResponse executeAndAssertImmediateResponse(
+            TransportBroadcastReplicationAction<DummyBroadcastRequest, BroadcastResponse, ?, ?> broadcastAction,
+            DummyBroadcastRequest request) {
         PlainActionFuture<BroadcastResponse> response = PlainActionFuture.newFuture();
         broadcastAction.execute(request, response);
         return response.actionGet("5s");
     }

-    private void assertBroadcastResponse(int total, int successful, int failed, BroadcastResponse response, Class exceptionClass) {
+    private void assertBroadcastResponse(int total, int successful, int failed, BroadcastResponse response, Class<?> exceptionClass) {
         assertThat(response.getSuccessfulShards(), equalTo(successful));
         assertThat(response.getTotalShards(), equalTo(total));
         assertThat(response.getFailedShards(), equalTo(failed));
@@ -167,7 +167,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase {
         public final boolean requestPositions;
         public final boolean requestOffsets;
         public final boolean requestPayloads;
-        public Class expectedException = null;
+        public Class<? extends Exception> expectedException = null;

         public TestConfig(TestDoc doc, String[] selectedFields, boolean requestPositions, boolean requestOffsets, boolean requestPayloads) {
             this.doc = doc;
@@ -177,7 +177,7 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase {
             this.requestPayloads = requestPayloads;
         }

-        public TestConfig expectedException(Class exceptionClass) {
+        public TestConfig expectedException(Class<? extends Exception> exceptionClass) {
             this.expectedException = exceptionClass;
             return this;
         }
@@ -20,8 +20,8 @@
 package org.elasticsearch.client;

 import org.elasticsearch.ExceptionsHelper;
-import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction;
 import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction;
@@ -56,7 +56,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase {
         .put(ThreadContext.PREFIX + ".key2", "val 2")
         .build();

-    private static final Action[] ACTIONS = new Action[] {
+    private static final Action<?>[] ACTIONS = new Action[] {
         // client actions
         GetAction.INSTANCE, SearchAction.INSTANCE, DeleteAction.INSTANCE, DeleteStoredScriptAction.INSTANCE,
         IndexAction.INSTANCE,
@@ -92,7 +92,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase {
         terminate(threadPool);
     }

-    protected abstract Client buildClient(Settings headersSettings, Action[] testedActions);
+    protected abstract Client buildClient(Settings headersSettings, Action<?>[] testedActions);


     public void testActions() {
@@ -18,13 +18,13 @@
  */
 package org.elasticsearch.common.geo;

-import org.locationtech.jts.geom.Geometry;
-import org.locationtech.jts.geom.GeometryFactory;
 import org.elasticsearch.common.geo.parsers.ShapeParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions;
+import org.locationtech.jts.geom.Geometry;
+import org.locationtech.jts.geom.GeometryFactory;
 import org.locationtech.spatial4j.shape.Shape;
 import org.locationtech.spatial4j.shape.ShapeCollection;
 import org.locationtech.spatial4j.shape.jts.JtsGeometry;
@@ -49,7 +49,7 @@ abstract class BaseGeoParsingTestCase extends ESTestCase {
     public abstract void testParseEnvelope() throws IOException;
     public abstract void testParseGeometryCollection() throws IOException;

-    protected void assertValidException(XContentBuilder builder, Class expectedException) throws IOException {
+    protected void assertValidException(XContentBuilder builder, Class<?> expectedException) throws IOException {
         try (XContentParser parser = createParser(builder)) {
             parser.nextToken();
             ElasticsearchGeoAssertions.assertValidException(parser, expectedException);
@@ -37,7 +37,7 @@ import java.io.IOException;

 import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;

-public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> extends ESTestCase {
+public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder<?,?>> extends ESTestCase {

     private static final int NUMBER_OF_TESTBUILDERS = 20;
     private static NamedWriteableRegistry namedWriteableRegistry;
@@ -81,7 +81,7 @@ public abstract class AbstractShapeBuilderTestCase<SB extends ShapeBuilder> exte
             XContentBuilder shuffled = shuffleXContent(builder);
             try (XContentParser shapeContentParser = createParser(shuffled)) {
                 shapeContentParser.nextToken();
-                ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser);
+                ShapeBuilder<?, ?> parsedShape = ShapeParser.parse(shapeContentParser);
                 assertNotSame(testShape, parsedShape);
                 assertEquals(testShape, parsedShape);
                 assertEquals(testShape.hashCode(), parsedShape.hashCode());
@@ -629,7 +629,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {

     public void testMap() throws Exception {
         Map<String, Map<String, ?>> maps = new HashMap<>();
-        maps.put("{'map':null}", (Map) null);
+        maps.put("{'map':null}", (Map<String, ?>) null);
         maps.put("{'map':{}}", Collections.emptyMap());
         maps.put("{'map':{'key':'value'}}", singletonMap("key", "value"));

@@ -654,7 +654,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {

     public void testIterable() throws Exception {
         Map<String, Iterable<?>> iterables = new HashMap<>();
-        iterables.put("{'iter':null}", (Iterable) null);
+        iterables.put("{'iter':null}", (Iterable<?>) null);
         iterables.put("{'iter':[]}", Collections.emptyList());
         iterables.put("{'iter':['a','b']}", Arrays.asList("a", "b"));

@@ -944,7 +944,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {

         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder()
                 .startObject()
-                .field("field", (Iterable) values)
+                .field("field", values)
                 .endObject());
         assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));
     }
@@ -959,7 +959,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {

         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder()
                 .startObject()
-                .field("field", (Iterable) values)
+                .field("field", values)
                 .endObject());
         assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));
     }
@@ -972,7 +972,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         List<Object> it1 = new ArrayList<>();

         map0.put("foo", 0);
-        map0.put("it1", (Iterable<?>) it1); // map 0 -> it1
+        map0.put("it1", it1); // map 0 -> it1

         it1.add(map1);
         it1.add(map2); // it 1 -> map 1, map 2
@@ -33,6 +33,7 @@ import org.elasticsearch.search.MultiValueMode;

 import java.io.IOException;
+import java.util.List;

 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;

@@ -72,7 +73,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes

     public void testDeletedDocs() throws Exception {
         add2SingleValuedDocumentsAndDeleteOneOfThem();
-        IndexFieldData indexFieldData = getForField("value");
+        IndexFieldData<?> indexFieldData = getForField("value");
         List<LeafReaderContext> readerContexts = refreshReader();
         for (LeafReaderContext readerContext : readerContexts) {
             AtomicFieldData fieldData = indexFieldData.load(readerContext);
@@ -86,7 +87,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes

     public void testSingleValueAllSet() throws Exception {
         fillSingleValueAllSet();
-        IndexFieldData indexFieldData = getForField("value");
+        IndexFieldData<?> indexFieldData = getForField("value");
         List<LeafReaderContext> readerContexts = refreshReader();
         for (LeafReaderContext readerContext : readerContexts) {
             AtomicFieldData fieldData = indexFieldData.load(readerContext);
@@ -156,7 +157,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes

     public void testSingleValueWithMissing() throws Exception {
         fillSingleValueWithMissing();
-        IndexFieldData indexFieldData = getForField("value");
+        IndexFieldData<?> indexFieldData = getForField("value");
         List<LeafReaderContext> readerContexts = refreshReader();
         for (LeafReaderContext readerContext : readerContexts) {
             AtomicFieldData fieldData = indexFieldData.load(readerContext);
@@ -177,7 +178,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes
         // the segments are force merged to a single segment so that the sorted binary doc values can be asserted within a single segment.
         // Previously we used the SlowCompositeReaderWrapper but this is an unideal solution so force merging is a better idea.
         writer.forceMerge(1);
-        IndexFieldData indexFieldData = getForField("value");
+        IndexFieldData<?> indexFieldData = getForField("value");
         List<LeafReaderContext> readerContexts = refreshReader();
         for (LeafReaderContext readerContext : readerContexts) {
             AtomicFieldData fieldData = indexFieldData.load(readerContext);
@@ -211,7 +212,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes

     public void testMultiValueWithMissing() throws Exception {
         fillMultiValueWithMissing();
-        IndexFieldData indexFieldData = getForField("value");
+        IndexFieldData<?> indexFieldData = getForField("value");
         List<LeafReaderContext> readerContexts = refreshReader();
         for (LeafReaderContext readerContext : readerContexts) {
             AtomicFieldData fieldData = indexFieldData.load(readerContext);
@@ -227,7 +228,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes

     public void testMissingValueForAll() throws Exception {
         fillAllMissing();
-        IndexFieldData indexFieldData = getForField("value");
+        IndexFieldData<?> indexFieldData = getForField("value");
         List<LeafReaderContext> readerContexts = refreshReader();
         for (LeafReaderContext readerContext : readerContexts) {
             AtomicFieldData fieldData = indexFieldData.load(readerContext);
@@ -251,7 +252,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes

     public void testSortMultiValuesFields() throws Exception {
         fillExtendedMvSet();
-        IndexFieldData indexFieldData = getForField("value");
+        IndexFieldData<?> indexFieldData = getForField("value");

         IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer));
         SortField sortField =
@@ -170,7 +170,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
writer.addDocument(d);
refreshReader();

IndexFieldData fieldData = getForField("non_existing_field");
IndexFieldData<?> fieldData = getForField("non_existing_field");
int max = randomInt(7);
for (LeafReaderContext readerContext : readerContexts) {
AtomicFieldData previous = null;

@@ -260,7 +260,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
}
}

final IndexFieldData indexFieldData = getForField("value");
final IndexFieldData<?> indexFieldData = getForField("value");
final String missingValue = values[1];
IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer));
SortField sortField = indexFieldData.sortField(missingValue, MultiValueMode.MIN, null, reverse);

@@ -315,7 +315,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI
writer.commit();
}
}
final IndexFieldData indexFieldData = getForField("value");
final IndexFieldData<?> indexFieldData = getForField("value");
IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer));
SortField sortField = indexFieldData.sortField(first ? "_first" : "_last", MultiValueMode.MIN, null, reverse);
TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField));

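The hunks above all make the same move: the field-data value is only read, never written, so the raw IndexFieldData can become the unbounded wildcard IndexFieldData<?> with no other change. A minimal, self-contained sketch of why a wildcard suffices for read-only use (hypothetical names, not the Elasticsearch API):

    import java.util.ArrayList;
    import java.util.List;

    class WildcardDemo {
        // "List<?>" means "a list of some unknown element type".
        // Reading is safe (every element is at least an Object);
        // adding anything but null is rejected by the compiler.
        static int countNonNull(List<?> values) {
            int count = 0;
            for (Object value : values) { // reads come back as Object
                if (value != null) {
                    count++;
                }
            }
            return count;
        }

        public static void main(String[] args) {
            List<String> strings = new ArrayList<>();
            strings.add("a");
            strings.add("b");
            System.out.println(countNonNull(strings)); // prints 2
            // values.add("c") inside countNonNull would not compile: the
            // compiler cannot prove "c" matches the unknown element type.
        }
    }
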
@@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

import java.util.HashMap;
import java.util.Map;

@@ -29,8 +30,6 @@ import java.util.function.Function;

import static java.util.Collections.singletonMap;

import org.elasticsearch.script.ScriptType;

/**
* This class contains various mocked scripts that are used in aggregations integration tests.
*/

@@ -68,32 +67,32 @@ public class AggregationTestScriptsPlugin extends MockScriptPlugin {
});

scripts.put("doc['value'].value", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
Map<?,?> doc = (Map<?,?>) vars.get("doc");
return doc.get("value");
});

scripts.put("doc['value'].value - dec", vars -> {
int dec = (int) vars.get("dec");
Map<?, ?> doc = (Map) vars.get("doc");
Map<?,?> doc = (Map<?,?>) vars.get("doc");
ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value");
return value.getValue() - dec;
});

scripts.put("doc['value'].value + inc", vars -> {
int inc = (int) vars.get("inc");
Map<?, ?> doc = (Map) vars.get("doc");
Map<?,?> doc = (Map<?,?>) vars.get("doc");
ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value");
return value.getValue() + inc;
});

scripts.put("doc['values'].values", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
Map<?, ?> doc = (Map<?,?>) vars.get("doc");
return doc.get("values");
});

scripts.put(DECREMENT_ALL_VALUES.getIdOrCode(), vars -> {
int dec = (int) vars.get("dec");
Map<?, ?> doc = (Map) vars.get("doc");
Map<?, ?> doc = (Map<?,?>) vars.get("doc");
ScriptDocValues.Longs values = (ScriptDocValues.Longs) doc.get("values");

double[] res = new double[values.size()];

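Note the two-part change in the script hunks: the variable moves from the raw Map to a wildcard Map, and the cast moves from (Map) to (Map<?,?>). Because an unbounded wildcard type is reifiable, the wildcard cast stays free of unchecked warnings, unlike a cast to, say, Map<String, Object>. A small sketch of the difference (the vars map stands in for the script context):

    import java.util.HashMap;
    import java.util.Map;

    class CastDemo {
        public static void main(String[] args) {
            Map<String, Object> vars = new HashMap<>();
            vars.put("doc", new HashMap<String, Object>());

            // Raw cast: compiles, but "doc" becomes a raw type and every
            // later use of it trips the raw-types lint.
            // Map doc = (Map) vars.get("doc");

            // Wildcard cast: no raw type and still no unchecked warning,
            // because casting to Map<?, ?> asserts nothing about the
            // (erased) key and value types.
            Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
            System.out.println(doc.isEmpty()); // prints true
        }
    }
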
@@ -99,10 +99,10 @@ import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
*/
public class AggregationsTests extends ESTestCase {

private static final List<InternalAggregationTestCase> aggsTests = getAggsTests();
private static final List<InternalAggregationTestCase<?>> aggsTests = getAggsTests();

private static List<InternalAggregationTestCase> getAggsTests() {
List<InternalAggregationTestCase> aggsTests = new ArrayList<>();
private static List<InternalAggregationTestCase<?>> getAggsTests() {
List<InternalAggregationTestCase<?>> aggsTests = new ArrayList<>();
aggsTests.add(new InternalCardinalityTests());
aggsTests.add(new InternalTDigestPercentilesTests());
aggsTests.add(new InternalTDigestPercentilesRanksTests());

@@ -156,11 +156,11 @@ public class AggregationsTests extends ESTestCase {

@Before
public void init() throws Exception {
for (InternalAggregationTestCase aggsTest : aggsTests) {
for (InternalAggregationTestCase<?> aggsTest : aggsTests) {
if (aggsTest instanceof InternalMultiBucketAggregationTestCase) {
// Lower down the number of buckets generated by multi bucket aggregation tests in
// order to avoid too many aggregations to be created.
((InternalMultiBucketAggregationTestCase) aggsTest).setMaxNumberOfBuckets(3);
((InternalMultiBucketAggregationTestCase<?>) aggsTest).setMaxNumberOfBuckets(3);
}
aggsTest.setUp();
}

@@ -168,7 +168,7 @@ public class AggregationsTests extends ESTestCase {

@After
public void cleanUp() throws Exception {
for (InternalAggregationTestCase aggsTest : aggsTests) {
for (InternalAggregationTestCase<?> aggsTest : aggsTests) {
aggsTest.tearDown();
}
}

@@ -268,9 +268,9 @@ public class AggregationsTests extends ESTestCase {
int numAggs = randomIntBetween(minNumAggs, 4);
List<InternalAggregation> aggs = new ArrayList<>(numAggs);
for (int i = 0; i < numAggs; i++) {
InternalAggregationTestCase testCase = randomFrom(aggsTests);
InternalAggregationTestCase<?> testCase = randomFrom(aggsTests);
if (testCase instanceof InternalMultiBucketAggregationTestCase) {
InternalMultiBucketAggregationTestCase multiBucketAggTestCase = (InternalMultiBucketAggregationTestCase) testCase;
InternalMultiBucketAggregationTestCase<?> multiBucketAggTestCase = (InternalMultiBucketAggregationTestCase<?>) testCase;
if (currentDepth < maxDepth) {
multiBucketAggTestCase.setSubAggregationsSupplier(
() -> createTestInstance(0, currentDepth + 1, maxDepth)

@@ -281,7 +281,7 @@ public class AggregationsTests extends ESTestCase {
);
}
} else if (testCase instanceof InternalSingleBucketAggregationTestCase) {
InternalSingleBucketAggregationTestCase singleBucketAggTestCase = (InternalSingleBucketAggregationTestCase) testCase;
InternalSingleBucketAggregationTestCase<?> singleBucketAggTestCase = (InternalSingleBucketAggregationTestCase<?>) testCase;
if (currentDepth < maxDepth) {
singleBucketAggTestCase.subAggregationsSupplier = () -> createTestInstance(0, currentDepth + 1, maxDepth);
} else {

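The AggregationsTests hunks show how instanceof interacts with generics: instanceof can only test the erased type, so the follow-up cast targets the wildcard form, which needs no unchecked suppression. A compact sketch with stand-in classes:

    import java.util.ArrayList;
    import java.util.List;

    class InstanceofDemo {
        abstract static class TestCase<T> { }
        static class BucketTestCase<T> extends TestCase<T> {
            void setMaxNumberOfBuckets(int max) {
                System.out.println("max buckets = " + max);
            }
        }

        public static void main(String[] args) {
            List<TestCase<?>> tests = new ArrayList<>();
            tests.add(new BucketTestCase<String>());
            for (TestCase<?> test : tests) {
                // instanceof names the raw (erased) type; the cast names the
                // wildcard type, keeping the code free of raw types without
                // claiming a concrete type argument.
                if (test instanceof BucketTestCase) {
                    ((BucketTestCase<?>) test).setMaxNumberOfBuckets(3);
                }
            }
        }
    }
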
@@ -69,23 +69,23 @@ public class CardinalityIT extends ESIntegTestCase {
scripts.put("_value", vars -> vars.get("_value"));

scripts.put("doc['str_value'].value", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
return doc.get("str_value");
});

scripts.put("doc['str_values'].values", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
ScriptDocValues.Strings strValue = (ScriptDocValues.Strings) doc.get("str_values");
return strValue.getValues();
});

scripts.put("doc[' + singleNumericField() + '].value", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
return doc.get(singleNumericField());
});

scripts.put("doc[' + multiNumericField(false) + '].values", vars -> {
Map<?, ?> doc = (Map) vars.get("doc");
Map<?, ?> doc = (Map<?, ?>) vars.get("doc");
return ((ScriptDocValues<?>) doc.get(multiNumericField(false))).getValues();
});

@@ -890,7 +890,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
assertSuggestions(searchResponse, true, "suggestions", expected);
}

public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... suggestions) {
public void assertSuggestions(String suggestionName, SuggestionBuilder<?> suggestBuilder, String... suggestions) {
SearchResponse searchResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion(suggestionName, suggestBuilder)).execute().actionGet();
assertSuggestions(searchResponse, suggestionName, suggestions);
}

@@ -971,7 +971,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {

if (completionMappingBuilder.contextMappings != null) {
mapping = mapping.startArray("contexts");
for (Map.Entry<String, ContextMapping> contextMapping : completionMappingBuilder.contextMappings.entrySet()) {
for (Map.Entry<String, ContextMapping<?>> contextMapping : completionMappingBuilder.contextMappings.entrySet()) {
mapping = mapping.startObject()
.field("name", contextMapping.getValue().name())
.field("type", contextMapping.getValue().type().name());

@@ -1189,7 +1189,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
String indexAnalyzer = "simple";
Boolean preserveSeparators = random().nextBoolean();
Boolean preservePositionIncrements = random().nextBoolean();
LinkedHashMap<String, ContextMapping> contextMappings = null;
LinkedHashMap<String, ContextMapping<?>> contextMappings = null;

public CompletionMappingBuilder searchAnalyzer(String searchAnalyzer) {
this.searchAnalyzer = searchAnalyzer;

@@ -1208,7 +1208,7 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase {
return this;
}

public CompletionMappingBuilder context(LinkedHashMap<String, ContextMapping> contextMappings) {
public CompletionMappingBuilder context(LinkedHashMap<String, ContextMapping<?>> contextMappings) {
this.contextMappings = contextMappings;
return this;
}

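Typing the context map values as ContextMapping<?> is what lets one map hold category and geo mappings whose type parameters differ. A sketch with hypothetical stand-ins for the suggester's mapping classes:

    import java.util.LinkedHashMap;

    class ContextMapDemo {
        abstract static class Mapping<T> {
            abstract String name();
        }
        static class CategoryMapping extends Mapping<String> {
            String name() { return "cat"; }
        }
        static class GeoMapping extends Mapping<long[]> {
            String name() { return "geo"; }
        }

        public static void main(String[] args) {
            // Mapping<?> accepts values with different type arguments,
            // which a single concrete Mapping<T> could not.
            LinkedHashMap<String, Mapping<?>> map = new LinkedHashMap<>();
            map.put("cat", new CategoryMapping());
            map.put("geo", new GeoMapping());
            map.values().forEach(m -> System.out.println(m.name()));
        }
    }
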
@@ -19,6 +19,7 @@
package org.elasticsearch.search.suggest;

import com.carrotsearch.randomizedtesting.generators.RandomStrings;

import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;

@@ -68,7 +69,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testContextPrefix() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("cat", ContextBuilder.category("cat").field("cat").build());
boolean addAnotherContext = randomBoolean();
if (addAnotherContext) {

@@ -99,7 +100,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testContextRegex() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("cat", ContextBuilder.category("cat").field("cat").build());
boolean addAnotherContext = randomBoolean();
if (addAnotherContext) {

@@ -130,7 +131,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testContextFuzzy() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("cat", ContextBuilder.category("cat").field("cat").build());
boolean addAnotherContext = randomBoolean();
if (addAnotherContext) {

@@ -162,7 +163,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {

public void testContextFilteringWorksWithUTF8Categories() throws Exception {
CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);
IndexResponse indexResponse = client().prepareIndex(INDEX, TYPE, "1")

@@ -183,7 +184,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {

public void testSingleContextFiltering() throws Exception {
CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);
int numDocs = 10;

@@ -209,7 +210,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {

public void testSingleContextBoosting() throws Exception {
CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);
int numDocs = 10;

@@ -237,7 +238,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {

public void testSingleContextMultipleContexts() throws Exception {
CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build();
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping));
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);
int numDocs = 10;

@@ -262,7 +263,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testMultiContextFiltering() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("cat", ContextBuilder.category("cat").field("cat").build());
map.put("type", ContextBuilder.category("type").field("type").build());
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);

@@ -306,7 +307,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {

@AwaitsFix(bugUrl = "multiple context boosting is broken, as a suggestion, contexts pair is treated as (num(context) entries)")
public void testMultiContextBoosting() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("cat", ContextBuilder.category("cat").field("cat").build());
map.put("type", ContextBuilder.category("type").field("type").build());
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);

@@ -361,7 +362,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testMissingContextValue() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("cat", ContextBuilder.category("cat").field("cat").build());
map.put("type", ContextBuilder.category("type").field("type").build());
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);

@@ -391,7 +392,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testSeveralContexts() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
final int numContexts = randomIntBetween(2, 5);
for (int i = 0; i < numContexts; i++) {
map.put("type" + i, ContextBuilder.category("type" + i).field("type" + i).build());

@@ -421,7 +422,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testSimpleGeoPrefix() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("geo", ContextBuilder.geo("geo").build());
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);

@@ -446,7 +447,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testGeoFiltering() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("geo", ContextBuilder.geo("geo").build());
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);

@@ -478,7 +479,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testGeoBoosting() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("geo", ContextBuilder.geo("geo").build());
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);

@@ -511,7 +512,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testGeoPointContext() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("geo", ContextBuilder.geo("geo").build());
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);

@@ -551,7 +552,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
neighbours.add("gcpu");
neighbours.add("u10h");

LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("geo", ContextBuilder.geo("geo").precision(4).build());
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);
createIndexAndMapping(mapping);

@@ -640,7 +641,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
}

public void testSkipDuplicatesWithContexts() throws Exception {
LinkedHashMap<String, ContextMapping> map = new LinkedHashMap<>();
LinkedHashMap<String, ContextMapping<?>> map = new LinkedHashMap<>();
map.put("type", ContextBuilder.category("type").field("type").build());
map.put("cat", ContextBuilder.category("cat").field("cat").build());
final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map);

@@ -706,7 +707,7 @@ public class ContextCompletionSuggestSearchIT extends ESIntegTestCase {
List<String> categoryContextFields = new ArrayList<>();
if (completionMappingBuilder.contextMappings != null) {
mapping.startArray("contexts");
for (Map.Entry<String, ContextMapping> contextMapping : completionMappingBuilder.contextMappings.entrySet()) {
for (Map.Entry<String, ContextMapping<?>> contextMapping : completionMappingBuilder.contextMappings.entrySet()) {
mapping.startObject()
.field("name", contextMapping.getValue().name())
.field("type", contextMapping.getValue().type().name());

@@ -48,7 +48,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe
private static final Map<String, List<? extends ToXContent>> contextMap = new HashMap<>();
private static String categoryContextName;
private static String geoQueryContextName;
private static List<ContextMapping> contextMappings = new ArrayList<>();
private static List<ContextMapping<?>> contextMappings = new ArrayList<>();

@Override
protected CompletionSuggestionBuilder randomSuggestionBuilder() {

@@ -19,13 +19,13 @@

package org.elasticsearch.test.hamcrest;

import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.parsers.ShapeParser;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.ShapeCollection;
import org.locationtech.spatial4j.shape.impl.GeoCircle;
import org.locationtech.spatial4j.shape.impl.RectangleImpl;
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
import org.locationtech.spatial4j.shape.jts.JtsPoint;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.hamcrest.Matcher;
import org.junit.Assert;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.LineString;

@@ -33,12 +33,12 @@ import org.locationtech.jts.geom.MultiLineString;
import org.locationtech.jts.geom.MultiPoint;
import org.locationtech.jts.geom.MultiPolygon;
import org.locationtech.jts.geom.Polygon;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.hamcrest.Matcher;
import org.junit.Assert;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.ShapeCollection;
import org.locationtech.spatial4j.shape.impl.GeoCircle;
import org.locationtech.spatial4j.shape.impl.RectangleImpl;
import org.locationtech.spatial4j.shape.jts.JtsGeometry;
import org.locationtech.spatial4j.shape.jts.JtsPoint;

import java.util.Arrays;
import java.util.Collections;

@@ -208,9 +208,9 @@ public class ElasticsearchGeoAssertions {
} else if (s1 instanceof ShapeCollection && s2 instanceof ShapeCollection) {
assertEquals((ShapeCollection)s1, (ShapeCollection)s2);
} else if (s1 instanceof GeoCircle && s2 instanceof GeoCircle) {
Assert.assertEquals((GeoCircle)s1, (GeoCircle)s2);
Assert.assertEquals(s1, s2);
} else if (s1 instanceof RectangleImpl && s2 instanceof RectangleImpl) {
Assert.assertEquals((RectangleImpl)s1, (RectangleImpl)s2);
Assert.assertEquals(s1, s2);
} else {
//We want to know the type of the shape because we test shape equality in a special way...
//... in particular we test that one ring is equivalent to another ring even if the points are rotated or reversed.

@@ -254,7 +254,7 @@ public class ElasticsearchGeoAssertions {
return GeoDistance.ARC.calculate(lat1, lon1, lat2, lon2, DistanceUnit.DEFAULT);
}

public static void assertValidException(XContentParser parser, Class expectedException) {
public static void assertValidException(XContentParser parser, Class<?> expectedException) {
try {
ShapeParser.parse(parser).build();
Assert.fail("process completed successfully when " + expectedException.getName() + " expected");

@@ -20,6 +20,7 @@
package org.elasticsearch.bootstrap;

import com.carrotsearch.randomizedtesting.RandomizedRunner;

import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.common.Booleans;

@@ -175,7 +176,7 @@ public class BootstrapForTesting {
/** Add the codebase url of the given classname to the codebases map, if the class exists. */
private static void addClassCodebase(Map<String, URL> codebases, String name, String classname) {
try {
Class clazz = BootstrapForTesting.class.getClassLoader().loadClass(classname);
Class<?> clazz = BootstrapForTesting.class.getClassLoader().loadClass(classname);
if (codebases.put(name, clazz.getProtectionDomain().getCodeSource().getLocation()) != null) {
throw new IllegalStateException("Already added " + name + " codebase for testing");
}

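Class<?> is the canonical case for the unbounded wildcard: loadClass cannot know at compile time which class it will return. A tiny runnable illustration:

    class ClassLookupDemo {
        public static void main(String[] args) throws Exception {
            // loadClass returns Class<?>; the wildcard is the honest (and
            // warning-free) type for the variable.
            Class<?> clazz = ClassLookupDemo.class.getClassLoader()
                    .loadClass("java.lang.String");
            System.out.println(clazz.getName()); // java.lang.String
            // A raw "Class clazz = ..." would compile too, but would turn
            // every later use of clazz into a raw-type use.
        }
    }
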
@@ -823,7 +823,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
});

try {
StringMessageResponse message = res.txGet();
res.txGet();
fail("exception should be thrown");
} catch (Exception e) {
assertThat(e, instanceOf(ReceiveTimeoutTransportException.class));

@@ -939,8 +939,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
}

public void testTracerLog() throws InterruptedException {
TransportRequestHandler handler = (request, channel, task) -> channel.sendResponse(new StringMessageResponse(""));
TransportRequestHandler handlerWithError = new TransportRequestHandler<StringMessageRequest>() {
TransportRequestHandler<TransportRequest> handler = (request, channel, task) -> channel.sendResponse(new StringMessageResponse(""));
TransportRequestHandler<StringMessageRequest> handlerWithError = new TransportRequestHandler<StringMessageRequest>() {
@Override
public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws Exception {
if (request.timeout() > 0) {

@@ -952,7 +952,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
};

final Semaphore requestCompleted = new Semaphore(0);
TransportResponseHandler noopResponseHandler = new TransportResponseHandler<StringMessageResponse>() {
TransportResponseHandler<StringMessageResponse> noopResponseHandler = new TransportResponseHandler<StringMessageResponse>() {

@Override
public StringMessageResponse newInstance() {

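Parameterizing the handler types also helps the lambda on the first changed line: once the target type names its request type, the compiler checks the lambda body against that type rather than the erased one. A sketch with a simplified handler interface (not the Transport API):

    class HandlerDemo {
        interface Handler<R> {
            void messageReceived(R request);
        }
        static class PingRequest { }

        public static void main(String[] args) {
            // Naming the type argument gives the lambda parameter a real
            // type, so the body is checked against PingRequest.
            Handler<PingRequest> handler = request ->
                    System.out.println("got " + request.getClass().getSimpleName());
            handler.messageReceived(new PingRequest());
        }
    }
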
@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.core;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.NamedDiff;
import org.elasticsearch.cluster.metadata.MetaData;

@@ -157,7 +158,6 @@ import java.util.function.Supplier;

public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPlugin {

@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
static Optional<String> X_PACK_FEATURE = Optional.of("x-pack");

@Override

@@ -205,7 +205,7 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPl
}

@Override
public List<Action> getClientActions() {
public List<Action<? extends ActionResponse>> getClientActions() {
return Arrays.asList(
// deprecation
DeprecationInfoAction.INSTANCE,

@@ -9,9 +9,9 @@ import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.Version;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;

@@ -261,8 +261,8 @@ public class XPackPlugin extends XPackClientPlugin implements ScriptPlugin, Exte
}

@Override
public List<Action> getClientActions() {
List<Action> actions = new ArrayList<>();
public List<Action<? extends ActionResponse>> getClientActions() {
List<Action<? extends ActionResponse>> actions = new ArrayList<>();
actions.addAll(licensing.getClientActions());
actions.addAll(super.getClientActions());
return actions;

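getClientActions in both plugins now returns List<Action<? extends ActionResponse>>: the bound records that every action produces some ActionResponse subtype while still admitting heterogeneous actions. A self-contained sketch of the same shape:

    import java.util.ArrayList;
    import java.util.List;

    class BoundedListDemo {
        static class Response { }
        static class InfoResponse extends Response { }
        abstract static class Action<R extends Response> { }
        static class InfoAction extends Action<InfoResponse> { }

        // "? extends Response" admits actions with any concrete response
        // type, which a list of raw Action could only do uncheckedly.
        static List<Action<? extends Response>> clientActions() {
            List<Action<? extends Response>> actions = new ArrayList<>();
            actions.add(new InfoAction());
            return actions;
        }

        public static void main(String[] args) {
            System.out.println(clientActions().size()); // prints 1
        }
    }
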
@@ -24,5 +24,6 @@ public abstract class ActionFactory {
/**
* Parses the given xcontent and creates a concrete action
*/
public abstract ExecutableAction parseExecutable(String watchId, String actionId, XContentParser parser) throws IOException;
public abstract ExecutableAction<? extends Action> parseExecutable(String watchId, String actionId, XContentParser parser)
throws IOException;
}

@@ -40,14 +40,14 @@ public class ActionWrapper implements ToXContentObject {
@Nullable
private final ExecutableCondition condition;
@Nullable
private final ExecutableTransform transform;
private final ExecutableTransform<Transform, Transform.Result> transform;
private final ActionThrottler throttler;
private final ExecutableAction action;
private final ExecutableAction<? extends Action> action;

public ActionWrapper(String id, ActionThrottler throttler,
@Nullable ExecutableCondition condition,
@Nullable ExecutableTransform transform,
ExecutableAction action) {
@Nullable ExecutableTransform<Transform, Transform.Result> transform,
ExecutableAction<? extends Action> action) {
this.id = id;
this.condition = condition;
this.throttler = throttler;

@@ -63,7 +63,7 @@ public class ActionWrapper implements ToXContentObject {
return condition;
}

public ExecutableTransform transform() {
public ExecutableTransform<Transform, Transform.Result> transform() {
return transform;
}

@@ -71,7 +71,7 @@ public class ActionWrapper implements ToXContentObject {
return throttler;
}

public ExecutableAction action() {
public ExecutableAction<? extends Action> action() {
return action;
}

@@ -196,9 +196,9 @@ public class ActionWrapper implements ToXContentObject {
assert parser.currentToken() == XContentParser.Token.START_OBJECT;

ExecutableCondition condition = null;
ExecutableTransform transform = null;
ExecutableTransform<Transform, Transform.Result> transform = null;
TimeValue throttlePeriod = null;
ExecutableAction action = null;
ExecutableAction<? extends Action> action = null;

String currentFieldName = null;
XContentParser.Token token;

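ActionWrapper's field uses ? extends Action rather than a concrete argument because generics are invariant: an ExecutableAction<LoggingAction> is not an ExecutableAction<Action>, but it is an ExecutableAction<? extends Action>. A sketch of that distinction (hypothetical classes):

    class FieldWildcardDemo {
        interface Action { }
        static class LoggingAction implements Action { }
        static class Executable<A extends Action> {
            final A action;
            Executable(A action) { this.action = action; }
        }

        // Executable<Action> would reject an Executable<LoggingAction>;
        // the wildcard bound accepts any concrete action type.
        private final Executable<? extends Action> wrapped;

        FieldWildcardDemo(Executable<? extends Action> wrapped) {
            this.wrapped = wrapped;
        }

        public static void main(String[] args) {
            FieldWildcardDemo demo =
                    new FieldWildcardDemo(new Executable<>(new LoggingAction()));
            System.out.println(demo.wrapped.action.getClass().getSimpleName());
        }
    }
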
@@ -46,7 +46,7 @@ public final class AggregationTestUtils {
}

@SuppressWarnings("unchecked")
static Histogram createHistogramAggregation(String name, List histogramBuckets) {
static Histogram createHistogramAggregation(String name, List<Histogram.Bucket> histogramBuckets) {
Histogram histogram = mock(Histogram.class);
when((List<Histogram.Bucket>)histogram.getBuckets()).thenReturn(histogramBuckets);
when(histogram.getName()).thenReturn(name);

@@ -72,7 +72,7 @@ public final class AggregationTestUtils {
static Terms createTerms(String name, Term... terms) {
Terms termsAgg = mock(Terms.class);
when(termsAgg.getName()).thenReturn(name);
List buckets = new ArrayList<>();
List<Terms.Bucket> buckets = new ArrayList<>();
for (Term term: terms) {
StringTerms.Bucket bucket = mock(StringTerms.Bucket.class);
when(bucket.getKey()).thenReturn(term.key);

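In these test helpers the payoff of replacing the raw List is that Mockito stubbing stays type-checked end to end. A minimal sketch, assuming Mockito on the classpath and simplified Terms/Bucket interfaces:

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.ArrayList;
    import java.util.List;

    class MockTypingDemo {
        interface Bucket { long getDocCount(); }
        interface Terms { List<Bucket> getBuckets(); }

        public static void main(String[] args) {
            Terms terms = mock(Terms.class);
            // Declaring the list as List<Bucket> instead of a raw List
            // keeps the thenReturn call checked against getBuckets().
            List<Bucket> buckets = new ArrayList<>();
            when(terms.getBuckets()).thenReturn(buckets);
            System.out.println(terms.getBuckets().size()); // prints 0
        }
    }
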
@@ -206,7 +206,7 @@ public class BasicDistributedJobsIT extends BaseMlIntegTestCase {
assertBusy(() -> {
ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE);
PersistentTask task = tasks.getTask(MlMetadata.jobTaskId(jobId));
PersistentTask<?> task = tasks.getTask(MlMetadata.jobTaskId(jobId));

DiscoveryNode node = clusterState.nodes().resolveNode(task.getExecutorNode());
assertThat(node.getAttributes(), hasEntry(MachineLearning.ML_ENABLED_NODE_ATTR, "true"));

@@ -172,7 +172,7 @@ public class BatchedDocumentsIteratorTests extends ESTestCase {
if (responses.size() > 0) {
ActionFuture<SearchResponse> first = wrapResponse(responses.get(0));
if (responses.size() > 1) {
List<ActionFuture> rest = new ArrayList<>();
List<ActionFuture<SearchResponse>> rest = new ArrayList<>();
for (int i = 1; i < responses.size(); ++i) {
rest.add(wrapResponse(responses.get(i)));
}

@@ -229,6 +229,7 @@ public class AutodetectCommunicatorTests extends ESTestCase {
return process;
}

@SuppressWarnings("unchecked")
private AutodetectCommunicator createAutodetectCommunicator(ExecutorService executorService, AutodetectProcess autodetectProcess,
AutoDetectResultProcessor autoDetectResultProcessor,
Consumer<Exception> finishHandler) throws IOException {

@@ -242,12 +243,13 @@ public class AutodetectCommunicatorTests extends ESTestCase {
new NamedXContentRegistry(Collections.emptyList()), executorService);
}

@SuppressWarnings("unchecked")
private AutodetectCommunicator createAutodetectCommunicator(AutodetectProcess autodetectProcess,
AutoDetectResultProcessor autoDetectResultProcessor) throws IOException {
ExecutorService executorService = mock(ExecutorService.class);
when(executorService.submit(any(Callable.class))).thenReturn(mock(Future.class));
doAnswer(invocationOnMock -> {
Callable runnable = (Callable) invocationOnMock.getArguments()[0];
Callable<Void> runnable = (Callable<Void>) invocationOnMock.getArguments()[0];
runnable.call();
return mock(Future.class);
}).when(executorService).submit(any(Callable.class));

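Inside doAnswer, the argument array is untyped, so some cast is unavoidable; the commit narrows the cast target from the raw Callable to Callable<Void> and keeps the remaining unchecked warning confined to an annotated scope. A sketch of the pattern, assuming Mockito and the Matchers class this codebase already imports:

    import static org.mockito.Matchers.any;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.mock;

    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;

    class DoAnswerDemo {
        @SuppressWarnings("unchecked")
        public static void main(String[] args) {
            ExecutorService executor = mock(ExecutorService.class);
            doAnswer(invocation -> {
                // The cast is still unchecked (hence the annotation on the
                // enclosing scope), but Callable<Void> keeps the variable
                // itself off the raw-types report.
                Callable<Void> task = (Callable<Void>) invocation.getArguments()[0];
                return task.call();
            }).when(executor).submit(any(Callable.class));

            executor.submit(() -> {
                System.out.println("ran inline");
                return null;
            });
        }
    }
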
@@ -31,7 +31,6 @@ import org.elasticsearch.xpack.core.ml.job.config.JobTaskState;
import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;

@@ -42,6 +41,7 @@ import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests
import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister;
import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams;
import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange;

@@ -202,6 +202,7 @@ public class AutodetectProcessManagerTests extends ESTestCase {
verify(jobTask).updatePersistentTaskState(eq(new JobTaskState(JobState.OPENED, 1L)), any());
}

@SuppressWarnings("unchecked")
public void testOpenJob_exceedMaxNumJobs() {
when(jobManager.getJobOrThrowIfUnknown("foo")).thenReturn(createJobDetails("foo"));
when(jobManager.getJobOrThrowIfUnknown("bar")).thenReturn(createJobDetails("bar"));

@@ -214,7 +215,7 @@ public class AutodetectProcessManagerTests extends ESTestCase {
ThreadPool.Cancellable cancellable = mock(ThreadPool.Cancellable.class);
when(threadPool.scheduleWithFixedDelay(any(), any(), any())).thenReturn(cancellable);
ExecutorService executorService = mock(ExecutorService.class);
Future future = mock(Future.class);
Future<?> future = mock(Future.class);
when(executorService.submit(any(Callable.class))).thenReturn(future);
when(threadPool.executor(anyString())).thenReturn(EsExecutors.newDirectExecutorService());
AutodetectProcess autodetectProcess = mock(AutodetectProcess.class);

@@ -230,7 +231,6 @@ public class AutodetectProcessManagerTests extends ESTestCase {
doReturn(executorService).when(manager).createAutodetectExecutorService(any());

doAnswer(invocationOnMock -> {
@SuppressWarnings("unchecked")
CheckedConsumer<Exception, IOException> consumer = (CheckedConsumer<Exception, IOException>) invocationOnMock.getArguments()[2];
consumer.accept(null);
return null;

@@ -403,7 +403,7 @@ public class AuthorizationService extends AbstractComponent {
}

private static String getAction(BulkItemRequest item) {
final DocWriteRequest docWriteRequest = item.request();
final DocWriteRequest<?> docWriteRequest = item.request();
switch (docWriteRequest.opType()) {
case INDEX:
case CREATE:

@@ -129,14 +129,14 @@ public final class AuthorizationUtils {
*/
public static class AsyncAuthorizer {

private final ActionListener listener;
private final ActionListener<Void> listener;
private final BiConsumer<Role, Role> consumer;
private final Authentication authentication;
private volatile Role userRoles;
private volatile Role runAsRoles;
private CountDown countDown = new CountDown(2); // we expect only two responses!!

public AsyncAuthorizer(Authentication authentication, ActionListener listener, BiConsumer<Role, Role> consumer) {
public AsyncAuthorizer(Authentication authentication, ActionListener<Void> listener, BiConsumer<Role, Role> consumer) {
this.consumer = consumer;
this.listener = listener;
this.authentication = authentication;

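ActionListener<Void> documents that the authorization callback carries no payload: the only legal completion value is null. A sketch with a stand-in listener interface:

    class ListenerDemo {
        interface Listener<T> {
            void onResponse(T result);
            void onFailure(Exception e);
        }

        // Void records that the callback has no result; callers must
        // complete it with onResponse(null).
        static void authorize(Listener<Void> listener) {
            listener.onResponse(null);
        }

        public static void main(String[] args) {
            authorize(new Listener<Void>() {
                public void onResponse(Void result) {
                    System.out.println("authorized");
                }
                public void onFailure(Exception e) {
                    e.printStackTrace();
                }
            });
        }
    }
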
@@ -37,9 +37,9 @@ import java.util.concurrent.atomic.AtomicReference;
import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.iterableWithSize;
import static org.hamcrest.Matchers.nullValue;

public class AuditTrailTests extends SecurityIntegTestCase {

@@ -163,7 +163,7 @@ public class AuditTrailTests extends SecurityIntegTestCase {
.request();
request.indicesOptions().ignoreUnavailable();

final PlainActionFuture<Collection<Map<String, Object>>> listener = new PlainActionFuture();
final PlainActionFuture<Collection<Map<String, Object>>> listener = new PlainActionFuture<>();
ScrollHelper.fetchAllByEntity(client, request, listener, SearchHit::getSourceAsMap);

return listener.get();

@@ -199,7 +199,6 @@ public class AuthenticationServiceTests extends ESTestCase {
}
}

@SuppressWarnings("unchecked")
public void testTokenFirstMissingSecondFound() throws Exception {
when(firstRealm.token(threadContext)).thenReturn(null);
when(secondRealm.token(threadContext)).thenReturn(token);

@@ -227,7 +226,6 @@ public class AuthenticationServiceTests extends ESTestCase {
verifyNoMoreInteractions(auditTrail);
}

@SuppressWarnings("unchecked")
public void testAuthenticateBothSupportSecondSucceeds() throws Exception {
User user = new User("_username", "r1");
when(firstRealm.supports(token)).thenReturn(true);

@@ -698,7 +696,7 @@ public class AuthenticationServiceTests extends ESTestCase {
mockAuthenticate(secondRealm, token, user);
mockRealmLookupReturnsNull(firstRealm, "run_as");
doAnswer((i) -> {
ActionListener listener = (ActionListener) i.getArguments()[1];
ActionListener<User> listener = (ActionListener<User>) i.getArguments()[1];
listener.onResponse(new User("looked up user", new String[]{"some role"}));
return null;
}).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class));

@@ -735,6 +733,7 @@ public class AuthenticationServiceTests extends ESTestCase {
assertTrue(completed.get());
}

@SuppressWarnings("unchecked")
public void testRunAsLookupDifferentRealm() throws Exception {
AuthenticationToken token = mock(AuthenticationToken.class);
threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as");

@@ -742,7 +741,7 @@ public class AuthenticationServiceTests extends ESTestCase {
when(secondRealm.supports(token)).thenReturn(true);
mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"}));
doAnswer((i) -> {
ActionListener listener = (ActionListener) i.getArguments()[1];
ActionListener<User> listener = (ActionListener<User>) i.getArguments()[1];
listener.onResponse(new User("looked up user", new String[]{"some role"}));
return null;
}).when(firstRealm).lookupUser(eq("run_as"), any(ActionListener.class));

@@ -805,6 +804,7 @@ public class AuthenticationServiceTests extends ESTestCase {
}
}

@SuppressWarnings("unchecked")
public void testAuthenticateTransportDisabledRunAsUser() throws Exception {
AuthenticationToken token = mock(AuthenticationToken.class);
threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as");

@@ -813,7 +813,7 @@ public class AuthenticationServiceTests extends ESTestCase {
mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"}));
mockRealmLookupReturnsNull(firstRealm, "run_as");
doAnswer((i) -> {
ActionListener listener = (ActionListener) i.getArguments()[1];
ActionListener<User> listener = (ActionListener<User>) i.getArguments()[1];
listener.onResponse(new User("looked up user", new String[]{"some role"}, null, null, null, false));
return null;
}).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class));

@@ -833,7 +833,8 @@ public class AuthenticationServiceTests extends ESTestCase {
mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"}));
mockRealmLookupReturnsNull(firstRealm, "run_as");
doAnswer((i) -> {
ActionListener listener = (ActionListener) i.getArguments()[1];
@SuppressWarnings("unchecked")
ActionListener<User> listener = (ActionListener<User>) i.getArguments()[1];
listener.onResponse(new User("looked up user", new String[]{"some role"}, null, null, null, false));
return null;
}).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class));

@@ -984,9 +985,10 @@ public class AuthenticationServiceTests extends ESTestCase {
assertThat(threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY), equalTo((Object) authentication.encode()));
}

@SuppressWarnings("unchecked")
private void mockAuthenticate(Realm realm, AuthenticationToken token, User user) {
doAnswer((i) -> {
ActionListener listener = (ActionListener) i.getArguments()[1];
ActionListener<AuthenticationResult> listener = (ActionListener<AuthenticationResult>) i.getArguments()[1];
if (user == null) {
listener.onResponse(AuthenticationResult.notHandled());
} else {

@@ -1008,9 +1010,10 @@ public class AuthenticationServiceTests extends ESTestCase {
return future.actionGet();
}

@SuppressWarnings("unchecked")
private static void mockRealmLookupReturnsNull(Realm realm, String username) {
doAnswer((i) -> {
ActionListener listener = (ActionListener) i.getArguments()[1];
ActionListener<?> listener = (ActionListener<?>) i.getArguments()[1];
listener.onResponse(null);
return null;
}).when(realm).lookupUser(eq(username), any(ActionListener.class));

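The hunks above are deliberately asymmetric: where a real User is passed to onResponse, the cast names ActionListener<User>; where only null is passed, ActionListener<?> suffices, because null is assignable to any type and a cast to an unbounded wildcard type is not unchecked. A sketch of both cases:

    class WildcardNullDemo {
        interface Listener<T> {
            void onResponse(T result);
        }

        public static void main(String[] args) {
            Object untyped = (Listener<String>) result ->
                    System.out.println("got: " + result);

            // Passing a real value requires naming the type argument, and
            // the cast from Object is unchecked.
            @SuppressWarnings("unchecked")
            Listener<String> typed = (Listener<String>) untyped;
            typed.onResponse("hello");

            // Passing null works through the wildcard: no unchecked cast
            // is needed, because nothing is claimed about the type.
            Listener<?> unknown = (Listener<?>) untyped;
            unknown.onResponse(null);
        }
    }
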
@@ -168,6 +168,7 @@ public class AuthorizationServiceTests extends ESTestCase {
private Map<String, RoleDescriptor> roleMap = new HashMap<>();
private CompositeRolesStore rolesStore;

@SuppressWarnings("unchecked")
@Before
public void setup() {
rolesStore = mock(CompositeRolesStore.class);

@@ -208,7 +209,7 @@ public class AuthorizationServiceTests extends ESTestCase {
}

private void authorize(Authentication authentication, String action, TransportRequest request) {
PlainActionFuture future = new PlainActionFuture();
PlainActionFuture<Void> future = new PlainActionFuture<>();
AuthorizationUtils.AsyncAuthorizer authorizer = new AuthorizationUtils.AsyncAuthorizer(authentication, future,
(userRoles, runAsRoles) -> {
authorizationService.authorize(authentication, action, request, userRoles, runAsRoles);

@@ -598,7 +599,6 @@ public class AuthorizationServiceTests extends ESTestCase {
public void testRunAsRequestWithNoRolesUser() {
final TransportRequest request = mock(TransportRequest.class);
final Authentication authentication = createAuthentication(new User("run as me", null, new User("test user", "admin")));
final User user = new User("run as me", null, new User("test user", "admin"));
assertNotEquals(authentication.getUser().authenticatedUser(), authentication);
assertThrowsAuthorizationExceptionRunAs(
() -> authorize(authentication, "indices:a", request),

@@ -33,7 +33,7 @@ public class InputRegistry {
* @param parser The parser containing the input definition
* @return A new input instance from the parser
*/
public ExecutableInput parse(String watchId, XContentParser parser) throws IOException {
public ExecutableInput<?, ?> parse(String watchId, XContentParser parser) throws IOException {
String type = null;

if (parser.currentToken() != XContentParser.Token.START_OBJECT) {

@@ -42,7 +42,7 @@ public class InputRegistry {
}

XContentParser.Token token;
ExecutableInput input = null;
ExecutableInput<?, ?> input = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
type = parser.currentName();

@@ -50,7 +50,7 @@ public class InputRegistry {
throw new ElasticsearchParseException("could not parse input for watch [{}]. expected field indicating the input type, " +
"but found [{}] instead", watchId, token);
} else if (token == XContentParser.Token.START_OBJECT) {
InputFactory factory = factories.get(type);
InputFactory<?, ?, ?> factory = factories.get(type);
if (factory == null) {
throw new ElasticsearchParseException("could not parse input for watch [{}]. unknown input type [{}]", watchId, type);
}

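InputFactory takes three type parameters, so its warning-free registry form needs one wildcard per parameter: InputFactory<?, ?, ?>. A sketch of a registry keyed by type name (hypothetical factory interface):

    import java.util.HashMap;
    import java.util.Map;

    class FactoryRegistryDemo {
        interface Factory<I, R, E> {
            E create(String definition);
        }

        public static void main(String[] args) {
            // Each registered factory has its own type arguments; the
            // registry can only describe them with one wildcard per
            // parameter.
            Map<String, Factory<?, ?, ?>> factories = new HashMap<>();
            factories.put("search",
                    (Factory<String, Integer, Object>) def -> "parsed:" + def);

            Factory<?, ?, ?> factory = factories.get("search");
            // The result type is unknown here, so Object is all we can say.
            Object executable = factory.create("{...}");
            System.out.println(executable); // parsed:{...}
        }
    }
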
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.watcher.actions;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.watcher.actions.Action;
import org.elasticsearch.xpack.core.watcher.actions.ActionStatus;
import org.elasticsearch.xpack.core.watcher.actions.ActionStatus.AckStatus.State;
import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper;
import org.elasticsearch.xpack.core.watcher.actions.ActionWrapperResult;
import org.elasticsearch.xpack.core.watcher.actions.ExecutableAction;

@@ -22,7 +23,6 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.xpack.core.watcher.actions.ActionStatus.AckStatus.State;
import static org.hamcrest.Matchers.is;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;

@@ -32,7 +32,8 @@ public class ActionWrapperTests extends ESTestCase {

private DateTime now = DateTime.now(DateTimeZone.UTC);
private Watch watch = mock(Watch.class);
private ExecutableAction executableAction = mock(ExecutableAction.class);
@SuppressWarnings("unchecked")
private ExecutableAction<Action> executableAction = mock(ExecutableAction.class);
private ActionWrapper actionWrapper = new ActionWrapper("_action", null, NeverCondition.INSTANCE, null, executableAction);

public void testThatUnmetActionConditionResetsAckStatus() throws Exception {

@@ -57,7 +57,7 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
.trigger(schedule(interval("60m")));

AvailableAction availableAction = randomFrom(AvailableAction.values());
Action.Builder action = availableAction.action();
Action.Builder<?> action = availableAction.action();
watchSourceBuilder.addAction("test_id", action);

watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder.buildAsBytes(XContentType.JSON),

@@ -98,7 +98,7 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
Set<String> ackingActions = new HashSet<>();
for (int i = 0; i < scaledRandomIntBetween(5,10); ++i) {
AvailableAction availableAction = randomFrom(AvailableAction.values());
Action.Builder action = availableAction.action();
Action.Builder<?> action = availableAction.action();
watchSourceBuilder.addAction("test_id" + i, action);
if (randomBoolean()) {
ackingActions.add("test_id" + i);

@@ -352,7 +352,7 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
enum AvailableAction {
EMAIL {
@Override
public Action.Builder action() throws Exception {
public Action.Builder<EmailAction> action() throws Exception {
EmailTemplate.Builder emailBuilder = EmailTemplate.builder();
emailBuilder.from("test@test.com");
emailBuilder.to("test@test.com");

@@ -367,7 +367,7 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
},
WEBHOOK {
@Override
public Action.Builder action() throws Exception {
public Action.Builder<WebhookAction> action() throws Exception {
HttpRequestTemplate.Builder requestBuilder = HttpRequestTemplate.builder("localhost", 1234)
.path("/")
.method(HttpMethod.GET);

@@ -381,7 +381,7 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
},
LOGGING {
@Override
public Action.Builder action() throws Exception {
public Action.Builder<LoggingAction> action() throws Exception {
return LoggingAction.builder(new TextTemplate("_logging"));
}

@@ -392,7 +392,7 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
},
INDEX {
@Override
public Action.Builder action() throws Exception {
public Action.Builder<IndexAction> action() throws Exception {
return IndexAction.builder("test_index", "test_type");
}

@@ -402,7 +402,7 @@ public class ActionThrottleTests extends AbstractWatcherIntegrationTestCase {
}
};

public abstract Action.Builder action() throws Exception;
public abstract Action.Builder<? extends Action> action() throws Exception;

public abstract String type();
}

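The enum hunks rely on covariant overriding: the abstract method returns Action.Builder<? extends Action>, and each constant narrows the return to its concrete builder type, which is a legal override because the narrowed type is a subtype of the declared one. A sketch:

    class CovariantDemo {
        interface Action { }
        static class EmailAction implements Action { }
        static class Builder<A extends Action> {
            A build() { throw new UnsupportedOperationException(); }
        }

        abstract static class Available {
            // The abstract signature uses a bounded wildcard...
            public abstract Builder<? extends Action> action();
        }

        static class Email extends Available {
            @Override
            public Builder<EmailAction> action() {
                // ...so each subtype can return its precise builder type:
                // Builder<EmailAction> is a Builder<? extends Action>.
                return new Builder<>();
            }
        }

        public static void main(String[] args) {
            Builder<? extends Action> builder = new Email().action();
            System.out.println(builder.getClass().getSimpleName());
        }
    }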