parent ca5822eae1
commit bd1c513422

@@ -63,7 +63,7 @@ public final class ForEachProcessor extends AbstractProcessor {

  @Override
  public void execute(IngestDocument ingestDocument) throws Exception {
- List values = ingestDocument.getFieldValue(field, List.class, ignoreMissing);
+ List<?> values = ingestDocument.getFieldValue(field, List.class, ignoreMissing);
  if (values == null) {
  if (ignoreMissing) {
  return;

@@ -73,7 +73,9 @@ public final class RemoveProcessor extends AbstractProcessor {
  final List<String> fields = new ArrayList<>();
  final Object field = ConfigurationUtils.readObject(TYPE, processorTag, config, "field");
  if (field instanceof List) {
- fields.addAll((List) field);
+ @SuppressWarnings("unchecked")
+ List<String> stringList = (List<String>) field;
+ fields.addAll(stringList);
  } else {
  fields.add((String) field);
  }

@@ -49,6 +49,7 @@ public final class SortProcessor extends AbstractProcessor {
  this.direction = direction;
  }

+ @Override
  public String toString() {
  return this.direction;
  }
@@ -94,13 +95,13 @@ public final class SortProcessor extends AbstractProcessor {
  @Override
  @SuppressWarnings("unchecked")
  public void execute(IngestDocument document) {
- List<? extends Comparable> list = document.getFieldValue(field, List.class);
+ List<? extends Comparable<Object>> list = document.getFieldValue(field, List.class);

  if (list == null) {
  throw new IllegalArgumentException("field [" + field + "] is null, cannot sort.");
  }

- List<? extends Comparable> copy = new ArrayList<>(list);
+ List<? extends Comparable<Object>> copy = new ArrayList<>(list);

  if (order.equals(SortOrder.ASCENDING)) {
  Collections.sort(copy);

@@ -19,13 +19,6 @@

  package org.elasticsearch.ingest.common;

- import java.util.ArrayList;
- import java.util.Arrays;
- import java.util.Collections;
- import java.util.HashMap;
- import java.util.List;
- import java.util.Locale;
- import java.util.Map;
  import org.elasticsearch.ingest.CompoundProcessor;
  import org.elasticsearch.ingest.IngestDocument;
  import org.elasticsearch.ingest.Processor;
@@ -34,6 +27,14 @@ import org.elasticsearch.ingest.TestTemplateService;
  import org.elasticsearch.script.TemplateScript;
  import org.elasticsearch.test.ESTestCase;

+ import java.util.ArrayList;
+ import java.util.Arrays;
+ import java.util.Collections;
+ import java.util.HashMap;
+ import java.util.List;
+ import java.util.Locale;
+ import java.util.Map;
+
  import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument;
  import static org.hamcrest.Matchers.equalTo;

@@ -54,7 +55,8 @@ public class ForEachProcessorTests extends ESTestCase {
  );
  processor.execute(ingestDocument);

- List result = ingestDocument.getFieldValue("values", List.class);
+ @SuppressWarnings("unchecked")
+ List<String> result = ingestDocument.getFieldValue("values", List.class);
  assertThat(result.get(0), equalTo("FOO"));
  assertThat(result.get(1), equalTo("BAR"));
  assertThat(result.get(2), equalTo("BAZ"));
@@ -204,12 +206,12 @@ public class ForEachProcessorTests extends ESTestCase {
  ), false);
  processor.execute(ingestDocument);

- List result = ingestDocument.getFieldValue("values", List.class);
+ List<?> result = ingestDocument.getFieldValue("values", List.class);
  assertThat(result.get(0), equalTo("STRING"));
  assertThat(result.get(1), equalTo(1));
  assertThat(result.get(2), equalTo(null));

- List errors = ingestDocument.getFieldValue("errors", List.class);
+ List<?> errors = ingestDocument.getFieldValue("errors", List.class);
  assertThat(errors.size(), equalTo(2));
  }

@@ -230,7 +232,7 @@ public class ForEachProcessorTests extends ESTestCase {
  ForEachProcessor forEachProcessor = new ForEachProcessor("_tag", "values", processor, false);
  forEachProcessor.execute(ingestDocument);

- List result = ingestDocument.getFieldValue("values", List.class);
+ List<?> result = ingestDocument.getFieldValue("values", List.class);
  assertThat(result.get(0), equalTo("new_value"));
  assertThat(result.get(1), equalTo("new_value"));
  assertThat(result.get(2), equalTo("new_value"));
@@ -263,13 +265,13 @@ public class ForEachProcessorTests extends ESTestCase {
  "_tag", "values1", new ForEachProcessor("_tag", "_ingest._value.values2", testProcessor, false), false);
  processor.execute(ingestDocument);

- List result = ingestDocument.getFieldValue("values1.0.values2", List.class);
+ List<?> result = ingestDocument.getFieldValue("values1.0.values2", List.class);
  assertThat(result.get(0), equalTo("ABC"));
  assertThat(result.get(1), equalTo("DEF"));

- result = ingestDocument.getFieldValue("values1.1.values2", List.class);
- assertThat(result.get(0), equalTo("GHI"));
- assertThat(result.get(1), equalTo("JKL"));
+ List<?> result2 = ingestDocument.getFieldValue("values1.1.values2", List.class);
+ assertThat(result2.get(0), equalTo("GHI"));
+ assertThat(result2.get(1), equalTo("JKL"));
  }

  public void testIgnoreMissing() throws Exception {

@@ -60,7 +60,7 @@ public class IngestRestartIT extends ESIntegTestCase {
  protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
  return Collections.singletonMap("my_script", script -> {
  @SuppressWarnings("unchecked")
- Map<String, Object> ctx = (Map) script.get("ctx");
+ Map<String, Object> ctx = (Map<String, Object>) script.get("ctx");
  ctx.put("z", 0);
  return null;
  });

@@ -30,6 +30,7 @@ import com.github.mustachejava.TemplateContext;
  import com.github.mustachejava.codes.DefaultMustache;
  import com.github.mustachejava.codes.IterableCode;
  import com.github.mustachejava.codes.WriteCode;
+
  import org.elasticsearch.common.Strings;
  import org.elasticsearch.common.xcontent.XContentBuilder;
  import org.elasticsearch.common.xcontent.XContentType;
@@ -202,11 +203,9 @@ public class CustomMustacheFactory extends DefaultMustacheFactory {
  return null;
  }
  try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
- if (resolved == null) {
- builder.nullValue();
- } else if (resolved instanceof Iterable) {
+ if (resolved instanceof Iterable) {
  builder.startArray();
- for (Object o : (Iterable) resolved) {
+ for (Object o : (Iterable<?>) resolved) {
  builder.value(o);
  }
  builder.endArray();
@@ -254,7 +253,7 @@ public class CustomMustacheFactory extends DefaultMustacheFactory {
  return null;
  } else if (resolved instanceof Iterable) {
  StringJoiner joiner = new StringJoiner(delimiter);
- for (Object o : (Iterable) resolved) {
+ for (Object o : (Iterable<?>) resolved) {
  joiner.add(oh.stringify(o));
  }
  return joiner.toString();

@@ -65,7 +65,7 @@ public class PercolatorQuerySearchTests extends ESSingleNodeTestCase {
  scripts.put("1==1", vars -> Boolean.TRUE);
  scripts.put("use_fielddata_please", vars -> {
  LeafDocLookup leafDocLookup = (LeafDocLookup) vars.get("_doc");
- ScriptDocValues scriptDocValues = leafDocLookup.get("employees.name");
+ ScriptDocValues<?> scriptDocValues = leafDocLookup.get("employees.name");
  return "virginia_potts".equals(scriptDocValues.get(0));
  });
  return scripts;

@@ -173,7 +173,7 @@ public class CustomUnifiedHighlighter extends UnifiedHighlighter {
  SpanQuery[] innerQueries = new SpanQuery[terms[i].length];
  for (int j = 0; j < terms[i].length; j++) {
  if (i == sizeMinus1) {
- innerQueries[j] = new SpanMultiTermQueryWrapper(new PrefixQuery(terms[i][j]));
+ innerQueries[j] = new SpanMultiTermQueryWrapper<PrefixQuery>(new PrefixQuery(terms[i][j]));
  } else {
  innerQueries[j] = new SpanTermQuery(terms[i][j]);
  }

@@ -57,6 +57,7 @@ public interface DocWriteRequest<T> extends IndicesRequest {
  * Get the options for this request
  * @return the indices options
  */
+ @Override
  IndicesOptions indicesOptions();

  /**
@@ -157,9 +158,9 @@ public interface DocWriteRequest<T> extends IndicesRequest {
  }

  /** read a document write (index/delete/update) request */
- static DocWriteRequest readDocumentRequest(StreamInput in) throws IOException {
+ static DocWriteRequest<?> readDocumentRequest(StreamInput in) throws IOException {
  byte type = in.readByte();
- DocWriteRequest docWriteRequest;
+ DocWriteRequest<?> docWriteRequest;
  if (type == 0) {
  IndexRequest indexRequest = new IndexRequest();
  indexRequest.readFrom(in);
@@ -179,7 +180,7 @@ public interface DocWriteRequest<T> extends IndicesRequest {
  }

  /** write a document write (index/delete/update) request*/
- static void writeDocumentRequest(StreamOutput out, DocWriteRequest request) throws IOException {
+ static void writeDocumentRequest(StreamOutput out, DocWriteRequest<?> request) throws IOException {
  if (request instanceof IndexRequest) {
  out.writeByte((byte) 0);
  ((IndexRequest) request).writeTo(out);

@@ -108,8 +108,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
  /**
  * Sets the transient settings to be updated. They will not survive a full cluster restart
  */
- @SuppressWarnings({"unchecked", "rawtypes"})
- public ClusterUpdateSettingsRequest transientSettings(Map source) {
+ public ClusterUpdateSettingsRequest transientSettings(Map<String, ?> source) {
  try {
  XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
  builder.map(source);
@@ -147,8 +146,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
  /**
  * Sets the persistent settings to be updated. They will get applied cross restarts
  */
- @SuppressWarnings({"unchecked", "rawtypes"})
- public ClusterUpdateSettingsRequest persistentSettings(Map source) {
+ public ClusterUpdateSettingsRequest persistentSettings(Map<String, ?> source) {
  try {
  XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
  builder.map(source);

@@ -62,7 +62,7 @@ public class ClusterUpdateSettingsRequestBuilder extends AcknowledgedRequestBuil
  /**
  * Sets the transient settings to be updated. They will not survive a full cluster restart
  */
- public ClusterUpdateSettingsRequestBuilder setTransientSettings(Map settings) {
+ public ClusterUpdateSettingsRequestBuilder setTransientSettings(Map<String, ?> settings) {
  request.transientSettings(settings);
  return this;
  }
@@ -94,7 +94,7 @@ public class ClusterUpdateSettingsRequestBuilder extends AcknowledgedRequestBuil
  /**
  * Sets the persistent settings to be updated. They will get applied cross restarts
  */
- public ClusterUpdateSettingsRequestBuilder setPersistentSettings(Map settings) {
+ public ClusterUpdateSettingsRequestBuilder setPersistentSettings(Map<String, ?> settings) {
  request.persistentSettings(settings);
  return this;
  }

@@ -58,9 +58,9 @@ import java.util.Objects;
  import java.util.Set;

  import static org.elasticsearch.action.ValidateActions.addValidationError;
- import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
  import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
  import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
+ import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;

  /**
  * A request to create an index. Best created with {@link org.elasticsearch.client.Requests#createIndexRequest(String)}.
@@ -189,8 +189,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
  /**
  * The settings to create the index with (either json/yaml/properties format)
  */
- @SuppressWarnings("unchecked")
- public CreateIndexRequest settings(Map source) {
+ public CreateIndexRequest settings(Map<String, ?> source) {
  try {
  XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
  builder.map(source);
@@ -256,8 +255,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
  * @param type The mapping type
  * @param source The mapping source
  */
- @SuppressWarnings("unchecked")
- public CreateIndexRequest mapping(String type, Map source) {
+ public CreateIndexRequest mapping(String type, Map<String, ?> source) {
  if (mappings.containsKey(type)) {
  throw new IllegalStateException("mappings for type \"" + type + "\" were already defined");
  }
@@ -286,8 +284,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
  /**
  * Sets the aliases that will be associated with the index when it gets created
  */
- @SuppressWarnings("unchecked")
- public CreateIndexRequest aliases(Map source) {
+ public CreateIndexRequest aliases(Map<String, ?> source) {
  try {
  XContentBuilder builder = XContentFactory.jsonBuilder();
  builder.map(source);

@@ -147,7 +147,7 @@ public class CreateIndexRequestBuilder extends AcknowledgedRequestBuilder<Create
  /**
  * Sets the aliases that will be associated with the index when it gets created
  */
- public CreateIndexRequestBuilder setAliases(Map source) {
+ public CreateIndexRequestBuilder setAliases(Map<String, ?> source) {
  request.aliases(source);
  return this;
  }

@@ -50,10 +50,11 @@ public class TransportShardFlushAction extends TransportReplicationAction<ShardF
  }

  @Override
- protected PrimaryResult shardOperationOnPrimary(ShardFlushRequest shardRequest, IndexShard primary) {
+ protected PrimaryResult<ShardFlushRequest, ReplicationResponse> shardOperationOnPrimary(ShardFlushRequest shardRequest,
+ IndexShard primary) {
  primary.flush(shardRequest.getRequest());
  logger.trace("{} flush request executed on primary", primary.shardId());
- return new PrimaryResult(shardRequest, new ReplicationResponse());
+ return new PrimaryResult<ShardFlushRequest, ReplicationResponse>(shardRequest, new ReplicationResponse());
  }

  @Override

@@ -20,6 +20,7 @@
  package org.elasticsearch.action.admin.indices.mapping.get;

  import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
+
  import org.elasticsearch.action.ActionResponse;
  import org.elasticsearch.cluster.metadata.MappingMetaData;
  import org.elasticsearch.common.ParseField;
@@ -27,7 +28,6 @@ import org.elasticsearch.common.Strings;
  import org.elasticsearch.common.collect.ImmutableOpenMap;
  import org.elasticsearch.common.io.stream.StreamInput;
  import org.elasticsearch.common.io.stream.StreamOutput;
- import org.elasticsearch.common.xcontent.ObjectParser;
  import org.elasticsearch.common.xcontent.ToXContentFragment;
  import org.elasticsearch.common.xcontent.XContentBuilder;
  import org.elasticsearch.common.xcontent.XContentParser;
@@ -39,9 +39,6 @@ public class GetMappingsResponse extends ActionResponse implements ToXContentFra

  private static final ParseField MAPPINGS = new ParseField("mappings");

- private static final ObjectParser<GetMappingsResponse, Void> PARSER =
- new ObjectParser<GetMappingsResponse, Void>("get-mappings", false, GetMappingsResponse::new);
-
  private ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings = ImmutableOpenMap.of();

  GetMappingsResponse(ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> mappings) {
@@ -101,13 +98,15 @@ public class GetMappingsResponse extends ActionResponse implements ToXContentFra
  for (Map.Entry<String, Object> entry : parts.entrySet()) {
  final String indexName = entry.getKey();
  assert entry.getValue() instanceof Map : "expected a map as type mapping, but got: " + entry.getValue().getClass();
- final Map<String, Object> mapping = (Map<String, Object>) ((Map) entry.getValue()).get(MAPPINGS.getPreferredName());
+ @SuppressWarnings("unchecked")
+ final Map<String, Object> mapping = (Map<String, Object>) ((Map<String, ?>) entry.getValue()).get(MAPPINGS.getPreferredName());

  ImmutableOpenMap.Builder<String, MappingMetaData> typeBuilder = new ImmutableOpenMap.Builder<>();
  for (Map.Entry<String, Object> typeEntry : mapping.entrySet()) {
  final String typeName = typeEntry.getKey();
  assert typeEntry.getValue() instanceof Map : "expected a map as inner type mapping, but got: " +
  typeEntry.getValue().getClass();
+ @SuppressWarnings("unchecked")
  final Map<String, Object> fieldMappings = (Map<String, Object>) typeEntry.getValue();
  MappingMetaData mmd = new MappingMetaData(typeName, fieldMappings);
  typeBuilder.put(typeName, mmd);

@@ -256,8 +256,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
  /**
  * The mapping source definition.
  */
- @SuppressWarnings("unchecked")
- public PutMappingRequest source(Map mappingSource) {
+ public PutMappingRequest source(Map<String, ?> mappingSource) {
  try {
  XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
  builder.map(mappingSource);

@@ -90,10 +90,10 @@ public abstract class Condition<T> implements NamedWriteable, ToXContentFragment
  * Holder for evaluated condition result
  */
  public static class Result {
- public final Condition condition;
+ public final Condition<?> condition;
  public final boolean matched;

- protected Result(Condition condition, boolean matched) {
+ protected Result(Condition<?> condition, boolean matched) {
  this.condition = condition;
  this.matched = matched;
  }

@@ -45,7 +45,7 @@ public class RolloverInfo extends AbstractDiffable<RolloverInfo> implements Writ

  @SuppressWarnings("unchecked")
  public static ConstructingObjectParser<RolloverInfo, String> PARSER = new ConstructingObjectParser<>("rollover_info", false,
- (a, alias) -> new RolloverInfo(alias, (List<Condition>) a[0], (Long) a[1]));
+ (a, alias) -> new RolloverInfo(alias, (List<Condition<?>>) a[0], (Long) a[1]));
  static {
  PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(),
  (p, c, n) -> p.namedObject(Condition.class, n, c), CONDITION_FIELD);
@@ -53,10 +53,10 @@ public class RolloverInfo extends AbstractDiffable<RolloverInfo> implements Writ
  }

  private final String alias;
- private final List<Condition> metConditions;
+ private final List<Condition<?>> metConditions;
  private final long time;

- public RolloverInfo(String alias, List<Condition> metConditions, long time) {
+ public RolloverInfo(String alias, List<Condition<?>> metConditions, long time) {
  this.alias = alias;
  this.metConditions = metConditions;
  this.time = time;
@@ -65,7 +65,7 @@ public class RolloverInfo extends AbstractDiffable<RolloverInfo> implements Writ
  public RolloverInfo(StreamInput in) throws IOException {
  this.alias = in.readString();
  this.time = in.readVLong();
- this.metConditions = in.readNamedWriteableList(Condition.class);
+ this.metConditions = (List) in.readNamedWriteableList(Condition.class);
  }

  public static RolloverInfo parse(XContentParser parser, String alias) {
@@ -76,7 +76,7 @@ public class RolloverInfo extends AbstractDiffable<RolloverInfo> implements Writ
  return alias;
  }

- public List<Condition> getMetConditions() {
+ public List<Condition<?>> getMetConditions() {
  return metConditions;
  }

@@ -99,7 +99,7 @@ public class RolloverInfo extends AbstractDiffable<RolloverInfo> implements Writ
  public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
  builder.startObject(alias);
  builder.startObject(CONDITION_FIELD.getPreferredName());
- for (Condition condition : metConditions) {
+ for (Condition<?> condition : metConditions) {
  condition.toXContent(builder, params);
  }
  builder.endObject();

@@ -45,7 +45,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implements IndicesRequest, ToXContentObject {

  private static final ObjectParser<RolloverRequest, Void> PARSER = new ObjectParser<>("rollover");
- private static final ObjectParser<Map<String, Condition>, Void> CONDITION_PARSER = new ObjectParser<>("conditions");
+ private static final ObjectParser<Map<String, Condition<?>>, Void> CONDITION_PARSER = new ObjectParser<>("conditions");

  private static final ParseField CONDITIONS = new ParseField("conditions");
  private static final ParseField MAX_AGE_CONDITION = new ParseField(MaxAgeCondition.NAME);
@@ -78,7 +78,7 @@ public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implem
  private String alias;
  private String newIndexName;
  private boolean dryRun;
- private Map<String, Condition> conditions = new HashMap<>(2);
+ private Map<String, Condition<?>> conditions = new HashMap<>(2);
  //the index name "_na_" is never read back, what matters are settings, mappings and aliases
  private CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_");

@@ -106,7 +106,7 @@ public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implem
  dryRun = in.readBoolean();
  int size = in.readVInt();
  for (int i = 0; i < size; i++) {
- Condition condition = in.readNamedWriteable(Condition.class);
+ Condition<?> condition = in.readNamedWriteable(Condition.class);
  this.conditions.put(condition.name, condition);
  }
  createIndexRequest = new CreateIndexRequest();
@@ -120,7 +120,7 @@ public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implem
  out.writeOptionalString(newIndexName);
  out.writeBoolean(dryRun);
  out.writeVInt(conditions.size());
- for (Condition condition : conditions.values()) {
+ for (Condition<?> condition : conditions.values()) {
  if (condition.includedInVersion(out.getVersion())) {
  out.writeNamedWriteable(condition);
  }
@@ -196,7 +196,7 @@ public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implem
  return dryRun;
  }

- Map<String, Condition> getConditions() {
+ Map<String, Condition<?>> getConditions() {
  return conditions;
  }

@@ -221,7 +221,7 @@ public class RolloverRequest extends AcknowledgedRequest<RolloverRequest> implem
  createIndexRequest.innerToXContent(builder, params);

  builder.startObject(CONDITIONS.getPreferredName());
- for (Condition condition : conditions.values()) {
+ for (Condition<?> condition : conditions.values()) {
  condition.toXContent(builder, params);
  }
  builder.endObject();

@@ -132,7 +132,7 @@ public class TransportRolloverAction extends TransportMasterNodeAction<RolloverR
  new RolloverResponse(sourceIndexName, rolloverIndexName, conditionResults, true, false, false, false));
  return;
  }
- List<Condition> metConditions = rolloverRequest.getConditions().values().stream()
+ List<Condition<?>> metConditions = rolloverRequest.getConditions().values().stream()
  .filter(condition -> conditionResults.get(condition.toString())).collect(Collectors.toList());
  if (conditionResults.size() == 0 || metConditions.size() > 0) {
  CreateIndexClusterStateUpdateRequest updateRequest = prepareCreateIndexRequest(unresolvedName, rolloverIndexName,
@@ -221,7 +221,7 @@ public class TransportRolloverAction extends TransportMasterNodeAction<RolloverR
  }
  }

- static Map<String, Boolean> evaluateConditions(final Collection<Condition> conditions,
+ static Map<String, Boolean> evaluateConditions(final Collection<Condition<?>> conditions,
  final DocsStats docsStats, final IndexMetaData metaData) {
  final long numDocs = docsStats == null ? 0 : docsStats.getCount();
  final long indexSize = docsStats == null ? 0 : docsStats.getTotalSizeInBytes();
@@ -231,7 +231,7 @@ public class TransportRolloverAction extends TransportMasterNodeAction<RolloverR
  .collect(Collectors.toMap(result -> result.condition.toString(), result -> result.matched));
  }

- static Map<String, Boolean> evaluateConditions(final Collection<Condition> conditions, final IndexMetaData metaData,
+ static Map<String, Boolean> evaluateConditions(final Collection<Condition<?>> conditions, final IndexMetaData metaData,
  final IndicesStatsResponse statsResponse) {
  return evaluateConditions(conditions, statsResponse.getPrimaries().getDocs(), metaData);
  }

@@ -155,8 +155,7 @@ public class UpdateSettingsRequest extends AcknowledgedRequest<UpdateSettingsReq
  /**
  * Sets the settings to be updated (either json or yaml format)
  */
- @SuppressWarnings("unchecked")
- public UpdateSettingsRequest settings(Map source) {
+ public UpdateSettingsRequest settings(Map<String, ?> source) {
  try {
  XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
  builder.map(source);

@@ -61,9 +61,9 @@ import java.util.Set;
  import java.util.stream.Collectors;

  import static org.elasticsearch.action.ValidateActions.addValidationError;
- import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
  import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
  import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
+ import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;

  /**
  * A request to create an index template.
@@ -309,7 +309,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
  * The template source definition.
  */
  @SuppressWarnings("unchecked")
- public PutIndexTemplateRequest source(Map templateSource) {
+ public PutIndexTemplateRequest source(Map<String, Object> templateSource) {
  Map<String, Object> source = templateSource;
  for (Map.Entry<String, Object> entry : source.entrySet()) {
  String name = entry.getKey();
@@ -411,8 +411,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
  /**
  * Sets the aliases that will be associated with the index when it gets created
  */
- @SuppressWarnings("unchecked")
- public PutIndexTemplateRequest aliases(Map source) {
+ public PutIndexTemplateRequest aliases(Map<String, ?> source) {
  try {
  XContentBuilder builder = XContentFactory.jsonBuilder();
  builder.map(source);

@@ -140,7 +140,7 @@ public class PutIndexTemplateRequestBuilder
  /**
  * Sets the aliases that will be associated with the index when it gets created
  */
- public PutIndexTemplateRequestBuilder setAliases(Map source) {
+ public PutIndexTemplateRequestBuilder setAliases(Map<String, Object> source) {
  request.aliases(source);
  return this;
  }
@@ -221,7 +221,7 @@ public class PutIndexTemplateRequestBuilder
  /**
  * The template source definition.
  */
- public PutIndexTemplateRequestBuilder setSource(Map templateSource) {
+ public PutIndexTemplateRequestBuilder setSource(Map<String, Object> templateSource) {
  request.source(templateSource);
  return this;
  }

@@ -117,7 +117,7 @@ public class BulkShardRequest extends ReplicatedWriteRequest<BulkShardRequest> {
  if (item.request() instanceof ReplicationRequest) {
  // all replication requests need to be notified here as well to ie. make sure that internal optimizations are
  // disabled see IndexRequest#canHaveDuplicates()
- ((ReplicationRequest) item.request()).onRetry();
+ ((ReplicationRequest<?>) item.request()).onRetry();
  }
  }
  }

@@ -205,7 +205,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
  return primaryResponse;

  } else if (operationResult.getResultType() == Engine.Result.Type.FAILURE) {
- DocWriteRequest docWriteRequest = replicaRequest.request();
+ DocWriteRequest<?> docWriteRequest = replicaRequest.request();
  Exception failure = operationResult.getFailure();
  if (isConflictException(failure)) {
  logger.trace(() -> new ParameterizedMessage("{} failed to execute bulk item ({}) {}",
@@ -240,7 +240,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
  int requestIndex, UpdateHelper updateHelper,
  LongSupplier nowInMillisSupplier,
  final MappingUpdatePerformer mappingUpdater) throws Exception {
- final DocWriteRequest itemRequest = request.items()[requestIndex].request();
+ final DocWriteRequest<?> itemRequest = request.items()[requestIndex].request();
  final DocWriteRequest.OpType opType = itemRequest.opType();
  final BulkItemResultHolder responseHolder;
  switch (itemRequest.opType()) {
@@ -486,7 +486,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
  for (int i = 0; i < request.items().length; i++) {
  BulkItemRequest item = request.items()[i];
  final Engine.Result operationResult;
- DocWriteRequest docWriteRequest = item.request();
+ DocWriteRequest<?> docWriteRequest = item.request();
  switch (replicaItemExecutionMode(item, i)) {
  case NORMAL:
  final DocWriteResponse primaryResponse = item.getPrimaryResponse().getResponse();
@@ -510,7 +510,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
  return location;
  }

- private static Engine.Result performOpOnReplica(DocWriteResponse primaryResponse, DocWriteRequest docWriteRequest,
+ private static Engine.Result performOpOnReplica(DocWriteResponse primaryResponse, DocWriteRequest<?> docWriteRequest,
  IndexShard replica) throws Exception {
  final Engine.Result result;
  switch (docWriteRequest.opType()) {
@@ -605,6 +605,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ

  class ConcreteMappingUpdatePerformer implements MappingUpdatePerformer {

+ @Override
  public void updateMappings(final Mapping update, final ShardId shardId, final String type) {
  assert update != null;
  assert shardId != null;

@@ -75,7 +75,7 @@ public abstract class TransportSingleItemBulkWriteAction<
  BulkItemRequest[] itemRequests = new BulkItemRequest[1];
  WriteRequest.RefreshPolicy refreshPolicy = request.getRefreshPolicy();
  request.setRefreshPolicy(WriteRequest.RefreshPolicy.NONE);
- itemRequests[0] = new BulkItemRequest(0, ((DocWriteRequest) request));
+ itemRequests[0] = new BulkItemRequest(0, ((DocWriteRequest<?>) request));
  BulkShardRequest bulkShardRequest = new BulkShardRequest(request.shardId(), refreshPolicy, itemRequests);
  WritePrimaryResult<BulkShardRequest, BulkShardResponse> bulkResult =
  shardBulkAction.shardOperationOnPrimary(bulkShardRequest, primary);
@@ -98,7 +98,7 @@ public abstract class TransportSingleItemBulkWriteAction<
  Request replicaRequest, IndexShard replica) throws Exception {
  BulkItemRequest[] itemRequests = new BulkItemRequest[1];
  WriteRequest.RefreshPolicy refreshPolicy = replicaRequest.getRefreshPolicy();
- itemRequests[0] = new BulkItemRequest(0, ((DocWriteRequest) replicaRequest));
+ itemRequests[0] = new BulkItemRequest(0, ((DocWriteRequest<?>) replicaRequest));
  BulkShardRequest bulkShardRequest = new BulkShardRequest(replicaRequest.shardId(), refreshPolicy, itemRequests);
  WriteReplicaResult<BulkShardRequest> result = shardBulkAction.shardOperationOnReplica(bulkShardRequest, replica);
  // a replica operation can never throw a document-level failure,
@@ -121,9 +121,9 @@ public abstract class TransportSingleItemBulkWriteAction<
  }, listener::onFailure);
  }

- public static BulkRequest toSingleItemBulkRequest(ReplicatedWriteRequest request) {
+ public static BulkRequest toSingleItemBulkRequest(ReplicatedWriteRequest<?> request) {
  BulkRequest bulkRequest = new BulkRequest();
- bulkRequest.add(((DocWriteRequest) request));
+ bulkRequest.add(((DocWriteRequest<?>) request));
  bulkRequest.setRefreshPolicy(request.getRefreshPolicy());
  bulkRequest.timeout(request.timeout());
  bulkRequest.waitForActiveShards(request.waitForActiveShards());

@@ -282,7 +282,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
  *
  * @param source The map to index
  */
- public IndexRequest source(Map source) throws ElasticsearchGenerationException {
+ public IndexRequest source(Map<String, ?> source) throws ElasticsearchGenerationException {
  return source(source, Requests.INDEX_CONTENT_TYPE);
  }

@@ -291,7 +291,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
  *
  * @param source The map to index
  */
- public IndexRequest source(Map source, XContentType contentType) throws ElasticsearchGenerationException {
+ public IndexRequest source(Map<String, ?> source, XContentType contentType) throws ElasticsearchGenerationException {
  try {
  XContentBuilder builder = XContentFactory.contentBuilder(contentType);
  builder.map(source);

@@ -26,11 +26,11 @@ import org.elasticsearch.common.Nullable;
  import org.elasticsearch.common.unit.TimeValue;
  import org.elasticsearch.index.query.QueryBuilder;
  import org.elasticsearch.script.Script;
- import org.elasticsearch.search.collapse.CollapseBuilder;
  import org.elasticsearch.search.Scroll;
  import org.elasticsearch.search.aggregations.AggregationBuilder;
  import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
  import org.elasticsearch.search.builder.SearchSourceBuilder;
+ import org.elasticsearch.search.collapse.CollapseBuilder;
  import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
  import org.elasticsearch.search.rescore.RescorerBuilder;
  import org.elasticsearch.search.slice.SliceBuilder;
@@ -341,7 +341,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
  *
  * @see org.elasticsearch.search.sort.SortBuilders
  */
- public SearchRequestBuilder addSort(SortBuilder sort) {
+ public SearchRequestBuilder addSort(SortBuilder<?> sort) {
  sourceBuilder().sort(sort);
  return this;
  }

@@ -38,7 +38,7 @@ public abstract class BroadcastShardRequest extends TransportRequest implements
  public BroadcastShardRequest() {
  }

- protected BroadcastShardRequest(ShardId shardId, BroadcastRequest request) {
+ protected BroadcastShardRequest(ShardId shardId, BroadcastRequest<? extends BroadcastRequest<?>> request) {
  this.shardId = shardId;
  this.originalIndices = new OriginalIndices(request);
  }

@@ -205,7 +205,6 @@ public abstract class TransportBroadcastAction<Request extends BroadcastRequest<
  }
  }

- @SuppressWarnings({"unchecked"})
  void onOperation(@Nullable ShardRouting shard, final ShardIterator shardIt, int shardIndex, Exception e) {
  // we set the shard failure always, even if its the first in the replication group, and the next one
  // will work (it will just override it...)

@@ -150,7 +150,6 @@ public abstract class ReplicationRequest<Request extends ReplicationRequest<Requ
  * shard count is passed in, instead of having to first call {@link ActiveShardCount#from(int)}
  * to get the ActiveShardCount.
  */
- @SuppressWarnings("unchecked")
  public final Request waitForActiveShards(final int waitForActiveShards) {
  return waitForActiveShards(ActiveShardCount.from(waitForActiveShards));
  }

@@ -77,7 +77,6 @@ public class UpdateHelper extends AbstractComponent {
  * Prepares an update request by converting it into an index or delete request or an update response (no action, in the event of a
  * noop).
  */
- @SuppressWarnings("unchecked")
  protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult getResult, LongSupplier nowInMillis) {
  if (getResult.isExists() == false) {
  // If the document didn't exist, execute the update request as an upsert
@@ -108,7 +107,8 @@ public class UpdateHelper extends AbstractComponent {
  ctx = executeScript(script, ctx);

  UpdateOpType operation = UpdateOpType.lenientFromString((String) ctx.get(ContextFields.OP), logger, script.getIdOrCode());
- Map newSource = (Map) ctx.get(ContextFields.SOURCE);
+ @SuppressWarnings("unchecked")
+ Map<String, Object> newSource = (Map<String, Object>) ctx.get(ContextFields.SOURCE);

  if (operation != UpdateOpType.CREATE && operation != UpdateOpType.NONE) {
  // Only valid options for an upsert script are "create" (the default) or "none", meaning abort upsert
@@ -248,6 +248,7 @@ public class UpdateHelper extends AbstractComponent {

  UpdateOpType operation = UpdateOpType.lenientFromString((String) ctx.get(ContextFields.OP), logger, request.script.getIdOrCode());

+ @SuppressWarnings("unchecked")
  final Map<String, Object> updatedSourceAsMap = (Map<String, Object>) ctx.get(ContextFields.SOURCE);

  switch (operation) {

@@ -551,7 +551,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
  /**
  * Sets the doc to use for updates when a script is not specified.
  */
- public UpdateRequest doc(Map source) {
+ public UpdateRequest doc(Map<String, Object> source) {
  safeDoc().source(source);
  return this;
  }
@@ -559,7 +559,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
  /**
  * Sets the doc to use for updates when a script is not specified.
  */
- public UpdateRequest doc(Map source, XContentType contentType) {
+ public UpdateRequest doc(Map<String, Object> source, XContentType contentType) {
  safeDoc().source(source, contentType);
  return this;
  }
@@ -637,7 +637,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
  /**
  * Sets the doc source of the update request to be used when the document does not exists.
  */
- public UpdateRequest upsert(Map source) {
+ public UpdateRequest upsert(Map<String, Object> source) {
  safeUpsertRequest().source(source);
  return this;
  }
@@ -645,7 +645,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
  /**
  * Sets the doc source of the update request to be used when the document does not exists.
  */
- public UpdateRequest upsert(Map source, XContentType contentType) {
+ public UpdateRequest upsert(Map<String, Object> source, XContentType contentType) {
  safeUpsertRequest().source(source, contentType);
  return this;
  }

@@ -187,7 +187,7 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
  /**
  * Sets the doc to use for updates when a script is not specified.
  */
- public UpdateRequestBuilder setDoc(Map source) {
+ public UpdateRequestBuilder setDoc(Map<String, Object> source) {
  request.doc(source);
  return this;
  }
@@ -195,7 +195,7 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
  /**
  * Sets the doc to use for updates when a script is not specified.
  */
- public UpdateRequestBuilder setDoc(Map source, XContentType contentType) {
+ public UpdateRequestBuilder setDoc(Map<String, Object> source, XContentType contentType) {
  request.doc(source, contentType);
  return this;
  }
@@ -262,7 +262,7 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
  /**
  * Sets the doc source of the update request to be used when the document does not exists.
  */
- public UpdateRequestBuilder setUpsert(Map source) {
+ public UpdateRequestBuilder setUpsert(Map<String, Object> source) {
  request.upsert(source);
  return this;
  }
@@ -270,7 +270,7 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
  /**
  * Sets the doc source of the update request to be used when the document does not exists.
  */
- public UpdateRequestBuilder setUpsert(Map source, XContentType contentType) {
+ public UpdateRequestBuilder setUpsert(Map<String, Object> source, XContentType contentType) {
  request.upsert(source, contentType);
  return this;
  }

@@ -67,7 +67,7 @@ public class DelayedAllocationService extends AbstractLifecycleComponent impleme
  class DelayedRerouteTask extends ClusterStateUpdateTask {
  final TimeValue nextDelay; // delay until submitting the reroute command
  final long baseTimestampNanos; // timestamp (in nanos) upon which delay was calculated
- volatile ScheduledFuture future;
+ volatile ScheduledFuture<?> future;
  final AtomicBoolean cancelScheduling = new AtomicBoolean();

  DelayedRerouteTask(TimeValue nextDelay, long baseTimestampNanos) {

@@ -18,7 +18,6 @@
  */
  package org.elasticsearch.common.geo;

- import org.locationtech.jts.geom.Coordinate;
  import org.elasticsearch.ElasticsearchParseException;
  import org.elasticsearch.common.geo.builders.CircleBuilder;
  import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
@@ -35,6 +34,7 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation;
  import org.elasticsearch.common.geo.parsers.CoordinateNode;
  import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry;
  import org.elasticsearch.common.unit.DistanceUnit;
+ import org.locationtech.jts.geom.Coordinate;

  import java.util.ArrayList;
  import java.util.HashMap;
@@ -48,7 +48,7 @@ import java.util.Map;
  public enum GeoShapeType {
  POINT("point") {
  @Override
- public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
+ public PointBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
  Orientation orientation, boolean coerce) {
  return new PointBuilder().coordinate(validate(coordinates, coerce).coordinate);
  }
@@ -66,7 +66,7 @@ public enum GeoShapeType {
  },
  MULTIPOINT("multipoint") {
  @Override
- public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
+ public MultiPointBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
  Orientation orientation, boolean coerce) {
  validate(coordinates, coerce);
  CoordinatesBuilder coordinatesBuilder = new CoordinatesBuilder();
@@ -96,7 +96,7 @@ public enum GeoShapeType {
  },
  LINESTRING("linestring") {
  @Override
- public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
+ public LineStringBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
  Orientation orientation, boolean coerce) {
  validate(coordinates, coerce);
  CoordinatesBuilder line = new CoordinatesBuilder();
@@ -117,7 +117,7 @@ public enum GeoShapeType {
  },
  MULTILINESTRING("multilinestring") {
  @Override
- public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
+ public MultiLineStringBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
  Orientation orientation, boolean coerce) {
  validate(coordinates, coerce);
  MultiLineStringBuilder multiline = new MultiLineStringBuilder();
@@ -138,7 +138,7 @@ public enum GeoShapeType {
  },
  POLYGON("polygon") {
  @Override
- public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
+ public PolygonBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
  Orientation orientation, boolean coerce) {
  validate(coordinates, coerce);
  // build shell
@@ -199,7 +199,7 @@ public enum GeoShapeType {
  },
  MULTIPOLYGON("multipolygon") {
  @Override
- public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
+ public MultiPolygonBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
  Orientation orientation, boolean coerce) {
  validate(coordinates, coerce);
  MultiPolygonBuilder polygons = new MultiPolygonBuilder(orientation);
@@ -217,7 +217,7 @@ public enum GeoShapeType {
  },
  ENVELOPE("envelope") {
  @Override
- public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
+ public EnvelopeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
  Orientation orientation, boolean coerce) {
  validate(coordinates, coerce);
  // verify coordinate bounds, correct if necessary
@@ -249,7 +249,7 @@ public enum GeoShapeType {
  },
  CIRCLE("circle") {
  @Override
- public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
+ public CircleBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
  Orientation orientation, boolean coerce) {
  return new CircleBuilder().center(coordinates.coordinate).radius(radius);

@@ -263,7 +263,7 @@ public enum GeoShapeType {
  },
  GEOMETRYCOLLECTION("geometrycollection") {
  @Override
- public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
+ public ShapeBuilder<?, ?> getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
  Orientation orientation, boolean coerce) {
  // noop, handled in parser
  return null;
@@ -303,7 +303,7 @@ public enum GeoShapeType {
  throw new IllegalArgumentException("unknown geo_shape ["+geoshapename+"]");
  }

- public abstract ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
+ public abstract ShapeBuilder<?, ?> getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius,
  ShapeBuilder.Orientation orientation, boolean coerce);
  abstract CoordinateNode validate(CoordinateNode coordinates, boolean coerce);

@@ -19,13 +19,12 @@

  package org.elasticsearch.common.geo.builders;

- import org.locationtech.jts.geom.Coordinate;
-
  import org.elasticsearch.common.geo.GeoShapeType;
  import org.elasticsearch.common.geo.XShapeCollection;
  import org.elasticsearch.common.geo.parsers.ShapeParser;
  import org.elasticsearch.common.io.stream.StreamInput;
  import org.elasticsearch.common.xcontent.XContentBuilder;
+ import org.locationtech.jts.geom.Coordinate;
  import org.locationtech.spatial4j.shape.Point;

  import java.io.IOException;

@@ -62,7 +62,7 @@ public abstract class DefaultElementVisitor<V> implements ElementVisitor<V> {
  }

  @Override
- public V visit(InjectionRequest injectionRequest) {
+ public V visit(InjectionRequest<?> injectionRequest) {
  return visitOther(injectionRequest);
  }

@@ -106,7 +106,7 @@ public final class Dependency<T> {
  @Override
  public boolean equals(Object o) {
  if (o instanceof Dependency) {
- Dependency dependency = (Dependency) o;
+ Dependency<?> dependency = (Dependency<?>) o;
  return Objects.equals(injectionPoint, dependency.injectionPoint)
  && Objects.equals(parameterIndex, dependency.parameterIndex)
  && Objects.equals(key, dependency.key);

@@ -131,7 +131,7 @@ public class DeprecationLogger {
  // LRU set of keys used to determine if a deprecation message should be emitted to the deprecation logs
  private Set<String> keys = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<String, Boolean>() {
  @Override
- protected boolean removeEldestEntry(final Map.Entry eldest) {
+ protected boolean removeEldestEntry(final Map.Entry<String, Boolean> eldest) {
  return size() > 128;
  }
  }));
@@ -390,7 +390,7 @@ public class DeprecationLogger {
  final StringBuilder sb = new StringBuilder(s.length());
  boolean encodingNeeded = false;
  for (int i = 0; i < s.length();) {
- int current = (int) s.charAt(i);
+ int current = s.charAt(i);
  /*
  * Either the character does not need encoding or it does; when the character does not need encoding we append the character to
  * a buffer and move to the next character and when the character does need encoding, we peel off as many characters as possible

@@ -89,7 +89,7 @@ final class BytesBinaryDVAtomicFieldData implements AtomicFieldData {
  }

  @Override
- public ScriptDocValues getScriptValues() {
+ public ScriptDocValues<BytesRef> getScriptValues() {
  return new ScriptDocValues.BytesRefs(getBytesValues());
  }

@@ -35,6 +35,7 @@ import static org.hamcrest.Matchers.equalTo;

  public class PutRepositoryRequestTests extends ESTestCase {

+ @SuppressWarnings("unchecked")
  public void testCreateRepositoryToXContent() throws IOException {
  Map<String, String> mapParams = new HashMap<>();
  PutRepositoryRequest request = new PutRepositoryRequest();
@@ -62,11 +63,11 @@ public class PutRepositoryRequestTests extends ESTestCase {
  assertThat(outputMap.get("name"), equalTo(request.name()));
  assertThat(outputMap.get("verify"), equalTo(request.verify()));
  assertThat(outputMap.get("type"), equalTo(request.type()));
- Map settings = (Map) outputMap.get("settings");
+ Map<String, Object> settings = (Map<String, Object>) outputMap.get("settings");
  if (addSettings) {
  assertThat(settings.get(FsRepository.LOCATION_SETTING.getKey()), equalTo("."));
  } else {
- assertTrue(((Map) outputMap.get("settings")).isEmpty());
+ assertTrue(((Map<String, Object>) outputMap.get("settings")).isEmpty());
  }
  }
  }

@@ -273,7 +273,7 @@ public class RolloverIT extends ESIntegTestCase {
  assertThat(response.getNewIndex(), equalTo("test-000002"));
  assertThat("Should rollover with a small max_size condition", response.isRolledOver(), equalTo(true));
  final IndexMetaData oldIndex = client().admin().cluster().prepareState().get().getState().metaData().index("test-1");
- List<Condition> metConditions = oldIndex.getRolloverInfos().get("test_alias").getMetConditions();
+ List<Condition<?>> metConditions = oldIndex.getRolloverInfos().get("test_alias").getMetConditions();
  assertThat(metConditions.size(), equalTo(1));
  assertThat(metConditions.get(0).toString(), equalTo(new MaxSizeCondition(maxSizeValue).toString()));
  assertThat(oldIndex.getRolloverInfos().get("test_alias").getTime(),

@@ -73,7 +73,7 @@ public class RolloverRequestTests extends ESTestCase {
  .endObject()
  .endObject();
  request.fromXContent(createParser(builder));
- Map<String, Condition> conditions = request.getConditions();
+ Map<String, Condition<?>> conditions = request.getConditions();
  assertThat(conditions.size(), equalTo(3));
  MaxAgeCondition maxAgeCondition = (MaxAgeCondition)conditions.get(MaxAgeCondition.NAME);
  assertThat(maxAgeCondition.value.getMillis(), equalTo(TimeValue.timeValueHours(24 * 10).getMillis()));
@@ -109,7 +109,7 @@ public class RolloverRequestTests extends ESTestCase {
  .endObject()
  .endObject();
  request.fromXContent(createParser(builder));
- Map<String, Condition> conditions = request.getConditions();
+ Map<String, Condition<?>> conditions = request.getConditions();
  assertThat(conditions.size(), equalTo(2));
  assertThat(request.getCreateIndexRequest().mappings().size(), equalTo(1));
  assertThat(request.getCreateIndexRequest().aliases().size(), equalTo(1));
@@ -129,8 +129,8 @@ public class RolloverRequestTests extends ESTestCase {
  cloneRequest.readFrom(in);
  assertThat(cloneRequest.getNewIndexName(), equalTo(originalRequest.getNewIndexName()));
  assertThat(cloneRequest.getAlias(), equalTo(originalRequest.getAlias()));
- for (Map.Entry<String, Condition> entry : cloneRequest.getConditions().entrySet()) {
- Condition condition = originalRequest.getConditions().get(entry.getKey());
+ for (Map.Entry<String, Condition<?>> entry : cloneRequest.getConditions().entrySet()) {
+ Condition<?> condition = originalRequest.getConditions().get(entry.getKey());
  //here we compare the string representation as there is some information loss when serializing
  //and de-serializing MaxAgeCondition
  assertEquals(condition.toString(), entry.getValue().toString());

@ -63,7 +63,7 @@ public class TransportRolloverActionTests extends ESTestCase {
long docsInPrimaryShards = 100;
long docsInShards = 200;

final Condition condition = createTestCondition();
final Condition<?> condition = createTestCondition();
evaluateConditions(Sets.newHashSet(condition), createMetaData(), createIndicesStatResponse(docsInShards, docsInPrimaryShards));
final ArgumentCaptor<Condition.Stats> argument = ArgumentCaptor.forClass(Condition.Stats.class);
verify(condition).evaluate(argument.capture());

@ -89,7 +89,7 @@ public class TransportRolloverActionTests extends ESTestCase {
.creationDate(System.currentTimeMillis() - TimeValue.timeValueHours(3).getMillis())
.settings(settings)
.build();
final Set<Condition> conditions = Sets.newHashSet(maxDocsCondition, maxAgeCondition, maxSizeCondition);
final Set<Condition<?>> conditions = Sets.newHashSet(maxDocsCondition, maxAgeCondition, maxSizeCondition);
Map<String, Boolean> results = evaluateConditions(conditions,
new DocsStats(matchMaxDocs, 0L, ByteSizeUnit.MB.toBytes(120)), metaData);
assertThat(results.size(), equalTo(3));

@ -117,7 +117,7 @@ public class TransportRolloverActionTests extends ESTestCase {
MaxAgeCondition maxAgeCondition = new MaxAgeCondition(TimeValue.timeValueHours(randomIntBetween(1, 3)));
MaxSizeCondition maxSizeCondition = new MaxSizeCondition(new ByteSizeValue(randomNonNegativeLong()));

Set<Condition> conditions = Sets.newHashSet(maxDocsCondition, maxAgeCondition, maxSizeCondition);
Set<Condition<?>> conditions = Sets.newHashSet(maxDocsCondition, maxAgeCondition, maxSizeCondition);
final Settings settings = Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())

@ -285,8 +285,8 @@ public class TransportRolloverActionTests extends ESTestCase {
.build();
}

private static Condition createTestCondition() {
final Condition condition = mock(Condition.class);
private static Condition<?> createTestCondition() {
final Condition<?> condition = mock(Condition.class);
when(condition.evaluate(any())).thenReturn(new Condition.Result(condition, true));
return condition;
}
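The rollover hunks above repeatedly swap the raw Condition type for Condition<?>. A minimal standalone sketch of that pattern, using simplified stand-in classes rather than the Elasticsearch ones:

import java.util.HashSet;
import java.util.Set;

public class ConditionWildcardSketch {

    interface Condition<T> {
        boolean evaluate(T value);
    }

    static class MaxDocsCondition implements Condition<Long> {
        @Override public boolean evaluate(Long docs) { return docs > 1_000L; }
    }

    static class MaxAgeCondition implements Condition<java.time.Duration> {
        @Override public boolean evaluate(java.time.Duration age) { return age.toHours() >= 24; }
    }

    public static void main(String[] args) {
        // A raw Set<Condition> would compile with rawtypes/unchecked warnings;
        // the unbounded wildcard keeps the set heterogeneous without them.
        Set<Condition<?>> conditions = new HashSet<>();
        conditions.add(new MaxDocsCondition());
        conditions.add(new MaxAgeCondition());
        System.out.println(conditions.size()); // 2
    }
}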
@ -37,7 +37,6 @@ import org.elasticsearch.transport.NodeDisconnectedException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@ -74,9 +73,10 @@ public class IndicesShardStoreResponseTests extends ESTestCase {

try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) {
Map<String, Object> map = parser.map();
List failureList = (List) map.get("failures");
List<?> failureList = (List<?>) map.get("failures");
assertThat(failureList.size(), equalTo(1));
HashMap failureMap = (HashMap) failureList.get(0);
@SuppressWarnings("unchecked")
Map<String, ?> failureMap = (Map<String, ?>) failureList.get(0);
assertThat(failureMap.containsKey("index"), equalTo(true));
assertThat(((String) failureMap.get("index")), equalTo("test"));
assertThat(failureMap.containsKey("shard"), equalTo(true));

@ -84,18 +84,22 @@ public class IndicesShardStoreResponseTests extends ESTestCase {
assertThat(failureMap.containsKey("node"), equalTo(true));
assertThat(((String) failureMap.get("node")), equalTo("node1"));

@SuppressWarnings("unchecked")
Map<String, Object> indices = (Map<String, Object>) map.get("indices");
for (String index : new String[] {"test", "test2"}) {
assertThat(indices.containsKey(index), equalTo(true));
@SuppressWarnings("unchecked")
Map<String, Object> shards = ((Map<String, Object>) ((Map<String, Object>) indices.get(index)).get("shards"));
assertThat(shards.size(), equalTo(2));
for (String shardId : shards.keySet()) {
HashMap shardStoresStatus = (HashMap) shards.get(shardId);
@SuppressWarnings("unchecked")
Map<String, ?> shardStoresStatus = (Map<String, ?>) shards.get(shardId);
assertThat(shardStoresStatus.containsKey("stores"), equalTo(true));
List stores = (ArrayList) shardStoresStatus.get("stores");
List<?> stores = (List<?>) shardStoresStatus.get("stores");
assertThat(stores.size(), equalTo(storeStatusList.size()));
for (int i = 0; i < stores.size(); i++) {
HashMap storeInfo = ((HashMap) stores.get(i));
@SuppressWarnings("unchecked")
Map<String, ?> storeInfo = ((Map<String, ?>) stores.get(i));
IndicesShardStoresResponse.StoreStatus storeStatus = storeStatusList.get(i);
assertThat(((String) storeInfo.get("allocation_id")), equalTo((storeStatus.getAllocationId())));
assertThat(storeInfo.containsKey("allocation"), equalTo(true));
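The IndicesShardStoreResponseTests hunks replace raw HashMap/List casts with wildcard casts and attach @SuppressWarnings("unchecked") to the single declaration that needs it. A small self-contained sketch of that idiom (generic names, not the test's actual parsing code):

import java.util.Collections;
import java.util.List;
import java.util.Map;

public class NarrowSuppressionSketch {

    public static void main(String[] args) {
        Object parsed = Collections.singletonList(Collections.singletonMap("index", "test"));

        // List<?> needs no annotation: a cast to an all-wildcard type is fully checked.
        List<?> failures = (List<?>) parsed;

        // Only the cast the compiler cannot verify is annotated, and the annotation
        // is scoped to this one local variable rather than the whole method.
        @SuppressWarnings("unchecked")
        Map<String, ?> failure = (Map<String, ?>) failures.get(0);

        System.out.println(failure.get("index")); // test
    }
}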
@ -19,12 +19,6 @@

package org.elasticsearch.action.bulk;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.index.IndexRequest;

@ -34,6 +28,12 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;

@ -114,7 +114,7 @@ public class BulkRequestModifierTests extends ESTestCase {
});

List<BulkItemResponse> originalResponses = new ArrayList<>();
for (DocWriteRequest actionRequest : bulkRequest.requests()) {
for (DocWriteRequest<?> actionRequest : bulkRequest.requests()) {
IndexRequest indexRequest = (IndexRequest) actionRequest;
IndexResponse indexResponse = new IndexResponse(new ShardId("index", "_na_", 0), indexRequest.type(),
indexRequest.id(), 1, 17, 1, true);
@ -64,9 +64,9 @@ import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyBoolean;
import static org.mockito.Mockito.anyLong;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

@ -91,7 +91,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {

public void testShouldExecuteReplicaItem() throws Exception {
// Successful index request should be replicated
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id")
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id")
.source(Requests.INDEX_CONTENT_TYPE, "foo", "bar");
DocWriteResponse response = new IndexResponse(shardId, "type", "id", 1, 17, 1, randomBoolean());
BulkItemRequest request = new BulkItemRequest(0, writeRequest);

@ -121,9 +121,9 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
assertThat(replicaItemExecutionMode(request, 0),
equalTo(ReplicaItemExecutionMode.FAILURE));
// NOOP requests should not be replicated
writeRequest = new UpdateRequest("index", "type", "id");
DocWriteRequest<UpdateRequest> updateRequest = new UpdateRequest("index", "type", "id");
response = new UpdateResponse(shardId, "type", "id", 1, DocWriteResponse.Result.NOOP);
request = new BulkItemRequest(0, writeRequest);
request = new BulkItemRequest(0, updateRequest);
request.setPrimaryResponse(new BulkItemResponse(0, DocWriteRequest.OpType.UPDATE,
response));
assertThat(replicaItemExecutionMode(request, 0),

@ -137,7 +137,8 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {

BulkItemRequest[] items = new BulkItemRequest[1];
boolean create = randomBoolean();
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE).create(create);
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE)
.create(create);
BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);
items[0] = primaryRequest;
BulkShardRequest bulkShardRequest =

@ -208,7 +209,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {

BulkItemRequest[] items = new BulkItemRequest[randomIntBetween(2, 5)];
for (int i = 0; i < items.length; i++) {
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id_" + i)
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id_" + i)
.source(Requests.INDEX_CONTENT_TYPE)
.opType(DocWriteRequest.OpType.INDEX);
items[i] = new BulkItemRequest(i, writeRequest);

@ -258,7 +259,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
IndexShard shard = newStartedShard(true);

BulkItemRequest[] items = new BulkItemRequest[1];
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id")
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id")
.source(Requests.INDEX_CONTENT_TYPE, "foo", "bar");
items[0] = new BulkItemRequest(0, writeRequest);
BulkShardRequest bulkShardRequest =

@ -281,7 +282,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
IndexShard shard = newStartedShard(true);

BulkItemRequest[] items = new BulkItemRequest[1];
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id")
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id")
.source(Requests.INDEX_CONTENT_TYPE, "foo", "bar");
items[0] = new BulkItemRequest(0, writeRequest);
BulkShardRequest bulkShardRequest =

@ -323,7 +324,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
IndexShard shard = newStartedShard(true);

BulkItemRequest[] items = new BulkItemRequest[1];
DocWriteRequest writeRequest = new DeleteRequest("index", "_doc", "id");
DocWriteRequest<DeleteRequest> writeRequest = new DeleteRequest("index", "_doc", "id");
items[0] = new BulkItemRequest(0, writeRequest);
BulkShardRequest bulkShardRequest =
new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

@ -339,7 +340,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
assertThat(newLocation, not(location));

BulkItemRequest replicaRequest = bulkShardRequest.items()[0];
DocWriteRequest replicaDeleteRequest = replicaRequest.request();
DocWriteRequest<?> replicaDeleteRequest = replicaRequest.request();
BulkItemResponse primaryResponse = replicaRequest.getPrimaryResponse();
DeleteResponse response = primaryResponse.getResponse();

@ -405,7 +406,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
}

public void testNoopUpdateReplicaRequest() throws Exception {
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id")
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id")
.source(Requests.INDEX_CONTENT_TYPE, "field", "value");
BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest);

@ -436,7 +437,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
}

public void testUpdateReplicaRequestWithFailure() throws Exception {
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE);
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE);
BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest);

Exception err = new ElasticsearchException("I'm dead <(x.x)>");

@ -472,7 +473,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
}

public void testUpdateReplicaRequestWithConflictFailure() throws Exception {
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE);
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE);
BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest);

Exception err = new VersionConflictEngineException(shardId, "_doc", "id",

@ -509,7 +510,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
}

public void testUpdateReplicaRequestWithSuccess() throws Exception {
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id")
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id")
.source(Requests.INDEX_CONTENT_TYPE);
BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest);

@ -545,7 +546,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
public void testCalculateTranslogLocation() throws Exception {
final Translog.Location original = new Translog.Location(0, 0, 0);

DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id")
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id")
.source(Requests.INDEX_CONTENT_TYPE);
BulkItemRequest replicaRequest = new BulkItemRequest(0, writeRequest);
BulkItemResultHolder results = new BulkItemResultHolder(null, null, replicaRequest);

@ -659,7 +660,6 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {

BulkItemRequest[] itemRequests = new BulkItemRequest[1];
itemRequests[0] = request;
BulkShardRequest bulkShardRequest = new BulkShardRequest(shard.shardId(), RefreshPolicy.NONE, itemRequests);

BulkItemResultHolder holder = TransportShardBulkAction.processUpdateResponse(updateRequest,
"index", indexResult, translate, shard, 7);

@ -671,7 +671,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
assertThat(holder.operationResult, equalTo(indexResult));
BulkItemRequest replicaBulkRequest = holder.replicaRequest;
assertThat(replicaBulkRequest.id(), equalTo(7));
DocWriteRequest replicaRequest = replicaBulkRequest.request();
DocWriteRequest<?> replicaRequest = replicaBulkRequest.request();
assertThat(replicaRequest, instanceOf(IndexRequest.class));
assertThat(replicaRequest, equalTo(indexRequest));

@ -685,7 +685,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
assertThat(deleteHolder.operationResult, equalTo(deleteResult));
BulkItemRequest delReplicaBulkRequest = deleteHolder.replicaRequest;
assertThat(delReplicaBulkRequest.id(), equalTo(8));
DocWriteRequest delReplicaRequest = delReplicaBulkRequest.request();
DocWriteRequest<?> delReplicaRequest = delReplicaBulkRequest.request();
assertThat(delReplicaRequest, instanceOf(DeleteRequest.class));
assertThat(delReplicaRequest, equalTo(deleteRequest));

@ -699,13 +699,11 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
Map<String, Object> source = new HashMap<>();
BulkItemRequest[] items = new BulkItemRequest[1];
boolean create = randomBoolean();
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE).create(create);
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id").source(Requests.INDEX_CONTENT_TYPE)
.create(create);
BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);
items[0] = primaryRequest;
BulkShardRequest bulkShardRequest =
new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

Translog.Location location = new Translog.Location(0, 0, 0);
IndexRequest indexRequest = new IndexRequest("index", "_doc", "id");
indexRequest.source(source);

@ -730,7 +728,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
assertThat(updateResp.getGetResult(), equalTo(null));
BulkItemRequest replicaBulkRequest = holder.replicaRequest;
assertThat(replicaBulkRequest.id(), equalTo(0));
DocWriteRequest replicaRequest = replicaBulkRequest.request();
DocWriteRequest<?> replicaRequest = replicaBulkRequest.request();
assertThat(replicaRequest, instanceOf(IndexRequest.class));
assertThat(replicaRequest, equalTo(indexRequest));

@ -747,19 +745,15 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
source.put("foo", "bar");
BulkItemRequest[] items = new BulkItemRequest[1];
boolean create = randomBoolean();
DocWriteRequest writeRequest = new IndexRequest("index", "_doc", "id")
DocWriteRequest<IndexRequest> writeRequest = new IndexRequest("index", "_doc", "id")
.source(Requests.INDEX_CONTENT_TYPE, "foo", "bar")
.create(create);
BulkItemRequest primaryRequest = new BulkItemRequest(0, writeRequest);
items[0] = primaryRequest;
BulkShardRequest bulkShardRequest =
new BulkShardRequest(shardId, RefreshPolicy.NONE, items);

Translog.Location location = new Translog.Location(0, 0, 0);
IndexRequest indexRequest = new IndexRequest("index", "_doc", "id");
indexRequest.source(source);

DocWriteResponse.Result docWriteResult = DocWriteResponse.Result.CREATED;
Exception prepareFailure = new IllegalArgumentException("I failed to do something!");
UpdateHelper updateHelper = new FailingUpdateHelper(prepareFailure);
UpdateRequest updateRequest = new UpdateRequest("index", "_doc", "id");

@ -849,6 +843,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {

/** Doesn't perform any mapping updates */
public static class NoopMappingUpdatePerformer implements MappingUpdatePerformer {
@Override
public void updateMappings(Mapping update, ShardId shardId, String type) {
}
}

@ -860,6 +855,7 @@ public class TransportShardBulkActionTests extends IndexShardTestCase {
this.e = e;
}

@Override
public void updateMappings(Mapping update, ShardId shardId, String type) {
throw e;
}
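Most TransportShardBulkActionTests hunks change DocWriteRequest into DocWriteRequest<IndexRequest> (or <DeleteRequest>) where the concrete request is constructed, and into DocWriteRequest<?> where only the wrapper is read back. A simplified, self-contained sketch of the two forms, using stand-in types rather than the real interfaces:

public class DocWriteRequestSketch {

    interface DocWriteRequest<T> {
        String id();
    }

    static class IndexRequest implements DocWriteRequest<IndexRequest> {
        @Override public String id() { return "index-1"; }
    }

    static class DeleteRequest implements DocWriteRequest<DeleteRequest> {
        @Override public String id() { return "delete-1"; }
    }

    public static void main(String[] args) {
        // Construction site: the concrete type is known, so declare it.
        DocWriteRequest<IndexRequest> writeRequest = new IndexRequest();

        // Read-back site: the element type no longer matters, so a wildcard
        // replaces the raw type.
        DocWriteRequest<?> replicaRequest = firstNonNull(writeRequest, new DeleteRequest());
        System.out.println(replicaRequest.id());
    }

    private static DocWriteRequest<?> firstNonNull(DocWriteRequest<?> a, DocWriteRequest<?> b) {
        return a != null ? a : b;
    }
}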
@ -96,7 +96,7 @@ public class SearchAsyncActionTests extends ESTestCase {
lookup.put(replicaNode.getId(), new MockConnection(replicaNode));
Map<String, AliasFilter> aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY));
AtomicInteger numRequests = new AtomicInteger(0);
AbstractSearchAsyncAction asyncAction =
AbstractSearchAsyncAction<TestSearchPhaseResult> asyncAction =
new AbstractSearchAsyncAction<TestSearchPhaseResult>(
"test",
logger,

@ -190,7 +190,7 @@ public class SearchAsyncActionTests extends ESTestCase {
CountDownLatch awaitInitialRequests = new CountDownLatch(1);
AtomicInteger numRequests = new AtomicInteger(0);
AtomicInteger numResponses = new AtomicInteger(0);
AbstractSearchAsyncAction asyncAction =
AbstractSearchAsyncAction<TestSearchPhaseResult> asyncAction =
new AbstractSearchAsyncAction<TestSearchPhaseResult>(
"test",
logger,

@ -297,7 +297,7 @@ public class SearchAsyncActionTests extends ESTestCase {
lookup.put(replicaNode.getId(), new MockConnection(replicaNode));
Map<String, AliasFilter> aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY));
final ExecutorService executor = Executors.newFixedThreadPool(randomIntBetween(1, Runtime.getRuntime().availableProcessors()));
AbstractSearchAsyncAction asyncAction =
AbstractSearchAsyncAction<TestSearchPhaseResult> asyncAction =
new AbstractSearchAsyncAction<TestSearchPhaseResult>(
"test",
logger,
@ -128,9 +128,9 @@ public class SearchScrollAsyncActionTests extends ESTestCase {
request.scroll(new Scroll(TimeValue.timeValueMinutes(1)));
CountDownLatch latch = new CountDownLatch(1);
AtomicInteger movedCounter = new AtomicInteger(0);
ActionListener listener = new ActionListener() {
ActionListener<SearchResponse> listener = new ActionListener<SearchResponse>() {
@Override
public void onResponse(Object o) {
public void onResponse(SearchResponse o) {
try {
fail("got a result");
} finally {

@ -374,9 +374,9 @@ public class SearchScrollAsyncActionTests extends ESTestCase {
SearchScrollRequest request = new SearchScrollRequest();
request.scroll(new Scroll(TimeValue.timeValueMinutes(1)));
CountDownLatch latch = new CountDownLatch(1);
ActionListener listener = new ActionListener() {
ActionListener<SearchResponse> listener = new ActionListener<SearchResponse>() {
@Override
public void onResponse(Object o) {
public void onResponse(SearchResponse o) {
try {
fail("got a result");
} finally {
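The SearchScrollAsyncActionTests hunks parameterize the anonymous ActionListener so onResponse receives SearchResponse instead of Object. A minimal sketch with a stand-in listener interface:

public class TypedListenerSketch {

    interface ActionListener<R> {
        void onResponse(R response);
        void onFailure(Exception e);
    }

    public static void main(String[] args) {
        // Parameterizing the anonymous class gives onResponse the concrete type,
        // so no cast from Object is needed inside the callback.
        ActionListener<String> listener = new ActionListener<String>() {
            @Override
            public void onResponse(String response) {
                System.out.println("response length: " + response.length());
            }

            @Override
            public void onFailure(Exception e) {
                e.printStackTrace();
            }
        };
        listener.onResponse("hello");
    }
}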
@ -308,15 +308,15 @@ public class IndicesOptionsTests extends ESTestCase {

boolean open = wildcardStates.contains(WildcardStates.OPEN);
if (open) {
assertTrue(((List)map.get("expand_wildcards")).contains("open"));
assertTrue(((List<?>) map.get("expand_wildcards")).contains("open"));
} else {
assertFalse(((List)map.get("expand_wildcards")).contains("open"));
assertFalse(((List<?>) map.get("expand_wildcards")).contains("open"));
}
boolean closed = wildcardStates.contains(WildcardStates.CLOSED);
if (closed) {
assertTrue(((List)map.get("expand_wildcards")).contains("closed"));
assertTrue(((List<?>) map.get("expand_wildcards")).contains("closed"));
} else {
assertFalse(((List)map.get("expand_wildcards")).contains("closed"));
assertFalse(((List<?>) map.get("expand_wildcards")).contains("closed"));
}
assertEquals(map.get("ignore_unavailable"), options.contains(Option.IGNORE_UNAVAILABLE));
assertEquals(map.get("allow_no_indices"), options.contains(Option.ALLOW_NO_INDICES));
@ -208,8 +208,8 @@ public class BroadcastReplicationTests extends ESTestCase {
protected final Set<Tuple<ShardId, ActionListener<ReplicationResponse>>> capturedShardRequests = ConcurrentCollections.newConcurrentSet();

TestBroadcastReplicationAction(Settings settings, ClusterService clusterService, TransportService transportService,
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
TransportReplicationAction replicatedBroadcastShardAction) {
ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
TransportReplicationAction<BasicReplicationRequest, BasicReplicationRequest, ReplicationResponse> replicatedBroadcastShardAction) {
super("internal:test-broadcast-replication-action", DummyBroadcastRequest::new, settings, clusterService, transportService,
actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction);
}
@ -140,6 +140,7 @@ public class UpdateRequestTests extends ESTestCase {
updateHelper = new UpdateHelper(settings, scriptService);
}

@SuppressWarnings("unchecked")
public void testFromXContent() throws Exception {
UpdateRequest request = new UpdateRequest("test", "type", "1");
// simple script

@ -233,7 +234,7 @@ public class UpdateRequestTests extends ESTestCase {
Map<String, Object> upsertDoc =
XContentHelper.convertToMap(request.upsertRequest().source(), true, request.upsertRequest().getContentType()).v2();
assertThat(upsertDoc.get("field1").toString(), equalTo("value1"));
assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));
assertThat(((Map<String, Object>) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));

request = new UpdateRequest("test", "type", "1");
request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject()

@ -260,7 +261,7 @@ public class UpdateRequestTests extends ESTestCase {
assertThat(params.get("param1").toString(), equalTo("value1"));
upsertDoc = XContentHelper.convertToMap(request.upsertRequest().source(), true, request.upsertRequest().getContentType()).v2();
assertThat(upsertDoc.get("field1").toString(), equalTo("value1"));
assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));
assertThat(((Map<String, Object>) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));

// script with doc
request = new UpdateRequest("test", "type", "1");

@ -275,7 +276,7 @@ public class UpdateRequestTests extends ESTestCase {
.endObject()));
Map<String, Object> doc = request.doc().sourceAsMap();
assertThat(doc.get("field1").toString(), equalTo("value1"));
assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
assertThat(((Map<String, Object>) doc.get("compound")).get("field2").toString(), equalTo("value2"));
}

public void testUnknownFieldParsing() throws Exception {

@ -424,7 +425,7 @@ public class UpdateRequestTests extends ESTestCase {
ESTestCase::randomNonNegativeLong);
final Streamable action = result.action();
assertThat(action, instanceOf(ReplicationRequest.class));
final ReplicationRequest request = (ReplicationRequest) action;
final ReplicationRequest<?> request = (ReplicationRequest<?>) action;
assertThat(request.timeout(), equalTo(updateRequest.timeout()));
}
@ -167,8 +167,7 @@ public class ClusterModuleTests extends ModuleTestCase {

public void testShardsAllocatorFactoryNull() {
Settings settings = Settings.builder().put(ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING.getKey(), "bad").build();
NullPointerException e = expectThrows(NullPointerException.class, () ->
newClusterModuleWithShardsAllocator(settings, "bad", () -> null));
expectThrows(NullPointerException.class, () -> newClusterModuleWithShardsAllocator(settings, "bad", () -> null));
}

// makes sure that the allocation deciders are setup in the correct order, such that the
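The ClusterModuleTests hunk drops the unused NullPointerException local, calling expectThrows only for its side effect of asserting that the exception is thrown. A standalone sketch with a hand-rolled stand-in for expectThrows:

public class ExpectThrowsSketch {

    @FunctionalInterface
    interface ThrowingRunnable {
        void run() throws Exception;
    }

    // Stand-in for the test framework's expectThrows: asserts the type and returns it.
    static <T extends Throwable> T expectThrows(Class<T> expected, ThrowingRunnable runnable) {
        try {
            runnable.run();
        } catch (Throwable t) {
            if (expected.isInstance(t)) {
                return expected.cast(t);
            }
            throw new AssertionError("unexpected exception type: " + t.getClass(), t);
        }
        throw new AssertionError("expected " + expected.getSimpleName() + " but nothing was thrown");
    }

    public static void main(String[] args) {
        // The return value is ignored on purpose: the call itself is the assertion,
        // so there is no need for an unused local variable.
        expectThrows(NullPointerException.class, () -> ((Object) null).toString());
        System.out.println("NullPointerException was thrown as expected");
    }
}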
@ -19,17 +19,10 @@

package org.elasticsearch.common.geo;

import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.MultiLineString;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.geo.parsers.ShapeParser;
import org.elasticsearch.common.settings.Settings;

@ -41,6 +34,12 @@ import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.MultiLineString;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.spatial4j.exception.InvalidShapeException;
import org.locationtech.spatial4j.shape.Circle;
import org.locationtech.spatial4j.shape.Rectangle;

@ -828,7 +827,7 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase {
.endArray()
.endObject();

ShapeCollection expected = shapeCollection(
ShapeCollection<?> expected = shapeCollection(
SPATIAL_CONTEXT.makePoint(100, 0),
SPATIAL_CONTEXT.makePoint(101, 1.0));
assertGeometryEquals(expected, multiPointGeoJson);

@ -951,6 +950,7 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase {
assertGeometryEquals(jtsGeom(withHoles), multiPolygonGeoJson);
}

@Override
public void testParseGeometryCollection() throws IOException {
XContentBuilder geometryCollectionGeoJson = XContentFactory.jsonBuilder()
.startObject()
@ -18,12 +18,6 @@
*/
package org.elasticsearch.common.geo;

import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.MultiLineString;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;
import org.apache.lucene.geo.GeoTestUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;

@ -50,6 +44,12 @@ import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.test.geo.RandomShapeGenerator;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.jts.geom.LineString;
import org.locationtech.jts.geom.LinearRing;
import org.locationtech.jts.geom.MultiLineString;
import org.locationtech.jts.geom.Point;
import org.locationtech.jts.geom.Polygon;
import org.locationtech.spatial4j.exception.InvalidShapeException;
import org.locationtech.spatial4j.shape.Rectangle;
import org.locationtech.spatial4j.shape.Shape;

@ -69,7 +69,7 @@ import static org.hamcrest.Matchers.hasToString;
*/
public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase {

private static XContentBuilder toWKTContent(ShapeBuilder builder, boolean generateMalformed)
private static XContentBuilder toWKTContent(ShapeBuilder<?, ?> builder, boolean generateMalformed)
throws IOException {
String wkt = builder.toWKT();
if (generateMalformed) {

@ -84,12 +84,12 @@ public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase {
return XContentFactory.jsonBuilder().value(wkt);
}

private void assertExpected(Shape expected, ShapeBuilder builder) throws IOException {
private void assertExpected(Shape expected, ShapeBuilder<?, ?> builder) throws IOException {
XContentBuilder xContentBuilder = toWKTContent(builder, false);
assertGeometryEquals(expected, xContentBuilder);
}

private void assertMalformed(ShapeBuilder builder) throws IOException {
private void assertMalformed(ShapeBuilder<?, ?> builder) throws IOException {
XContentBuilder xContentBuilder = toWKTContent(builder, true);
assertValidException(xContentBuilder, ElasticsearchParseException.class);
}

@ -114,7 +114,7 @@ public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase {
coordinates.add(new Coordinate(p.lon(), p.lat()));
shapes[i] = SPATIAL_CONTEXT.makePoint(p.lon(), p.lat());
}
ShapeCollection expected = shapeCollection(shapes);
ShapeCollection<?> expected = shapeCollection(shapes);
assertExpected(expected, new MultiPointBuilder(coordinates));
assertMalformed(new MultiPointBuilder(coordinates));
}

@ -314,7 +314,7 @@ public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase {
Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath());
final GeoShapeFieldMapper mapperBuilder = new GeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext);

ShapeBuilder shapeBuilder = ShapeParser.parse(parser, mapperBuilder);
ShapeBuilder<?, ?> shapeBuilder = ShapeParser.parse(parser, mapperBuilder);
assertEquals(shapeBuilder.numDimensions(), 3);
}

@ -66,10 +66,10 @@ public class GeometryCollectionBuilderTests extends AbstractShapeBuilderTestCase
}

static GeometryCollectionBuilder mutate(GeometryCollectionBuilder original) throws IOException {
GeometryCollectionBuilder mutation = (GeometryCollectionBuilder) copyShape(original);
GeometryCollectionBuilder mutation = copyShape(original);
if (mutation.shapes.size() > 0) {
int shapePosition = randomIntBetween(0, mutation.shapes.size() - 1);
ShapeBuilder shapeToChange = mutation.shapes.get(shapePosition);
ShapeBuilder<?, ?> shapeToChange = mutation.shapes.get(shapePosition);
switch (shapeToChange.type()) {
case POINT:
shapeToChange = PointBuilderTests.mutate((PointBuilder) shapeToChange);
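ShapeBuilder carries two type parameters, so the geo test hunks above replace the raw type with ShapeBuilder<?, ?>. A compact sketch of the same idea with simplified stand-in builders:

public class DoubleWildcardSketch {

    abstract static class ShapeBuilder<S, B extends ShapeBuilder<S, B>> {
        abstract S buildShape();
    }

    static class PointBuilder extends ShapeBuilder<String, PointBuilder> {
        @Override String buildShape() { return "POINT (0 0)"; }
    }

    static class LineStringBuilder extends ShapeBuilder<String, LineStringBuilder> {
        @Override String buildShape() { return "LINESTRING (0 0, 1 1)"; }
    }

    public static void main(String[] args) {
        // The test code does not care which concrete builder it holds, so both
        // parameters become wildcards instead of falling back to the raw type.
        ShapeBuilder<?, ?> builder = Math.random() < 0.5 ? new PointBuilder() : new LineStringBuilder();
        System.out.println(builder.buildShape());
    }
}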
@ -118,7 +118,7 @@ public class XContentMapValuesTests extends AbstractFilteringTestCase {
extValue = XContentMapValues.extractValue("path1.test", map);
assertThat(extValue, instanceOf(List.class));

List<?> extListValue = (List) extValue;
List<?> extListValue = (List<?>) extValue;
assertThat(extListValue, hasSize(2));

builder = XContentFactory.jsonBuilder().startObject()

@ -137,7 +137,7 @@ public class XContentMapValuesTests extends AbstractFilteringTestCase {
extValue = XContentMapValues.extractValue("path1.path2.test", map);
assertThat(extValue, instanceOf(List.class));

extListValue = (List) extValue;
extListValue = (List<?>) extValue;
assertThat(extListValue, hasSize(2));
assertThat(extListValue.get(0).toString(), equalTo("value1"));
assertThat(extListValue.get(1).toString(), equalTo("value2"));

@ -225,13 +225,13 @@ public class XContentMapValuesTests extends AbstractFilteringTestCase {
assertThat(filteredMap.size(), equalTo(1));

assertThat(((List<?>) filteredMap.get("array")), hasSize(1));
assertThat(((Map<String, Object>) ((List) filteredMap.get("array")).get(0)).size(), equalTo(1));
assertThat((Integer) ((Map<String, Object>) ((List) filteredMap.get("array")).get(0)).get("nested"), equalTo(2));
assertThat(((Map<String, Object>) ((List<?>) filteredMap.get("array")).get(0)).size(), equalTo(1));
assertThat((Integer) ((Map<String, Object>) ((List<?>) filteredMap.get("array")).get(0)).get("nested"), equalTo(2));

filteredMap = XContentMapValues.filter(map, new String[]{"array.*"}, Strings.EMPTY_ARRAY);
assertThat(filteredMap.size(), equalTo(1));
assertThat(((List<?>) filteredMap.get("array")), hasSize(1));
assertThat(((Map<String, Object>) ((List) filteredMap.get("array")).get(0)).size(), equalTo(2));
assertThat(((Map<String, Object>) ((List<?>) filteredMap.get("array")).get(0)).size(), equalTo(2));

map.clear();
map.put("field", "value");

@ -285,16 +285,16 @@ public class XContentMapValuesTests extends AbstractFilteringTestCase {

filteredMap = XContentMapValues.filter(map, new String[]{"array"}, new String[]{});
assertThat(filteredMap.size(), equalTo(1));
assertThat(((List) filteredMap.get("array")).size(), equalTo(2));
assertThat((Integer) ((List) filteredMap.get("array")).get(0), equalTo(1));
assertThat(((Map<String, Object>) ((List) filteredMap.get("array")).get(1)).size(), equalTo(2));
assertThat(((List<?>) filteredMap.get("array")).size(), equalTo(2));
assertThat((Integer) ((List<?>) filteredMap.get("array")).get(0), equalTo(1));
assertThat(((Map<String, Object>) ((List<?>) filteredMap.get("array")).get(1)).size(), equalTo(2));

filteredMap = XContentMapValues.filter(map, new String[]{"array"}, new String[]{"*.field2"});
assertThat(filteredMap.size(), equalTo(1));
assertThat(((List<?>) filteredMap.get("array")), hasSize(2));
assertThat((Integer) ((List) filteredMap.get("array")).get(0), equalTo(1));
assertThat(((Map<String, Object>) ((List) filteredMap.get("array")).get(1)).size(), equalTo(1));
assertThat(((Map<String, Object>) ((List) filteredMap.get("array")).get(1)).get("field").toString(), equalTo("value"));
assertThat((Integer) ((List<?>) filteredMap.get("array")).get(0), equalTo(1));
assertThat(((Map<String, Object>) ((List<?>) filteredMap.get("array")).get(1)).size(), equalTo(1));
assertThat(((Map<String, Object>) ((List<?>) filteredMap.get("array")).get(1)).get("field").toString(), equalTo("value"));
}

@SuppressWarnings("unchecked")

@ -331,7 +331,7 @@ public class XContentMapValuesTests extends AbstractFilteringTestCase {
assertThat(filteredMap.size(), equalTo(3));
assertThat(filteredMap, hasKey("field"));
assertThat(filteredMap, hasKey("obj"));
assertThat(((Map) filteredMap.get("obj")).size(), equalTo(1));
assertThat(((Map<String, Object>) filteredMap.get("obj")).size(), equalTo(1));
assertThat(((Map<String, Object>) filteredMap.get("obj")), hasKey("field"));
assertThat(filteredMap, hasKey("n_obj"));
assertThat(((Map<String, Object>) filteredMap.get("n_obj")).size(), equalTo(1));

@ -371,6 +371,7 @@ public class XContentMapValuesTests extends AbstractFilteringTestCase {
assertThat(mapTuple.v2(), equalTo(filteredSource));
}

@SuppressWarnings("unchecked")
public void testNotOmittingObjectsWithExcludedProperties() throws Exception {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
.startObject("obj")

@ -383,7 +384,7 @@ public class XContentMapValuesTests extends AbstractFilteringTestCase {

assertThat(filteredSource.size(), equalTo(1));
assertThat(filteredSource, hasKey("obj"));
assertThat(((Map) filteredSource.get("obj")).size(), equalTo(0));
assertThat(((Map<String, Object>) filteredSource.get("obj")).size(), equalTo(0));
}

@SuppressWarnings({"unchecked"})

@ -403,20 +404,20 @@ public class XContentMapValuesTests extends AbstractFilteringTestCase {

assertThat(filteredSource.size(), equalTo(1));
assertThat(filteredSource, hasKey("obj1"));
assertThat(((Map) filteredSource.get("obj1")).size(), equalTo(0));
assertThat(((Map<String, Object>) filteredSource.get("obj1")).size(), equalTo(0));

// explicit include
filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"obj1"}, new String[]{"*.obj2"});
assertThat(filteredSource.size(), equalTo(1));
assertThat(filteredSource, hasKey("obj1"));
assertThat(((Map) filteredSource.get("obj1")).size(), equalTo(0));
assertThat(((Map<String, Object>) filteredSource.get("obj1")).size(), equalTo(0));

// wild card include
filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"*.obj2"}, new String[]{"*.obj3"});
assertThat(filteredSource.size(), equalTo(1));
assertThat(filteredSource, hasKey("obj1"));
assertThat(((Map<String, Object>) filteredSource.get("obj1")), hasKey("obj2"));
assertThat(((Map) ((Map) filteredSource.get("obj1")).get("obj2")).size(), equalTo(0));
assertThat(((Map<String, Object>) ((Map<String, Object>) filteredSource.get("obj1")).get("obj2")).size(), equalTo(0));
}

@SuppressWarnings({"unchecked"})

@ -433,9 +434,9 @@ public class XContentMapValuesTests extends AbstractFilteringTestCase {

assertThat(filteredSource.size(), equalTo(1));
assertThat(filteredSource, hasKey("obj1"));
assertThat(((Map) filteredSource.get("obj1")).size(), equalTo(1));
assertThat(((Map<String, Object>) filteredSource.get("obj1")).size(), equalTo(1));
assertThat(((Map<String, Object>) filteredSource.get("obj1")), hasKey("obj2"));
assertThat(((Map) ((Map) filteredSource.get("obj1")).get("obj2")).size(), equalTo(0));
assertThat(((Map<String, Object>) ((Map<String, Object>) filteredSource.get("obj1")).get("obj2")).size(), equalTo(0));
}
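The XContentMapValuesTests hunks cast intermediate values to List<?> where possible and to Map<String, Object> only where the string keys are actually used, keeping @SuppressWarnings("unchecked") at the method level for the remaining unchecked casts. A small sketch of digging through such a parsed structure (hypothetical data, not the test fixtures):

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class NestedCastSketch {

    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        Map<String, Object> source = new HashMap<>();
        source.put("array", Collections.singletonList(Collections.singletonMap("nested", 2)));

        // Wildcard cast: checked by the compiler, no warning to suppress.
        List<?> array = (List<?>) source.get("array");

        // Unchecked cast: covered by the method-level annotation above.
        Map<String, Object> first = (Map<String, Object>) array.get(0);

        System.out.println(first.get("nested")); // 2
    }
}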
@ -35,8 +35,8 @@ import org.elasticsearch.test.InternalTestCluster.RestartCallback;

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;

@ -80,6 +80,7 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase {
assertIndexInMetaState(masterNode, index);
}

@SuppressWarnings("unchecked")
public void testMetaWrittenWhenIndexIsClosedAndMetaUpdated() throws Exception {
String masterNode = internalCluster().startMasterOnlyNode(Settings.EMPTY);
final String dataNode = internalCluster().startDataOnlyNode(Settings.EMPTY);

@ -108,11 +109,11 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase {
.endObject()).get();

GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes("_doc").get();
assertNotNull(((LinkedHashMap) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get("integer_field"));
assertNotNull(((Map<String,?>) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get("integer_field"));

// make sure it was also written on red node although index is closed
ImmutableOpenMap<String, IndexMetaData> indicesMetaData = getIndicesMetaDataOnNode(dataNode);
assertNotNull(((LinkedHashMap) (indicesMetaData.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("integer_field"));
assertNotNull(((Map<String,?>) (indicesMetaData.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("integer_field"));
assertThat(indicesMetaData.get(index).getState(), equalTo(IndexMetaData.State.CLOSE));

/* Try the same and see if this also works if node was just restarted.

@ -133,11 +134,11 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase {
.endObject()).get();

getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes("_doc").get();
assertNotNull(((LinkedHashMap) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get("float_field"));
assertNotNull(((Map<String,?>) (getMappingsResponse.getMappings().get(index).get("_doc").getSourceAsMap().get("properties"))).get("float_field"));

// make sure it was also written on red node although index is closed
indicesMetaData = getIndicesMetaDataOnNode(dataNode);
assertNotNull(((LinkedHashMap) (indicesMetaData.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("float_field"));
assertNotNull(((Map<String,?>) (indicesMetaData.get(index).getMappings().get("_doc").getSourceAsMap().get("properties"))).get("float_field"));
assertThat(indicesMetaData.get(index).getState(), equalTo(IndexMetaData.State.CLOSE));

// finally check that meta data is also written of index opened again
@ -26,6 +26,7 @@ import org.apache.lucene.index.Term;
import org.elasticsearch.index.fielddata.plain.AbstractAtomicGeoPointFieldData;

import java.util.List;

import static org.hamcrest.Matchers.greaterThanOrEqualTo;

/**

@ -153,7 +154,7 @@ public class GeoFieldDataTests extends AbstractGeoFieldDataTestCase {
@Override
public void testSingleValueAllSet() throws Exception {
fillSingleValueAllSet();
IndexFieldData indexFieldData = getForField("value");
IndexFieldData<?> indexFieldData = getForField("value");
List<LeafReaderContext> readerContexts = refreshReader();
for (LeafReaderContext readerContext : readerContexts) {
AtomicFieldData fieldData = indexFieldData.load(readerContext);

@ -169,7 +170,7 @@ public class GeoFieldDataTests extends AbstractGeoFieldDataTestCase {
@Override
public void testSingleValueWithMissing() throws Exception {
fillSingleValueWithMissing();
IndexFieldData indexFieldData = getForField("value");
IndexFieldData<?> indexFieldData = getForField("value");
List<LeafReaderContext> readerContexts = refreshReader();
for (LeafReaderContext readerContext : readerContexts) {
AtomicFieldData fieldData = indexFieldData.load(readerContext);

@ -185,7 +186,7 @@ public class GeoFieldDataTests extends AbstractGeoFieldDataTestCase {
@Override
public void testMultiValueAllSet() throws Exception {
fillMultiValueAllSet();
IndexFieldData indexFieldData = getForField("value");
IndexFieldData<?> indexFieldData = getForField("value");
List<LeafReaderContext> readerContexts = refreshReader();
for (LeafReaderContext readerContext : readerContexts) {
AtomicFieldData fieldData = indexFieldData.load(readerContext);

@ -201,7 +202,7 @@ public class GeoFieldDataTests extends AbstractGeoFieldDataTestCase {
@Override
public void testMultiValueWithMissing() throws Exception {
fillMultiValueWithMissing();
IndexFieldData indexFieldData = getForField("value");
IndexFieldData<?> indexFieldData = getForField("value");
List<LeafReaderContext> readerContexts = refreshReader();
for (LeafReaderContext readerContext : readerContexts) {
AtomicFieldData fieldData = indexFieldData.load(readerContext);
@ -43,6 +43,7 @@ import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

public class MultiFieldsIntegrationIT extends ESIntegTestCase {
@SuppressWarnings("unchecked")
public void testMultiFields() throws Exception {
assertAcked(
client().admin().indices().prepareCreate("my-index")

@ -53,10 +54,10 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
Map<String, Object> mappingSource = mappingMetaData.sourceAsMap();
Map titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource));
Map<String, Object> titleFields = ((Map<String, Object>) XContentMapValues.extractValue("properties.title.fields", mappingSource));
assertThat(titleFields.size(), equalTo(1));
assertThat(titleFields.get("not_analyzed"), notNullValue());
assertThat(((Map)titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword"));
assertThat(((Map<String, Object>) titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword"));

client().prepareIndex("my-index", "my-type", "1")
.setSource("title", "Multi fields")

@ -81,13 +82,13 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
mappingSource = mappingMetaData.sourceAsMap();
assertThat(((Map) XContentMapValues.extractValue("properties.title", mappingSource)).size(), equalTo(2));
titleFields = ((Map) XContentMapValues.extractValue("properties.title.fields", mappingSource));
assertThat(((Map<String, Object>) XContentMapValues.extractValue("properties.title", mappingSource)).size(), equalTo(2));
titleFields = ((Map<String, Object>) XContentMapValues.extractValue("properties.title.fields", mappingSource));
assertThat(titleFields.size(), equalTo(2));
assertThat(titleFields.get("not_analyzed"), notNullValue());
assertThat(((Map)titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword"));
assertThat(((Map<String, Object>) titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword"));
assertThat(titleFields.get("uncased"), notNullValue());
assertThat(((Map)titleFields.get("uncased")).get("analyzer").toString(), equalTo("whitespace"));
assertThat(((Map<String, Object>) titleFields.get("uncased")).get("analyzer").toString(), equalTo("whitespace"));

client().prepareIndex("my-index", "my-type", "1")
.setSource("title", "Multi fields")

@ -100,6 +101,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
}

@SuppressWarnings("unchecked")
public void testGeoPointMultiField() throws Exception {
assertAcked(
client().admin().indices().prepareCreate("my-index")

@ -110,13 +112,13 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
Map<String, Object> mappingSource = mappingMetaData.sourceAsMap();
Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource));
Map<String, Object> aField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a", mappingSource));
logger.info("Keys: {}", aField.keySet());
assertThat(aField.size(), equalTo(2));
assertThat(aField.get("type").toString(), equalTo("geo_point"));
assertThat(aField.get("fields"), notNullValue());

Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
Map<String, Object> bField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(1));
assertThat(bField.get("type").toString(), equalTo("keyword"));

@ -130,6 +132,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
assertThat(countResponse.getHits().getTotalHits(), equalTo(1L));
}

@SuppressWarnings("unchecked")
public void testCompletionMultiField() throws Exception {
assertAcked(
client().admin().indices().prepareCreate("my-index")

@ -140,12 +143,12 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
Map<String, Object> mappingSource = mappingMetaData.sourceAsMap();
Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource));
Map<String, Object> aField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a", mappingSource));
assertThat(aField.size(), equalTo(6));
assertThat(aField.get("type").toString(), equalTo("completion"));
assertThat(aField.get("fields"), notNullValue());

Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
Map<String, Object> bField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(1));
assertThat(bField.get("type").toString(), equalTo("keyword"));

@ -154,6 +157,7 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
assertThat(countResponse.getHits().getTotalHits(), equalTo(1L));
}

@SuppressWarnings("unchecked")
public void testIpMultiField() throws Exception {
assertAcked(
client().admin().indices().prepareCreate("my-index")

@ -164,12 +168,12 @@ public class MultiFieldsIntegrationIT extends ESIntegTestCase {
MappingMetaData mappingMetaData = getMappingsResponse.mappings().get("my-index").get("my-type");
assertThat(mappingMetaData, not(nullValue()));
Map<String, Object> mappingSource = mappingMetaData.sourceAsMap();
Map aField = ((Map) XContentMapValues.extractValue("properties.a", mappingSource));
Map<String, Object> aField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a", mappingSource));
assertThat(aField.size(), equalTo(2));
assertThat(aField.get("type").toString(), equalTo("ip"));
assertThat(aField.get("fields"), notNullValue());

Map bField = ((Map) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
Map<String, Object> bField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
assertThat(bField.size(), equalTo(1));
assertThat(bField.get("type").toString(), equalTo("keyword"));
@ -19,7 +19,6 @@

package org.elasticsearch.index.query;

import org.locationtech.jts.geom.Coordinate;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;

@ -29,6 +28,7 @@ import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.geo.RandomShapeGenerator;
import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
import org.locationtech.jts.geom.Coordinate;
import org.locationtech.spatial4j.shape.jts.JtsGeometry;

import java.io.IOException;

@ -73,7 +73,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
}

private static List<GeoPoint> randomPolygon() {
ShapeBuilder shapeBuilder = null;
ShapeBuilder<?, ?> shapeBuilder = null;
// This is a temporary fix because sometimes the RandomShapeGenerator
// returns null. This is if there is an error generating the polygon. So
// in this case keep trying until we successfully generate one

@ -19,8 +19,6 @@

package org.elasticsearch.index.query;

import org.locationtech.jts.geom.Coordinate;

import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchNoDocsQuery;

@ -44,6 +42,7 @@ import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.geo.RandomShapeGenerator;
import org.elasticsearch.test.geo.RandomShapeGenerator.ShapeType;
import org.junit.After;
import org.locationtech.jts.geom.Coordinate;

import java.io.IOException;

@ -60,7 +59,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
private static String indexedShapePath;
private static String indexedShapeIndex;
private static String indexedShapeRouting;
private static ShapeBuilder indexedShapeToReturn;
private static ShapeBuilder<?, ?> indexedShapeToReturn;

@Override
protected GeoShapeQueryBuilder doCreateTestQueryBuilder() {

@ -68,7 +67,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
}
private GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) {
ShapeType shapeType = ShapeType.randomType(random());
ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType);
ShapeBuilder<?, ?> shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType);
GeoShapeQueryBuilder builder;
clearShapeFields();
if (indexedShape == false) {

@ -166,7 +165,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
}

public void testNoFieldName() throws Exception {
ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
ShapeBuilder<?, ?> shape = RandomShapeGenerator.createShapeWithin(random(), null);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GeoShapeQueryBuilder(null, shape));
assertEquals("fieldName is required", e.getMessage());
}

@ -188,14 +187,14 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
}

public void testNoRelation() throws IOException {
ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
ShapeBuilder<?, ?> shape = RandomShapeGenerator.createShapeWithin(random(), null);
GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.relation(null));
assertEquals("No Shape Relation defined", e.getMessage());
}

public void testInvalidRelation() throws IOException {
ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null);
ShapeBuilder<?, ?> shape = RandomShapeGenerator.createShapeWithin(random(), null);
GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
builder.strategy(SpatialStrategy.TERM);
expectThrows(IllegalArgumentException.class, () -> builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN)));

@ -266,7 +265,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue

public void testIgnoreUnmapped() throws IOException {
ShapeType shapeType = ShapeType.randomType(random());
ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType);
ShapeBuilder<?, ?> shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType);
final GeoShapeQueryBuilder queryBuilder = new GeoShapeQueryBuilder("unmapped", shape);
queryBuilder.ignoreUnmapped(true);
Query query = queryBuilder.toQuery(createShardContext());

@ -282,7 +281,7 @@ public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQue
public void testWrongFieldType() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
ShapeType shapeType = ShapeType.randomType(random());
ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType);
ShapeBuilder<?, ?> shape = RandomShapeGenerator.createShapeWithin(random(), null, shapeType);
final GeoShapeQueryBuilder queryBuilder = new GeoShapeQueryBuilder(STRING_FIELD_NAME, shape);
QueryShardException e = expectThrows(QueryShardException.class, () -> queryBuilder.toQuery(createShardContext()));
assertThat(e.getMessage(), containsString("Field [mapped_string] is not of type [geo_shape] but of type [text]"));
@@ -66,7 +66,7 @@ public class RewriteableTests extends ESTestCase {

public void testRewriteList() throws IOException {
QueryRewriteContext context = new QueryRewriteContext(null, null, null, null);
List<TestRewriteable> rewriteableList = new ArrayList();
List<TestRewriteable> rewriteableList = new ArrayList<>();
int numInstances = randomIntBetween(1, 10);
rewriteableList.add(new TestRewriteable(randomIntBetween(1, Rewriteable.MAX_REWRITE_ROUNDS)));
for (int i = 0; i < numInstances; i++) {

@@ -103,7 +103,7 @@ public class RewriteableTests extends ESTestCase {
this(numRewrites, fetch, null);
}

TestRewriteable(int numRewrites, boolean fetch, Supplier supplier) {
TestRewriteable(int numRewrites, boolean fetch, Supplier<Boolean> supplier) {
this.numRewrites = numRewrites;
this.fetch = fetch;
this.supplier = supplier;
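
Both fixes in the RewriteableTests hunks are the same cleanup: new ArrayList() and a bare Supplier compile, but the raw types erase the element and result types, so callers either cast or pick up unchecked-conversion warnings. A small before/after sketch (hypothetical class, plain Java, not the test code):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Supplier;

    final class RawVersusDiamond {

        private RawVersusDiamond() {}

        static List<String> rawStyle() {
            // Raw construction: compiles only with an unchecked-conversion warning,
            // and nothing stops the wrong element type from sneaking in beforehand.
            @SuppressWarnings({"rawtypes", "unchecked"})
            List<String> names = new ArrayList();
            return names;
        }

        static List<String> diamondStyle(Supplier<String> defaultName) {
            // The diamond infers <String> from the target type, and the typed
            // supplier's get() needs no cast.
            List<String> names = new ArrayList<>();
            names.add(defaultName.get());
            return names;
        }
    }
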
@@ -42,17 +42,17 @@ import java.util.function.Predicate;
public abstract class ModuleTestCase extends ESTestCase {

/** Configures the module and asserts "clazz" is bound to "to". */
public void assertBinding(Module module, Class to, Class clazz) {
public void assertBinding(Module module, Class<?> to, Class<?> clazz) {
List<Element> elements = Elements.getElements(module);
for (Element element : elements) {
if (element instanceof LinkedKeyBinding) {
LinkedKeyBinding binding = (LinkedKeyBinding) element;
LinkedKeyBinding<?> binding = (LinkedKeyBinding<?>) element;
if (to.equals(binding.getKey().getTypeLiteral().getType())) {
assertSame(clazz, binding.getLinkedKey().getTypeLiteral().getType());
return;
}
} else if (element instanceof UntargettedBinding) {
UntargettedBinding binding = (UntargettedBinding) element;
UntargettedBinding<?> binding = (UntargettedBinding<?>) element;
if (to.equals(binding.getKey().getTypeLiteral().getType())) {
assertSame(clazz, to);
return;
@@ -67,16 +67,16 @@ public abstract class ModuleTestCase extends ESTestCase {
}

/** Configures the module and asserts "clazz" is not bound to anything. */
public void assertNotBound(Module module, Class clazz) {
public void assertNotBound(Module module, Class<?> clazz) {
List<Element> elements = Elements.getElements(module);
for (Element element : elements) {
if (element instanceof LinkedKeyBinding) {
LinkedKeyBinding binding = (LinkedKeyBinding) element;
LinkedKeyBinding<?> binding = (LinkedKeyBinding<?>) element;
if (clazz.equals(binding.getKey().getTypeLiteral().getType())) {
fail("Found binding for " + clazz.getName() + " to " + binding.getKey().getTypeLiteral().getType().getTypeName());
}
} else if (element instanceof UntargettedBinding) {
UntargettedBinding binding = (UntargettedBinding) element;
UntargettedBinding<?> binding = (UntargettedBinding<?>) element;
if (clazz.equals(binding.getKey().getTypeLiteral().getType())) {
fail("Found binding for " + clazz.getName());
}
@@ -107,18 +107,18 @@
* Configures the module and checks a Map<String, Class> of the "to" class
* is bound to "theClass".
*/
public void assertMapMultiBinding(Module module, Class to, Class theClass) {
public void assertMapMultiBinding(Module module, Class<?> to, Class<?> theClass) {
List<Element> elements = Elements.getElements(module);
Set<Type> bindings = new HashSet<>();
boolean providerFound = false;
for (Element element : elements) {
if (element instanceof LinkedKeyBinding) {
LinkedKeyBinding binding = (LinkedKeyBinding) element;
LinkedKeyBinding<?> binding = (LinkedKeyBinding<?>) element;
if (to.equals(binding.getKey().getTypeLiteral().getType())) {
bindings.add(binding.getLinkedKey().getTypeLiteral().getType());
}
} else if (element instanceof ProviderInstanceBinding) {
ProviderInstanceBinding binding = (ProviderInstanceBinding) element;
ProviderInstanceBinding<?> binding = (ProviderInstanceBinding<?>) element;
String setType = binding.getKey().getTypeLiteral().getType().toString();
if (setType.equals("java.util.Map<java.lang.String, " + to.getName() + ">")) {
providerFound = true;
@@ -138,18 +138,18 @@
* is bound to "classes". There may be more classes bound
* to "to" than just "classes".
*/
public void assertSetMultiBinding(Module module, Class to, Class... classes) {
public void assertSetMultiBinding(Module module, Class<?> to, Class<?>... classes) {
List<Element> elements = Elements.getElements(module);
Set<Type> bindings = new HashSet<>();
boolean providerFound = false;
for (Element element : elements) {
if (element instanceof LinkedKeyBinding) {
LinkedKeyBinding binding = (LinkedKeyBinding) element;
LinkedKeyBinding<?> binding = (LinkedKeyBinding<?>) element;
if (to.equals(binding.getKey().getTypeLiteral().getType())) {
bindings.add(binding.getLinkedKey().getTypeLiteral().getType());
}
} else if (element instanceof ProviderInstanceBinding) {
ProviderInstanceBinding binding = (ProviderInstanceBinding) element;
ProviderInstanceBinding<?> binding = (ProviderInstanceBinding<?>) element;
String setType = binding.getKey().getTypeLiteral().getType().toString();
if (setType.equals("java.util.Set<" + to.getName() + ">")) {
providerFound = true;

@@ -157,7 +157,7 @@
}
}

for (Class clazz : classes) {
for (Class<?> clazz : classes) {
if (bindings.contains(clazz) == false) {
fail("Expected to find " + clazz.getName() + " as set binding to " + to.getName() + ", found these classes:\n" + bindings);
}
@@ -180,12 +180,12 @@
List<Element> elements = Elements.getElements(module);
for (Element element : elements) {
if (element instanceof InstanceBinding) {
InstanceBinding binding = (InstanceBinding) element;
InstanceBinding<?> binding = (InstanceBinding<?>) element;
if (to.equals(binding.getKey().getTypeLiteral().getType())) {
return to.cast(binding.getInstance());
}
} else if (element instanceof ProviderInstanceBinding) {
ProviderInstanceBinding binding = (ProviderInstanceBinding) element;
ProviderInstanceBinding<?> binding = (ProviderInstanceBinding<?>) element;
if (to.equals(binding.getKey().getTypeLiteral().getType())) {
return to.cast(binding.getProviderInstance().get());
}

@@ -203,7 +203,7 @@
List<Element> elements = Elements.getElements(module);
for (Element element : elements) {
if (element instanceof InstanceBinding) {
InstanceBinding binding = (InstanceBinding) element;
InstanceBinding<?> binding = (InstanceBinding<?>) element;
if (to.equals(binding.getKey().getTypeLiteral().getType())) {
if (annotation == null || annotation.equals(binding.getKey().getAnnotationType())) {
assertTrue(tester.test(to.cast(binding.getInstance())));

@@ -211,7 +211,7 @@
}
}
} else if (element instanceof ProviderInstanceBinding) {
ProviderInstanceBinding binding = (ProviderInstanceBinding) element;
ProviderInstanceBinding<?> binding = (ProviderInstanceBinding<?>) element;
if (to.equals(binding.getKey().getTypeLiteral().getType())) {
assertTrue(tester.test(to.cast(binding.getProviderInstance().get())));
return;
@@ -232,27 +232,27 @@
@SuppressWarnings("unchecked")
public <K, V> void assertMapInstanceBinding(Module module, Class<K> keyType, Class<V> valueType, Map<K, V> expected) throws Exception {
// this method is insane because java type erasure makes it incredibly difficult...
Map<K, Key> keys = new HashMap<>();
Map<Key, V> values = new HashMap<>();
Map<K, Key<?>> keys = new HashMap<>();
Map<Key<?>, V> values = new HashMap<>();
List<Element> elements = Elements.getElements(module);
for (Element element : elements) {
if (element instanceof InstanceBinding) {
InstanceBinding binding = (InstanceBinding) element;
InstanceBinding<?> binding = (InstanceBinding<?>) element;
if (binding.getKey().getRawType().equals(valueType)) {
values.put(binding.getKey(), (V) binding.getInstance());
} else if (binding.getInstance() instanceof Map.Entry) {
Map.Entry entry = (Map.Entry) binding.getInstance();
Map.Entry<?, ?> entry = (Map.Entry<?, ?>) binding.getInstance();
Object key = entry.getKey();
Object providerValue = entry.getValue();
if (key.getClass().equals(keyType) && providerValue instanceof ProviderLookup.ProviderImpl) {
ProviderLookup.ProviderImpl provider = (ProviderLookup.ProviderImpl) providerValue;
ProviderLookup.ProviderImpl<?> provider = (ProviderLookup.ProviderImpl<?>) providerValue;
keys.put((K) key, provider.getKey());
}
}
}
}
for (Map.Entry<K, V> entry : expected.entrySet()) {
Key valueKey = keys.get(entry.getKey());
Key<?> valueKey = keys.get(entry.getKey());
assertNotNull("Could not find binding for key [" + entry.getKey() + "], found these keys:\n" + keys.keySet(), valueKey);
V value = values.get(valueKey);
assertNotNull("Could not find value for instance key [" + valueKey + "], found these bindings:\n" + elements);
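
The recurring pattern across the ModuleTestCase hunks is: test with instanceof, then cast to the wildcard form (LinkedKeyBinding<?>, InstanceBinding<?>, and so on) instead of the raw type. instanceof cannot see type arguments anyway, and unlike a raw cast the wildcard keeps every subsequent call type-checked. A self-contained sketch of the same idiom with a made-up Holder type (not the Guice SPI used above):

    import java.util.List;

    final class WildcardCasts {

        private WildcardCasts() {}

        // Minimal stand-in for a generic binding-like element.
        interface Holder<T> {
            Class<T> type();
            T value();
        }

        // instanceof only checks the raw class, so the cast target is Holder<?>;
        // unlike a raw Holder, the wildcard form keeps generic checking downstream.
        static Object firstValueOfType(List<?> elements, Class<?> wanted) {
            for (Object element : elements) {
                if (element instanceof Holder) {
                    Holder<?> holder = (Holder<?>) element;
                    if (wanted.equals(holder.type())) {
                        return holder.value();
                    }
                }
            }
            return null;
        }
    }
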
@@ -67,8 +67,8 @@ public class DatafeedJobBuilderTests extends ESTestCase {
}).when(jobProvider).dataCounts(any(), any(), any());

doAnswer(invocationOnMock -> {
@SuppressWarnings("rawtypes")
Consumer consumer = (Consumer) invocationOnMock.getArguments()[3];
@SuppressWarnings("unchecked")
Consumer<ResourceNotFoundException> consumer = (Consumer<ResourceNotFoundException>) invocationOnMock.getArguments()[3];
consumer.accept(new ResourceNotFoundException("dummy"));
return null;
}).when(jobProvider).bucketsViaInternalClient(any(), any(), any(), any());

@@ -153,8 +153,8 @@

Exception error = new RuntimeException("error");
doAnswer(invocationOnMock -> {
@SuppressWarnings("rawtypes")
Consumer consumer = (Consumer) invocationOnMock.getArguments()[3];
@SuppressWarnings("unchecked")
Consumer<Exception> consumer = (Consumer<Exception>) invocationOnMock.getArguments()[3];
consumer.accept(error);
return null;
}).when(jobProvider).bucketsViaInternalClient(any(), any(), any(), any());

@@ -176,11 +176,11 @@
}).when(jobProvider).dataCounts(any(), any(), any());

doAnswer(invocationOnMock -> {
@SuppressWarnings("rawtypes")
Consumer consumer = (Consumer) invocationOnMock.getArguments()[2];
@SuppressWarnings("unchecked")
Consumer<QueryPage<Bucket>> consumer = (Consumer<QueryPage<Bucket>>) invocationOnMock.getArguments()[2];
Bucket bucket = mock(Bucket.class);
when(bucket.getTimestamp()).thenReturn(new Date(latestBucketTimestamp));
QueryPage<Bucket> bucketQueryPage = new QueryPage(Collections.singletonList(bucket), 1, Bucket.RESULTS_FIELD);
QueryPage<Bucket> bucketQueryPage = new QueryPage<Bucket>(Collections.singletonList(bucket), 1, Bucket.RESULTS_FIELD);
consumer.accept(bucketQueryPage);
return null;
}).when(jobProvider).bucketsViaInternalClient(any(), any(), any(), any());
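
In the DatafeedJobBuilderTests hunks the suppression changes from "rawtypes" to "unchecked" because the value pulled out of getArguments() is now cast to a parameterized Consumer: the raw type is gone, and what remains to silence is the unavoidable unchecked cast from Object. A minimal Mockito sketch of the same callback-stubbing pattern, using a hypothetical Lookup interface in place of the job provider:

    import static org.mockito.Mockito.any;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.mock;

    import java.util.function.Consumer;

    final class CallbackStubbing {

        private CallbackStubbing() {}

        // Hypothetical asynchronous API: the result is delivered to the consumer argument.
        interface Lookup {
            void fetch(String id, Consumer<String> onResult);
        }

        static Lookup stubbedLookup() {
            Lookup lookup = mock(Lookup.class);
            doAnswer(invocation -> {
                // getArguments() returns Object[], so narrowing to Consumer<String>
                // is an unchecked cast; no raw type is involved.
                @SuppressWarnings("unchecked")
                Consumer<String> onResult = (Consumer<String>) invocation.getArguments()[1];
                onResult.accept("stubbed-result");
                return null;
            }).when(lookup).fetch(any(), any());
            return lookup;
        }
    }

The jobProvider stubs above follow the same shape: the consumer sits at a known argument index, gets the typed cast, and is invoked synchronously so the test can drive the callback path.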