Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-02-17 10:25:15 +00:00)

Merge branch 'master' into tests/switch_to_random_value_other_than_for_sort

This commit is contained in commit cf1d0d5935
@ -21,6 +21,7 @@ import com.bmuschko.gradle.nexus.NexusPlugin
import org.eclipse.jgit.lib.Repository
import org.eclipse.jgit.lib.RepositoryBuilder
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import org.apache.tools.ant.taskdefs.condition.Os

// common maven publishing configuration
subprojects {
@ -249,6 +250,9 @@ allprojects {
// Name all the non-root projects after their path so that paths get grouped together when imported into eclipse.
if (path != ':') {
eclipse.project.name = path
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
eclipse.project.name = eclipse.project.name.replace(':', '_')
}
}

plugins.withType(JavaBasePlugin) {
@ -355,6 +355,11 @@ class BuildPlugin implements Plugin<Project> {
}
options.encoding = 'UTF-8'
//options.incremental = true

// gradle ignores target/source compatibility when it is "unnecessary", but since to compile with
// java 9, gradle is running in java 8, it incorrectly thinks it is unnecessary
assert minimumJava == JavaVersion.VERSION_1_8
options.compilerArgs << '-target' << '1.8' << '-source' << '1.8'
}
}
}
@ -57,11 +57,13 @@ class PluginPropertiesTask extends Copy {
// configure property substitution
from(templateFile)
into(generatedResourcesDir)
expand(generateSubstitutions())
Map<String, String> properties = generateSubstitutions()
expand(properties)
inputs.properties(properties)
}
}

Map generateSubstitutions() {
Map<String, String> generateSubstitutions() {
def stringSnap = { version ->
if (version.endsWith("-SNAPSHOT")) {
return version.substring(0, version.length() - 9)
@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.cluster.allocation;
|
||||
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.routing.UnassignedInfo;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
@ -32,7 +31,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
@ -45,21 +43,18 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
|
||||
private final ShardId shard;
|
||||
private final boolean primary;
|
||||
private final String assignedNodeId;
|
||||
private final Map<DiscoveryNode, Decision> nodeToDecision;
|
||||
private final Map<DiscoveryNode, Float> nodeWeights;
|
||||
private final UnassignedInfo unassignedInfo;
|
||||
private final long remainingDelayNanos;
|
||||
private final long remainingDelayMillis;
|
||||
private final Map<DiscoveryNode, NodeExplanation> nodeExplanations;
|
||||
|
||||
public ClusterAllocationExplanation(ShardId shard, boolean primary, @Nullable String assignedNodeId,
|
||||
UnassignedInfo unassignedInfo, Map<DiscoveryNode, Decision> nodeToDecision,
|
||||
Map<DiscoveryNode, Float> nodeWeights, long remainingDelayNanos) {
|
||||
public ClusterAllocationExplanation(ShardId shard, boolean primary, @Nullable String assignedNodeId, long remainingDelayMillis,
|
||||
@Nullable UnassignedInfo unassignedInfo, Map<DiscoveryNode, NodeExplanation> nodeExplanations) {
|
||||
this.shard = shard;
|
||||
this.primary = primary;
|
||||
this.assignedNodeId = assignedNodeId;
|
||||
this.unassignedInfo = unassignedInfo;
|
||||
this.nodeToDecision = nodeToDecision == null ? Collections.emptyMap() : nodeToDecision;
|
||||
this.nodeWeights = nodeWeights == null ? Collections.emptyMap() : nodeWeights;
|
||||
this.remainingDelayNanos = remainingDelayNanos;
|
||||
this.remainingDelayMillis = remainingDelayMillis;
|
||||
this.nodeExplanations = nodeExplanations;
|
||||
}
|
||||
|
||||
public ClusterAllocationExplanation(StreamInput in) throws IOException {
|
||||
@ -67,27 +62,15 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
|
||||
this.primary = in.readBoolean();
|
||||
this.assignedNodeId = in.readOptionalString();
|
||||
this.unassignedInfo = in.readOptionalWriteable(UnassignedInfo::new);
|
||||
this.remainingDelayMillis = in.readVLong();
|
||||
|
||||
Map<DiscoveryNode, Decision> ntd = null;
|
||||
int size = in.readVInt();
|
||||
ntd = new HashMap<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
DiscoveryNode dn = new DiscoveryNode(in);
|
||||
Decision decision = Decision.readFrom(in);
|
||||
ntd.put(dn, decision);
|
||||
int mapSize = in.readVInt();
|
||||
Map<DiscoveryNode, NodeExplanation> nodeToExplanation = new HashMap<>(mapSize);
|
||||
for (int i = 0; i < mapSize; i++) {
|
||||
NodeExplanation nodeExplanation = new NodeExplanation(in);
|
||||
nodeToExplanation.put(nodeExplanation.getNode(), nodeExplanation);
|
||||
}
|
||||
this.nodeToDecision = ntd;
|
||||
|
||||
Map<DiscoveryNode, Float> ntw = null;
|
||||
size = in.readVInt();
|
||||
ntw = new HashMap<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
DiscoveryNode dn = new DiscoveryNode(in);
|
||||
float weight = in.readFloat();
|
||||
ntw.put(dn, weight);
|
||||
}
|
||||
this.nodeWeights = ntw;
|
||||
remainingDelayNanos = in.readVLong();
|
||||
this.nodeExplanations = nodeToExplanation;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -96,27 +79,20 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
|
||||
out.writeBoolean(this.isPrimary());
|
||||
out.writeOptionalString(this.getAssignedNodeId());
|
||||
out.writeOptionalWriteable(this.getUnassignedInfo());
|
||||
out.writeVLong(remainingDelayMillis);
|
||||
|
||||
Map<DiscoveryNode, Decision> ntd = this.getNodeDecisions();
|
||||
out.writeVInt(ntd.size());
|
||||
for (Map.Entry<DiscoveryNode, Decision> entry : ntd.entrySet()) {
|
||||
entry.getKey().writeTo(out);
|
||||
Decision.writeTo(entry.getValue(), out);
|
||||
out.writeVInt(this.nodeExplanations.size());
|
||||
for (NodeExplanation explanation : this.nodeExplanations.values()) {
|
||||
explanation.writeTo(out);
|
||||
}
|
||||
Map<DiscoveryNode, Float> ntw = this.getNodeWeights();
|
||||
out.writeVInt(ntw.size());
|
||||
for (Map.Entry<DiscoveryNode, Float> entry : ntw.entrySet()) {
|
||||
entry.getKey().writeTo(out);
|
||||
out.writeFloat(entry.getValue());
|
||||
}
|
||||
out.writeVLong(remainingDelayNanos);
|
||||
}
|
||||
|
||||
|
||||
/** Return the shard that the explanation is about */
|
||||
public ShardId getShard() {
|
||||
return this.shard;
|
||||
}
|
||||
|
||||
/** Return true if the explained shard is primary, false otherwise */
|
||||
public boolean isPrimary() {
|
||||
return this.primary;
|
||||
}
|
||||
@ -138,22 +114,14 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
|
||||
return this.unassignedInfo;
|
||||
}
|
||||
|
||||
/** Return a map of node to decision for shard allocation */
|
||||
public Map<DiscoveryNode, Decision> getNodeDecisions() {
|
||||
return this.nodeToDecision;
|
||||
/** Return the remaining allocation delay for this shard in milliseconds */
|
||||
public long getRemainingDelayMillis() {
|
||||
return this.remainingDelayMillis;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a map of node to balancer "weight" for allocation. Higher weights mean the balancer wants to allocate the shard to that node
|
||||
* more
|
||||
*/
|
||||
public Map<DiscoveryNode, Float> getNodeWeights() {
|
||||
return this.nodeWeights;
|
||||
}
|
||||
|
||||
/** Return the remaining allocation delay for this shard in nanoseconds */
|
||||
public long getRemainingDelayNanos() {
|
||||
return this.remainingDelayNanos;
|
||||
/** Return a map of node to the explanation for that node */
|
||||
public Map<DiscoveryNode, NodeExplanation> getNodeExplanations() {
|
||||
return this.nodeExplanations;
|
||||
}
|
||||
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
@ -174,36 +142,118 @@ public final class ClusterAllocationExplanation implements ToXContent, Writeable
|
||||
if (unassignedInfo != null) {
|
||||
unassignedInfo.toXContent(builder, params);
|
||||
long delay = unassignedInfo.getLastComputedLeftDelayNanos();
|
||||
builder.field("allocation_delay", TimeValue.timeValueNanos(delay));
|
||||
builder.field("allocation_delay_ms", TimeValue.timeValueNanos(delay).millis());
|
||||
builder.field("remaining_delay", TimeValue.timeValueNanos(remainingDelayNanos));
|
||||
builder.field("remaining_delay_ms", TimeValue.timeValueNanos(remainingDelayNanos).millis());
|
||||
builder.timeValueField("allocation_delay_in_millis", "allocation_delay", TimeValue.timeValueNanos(delay));
|
||||
builder.timeValueField("remaining_delay_in_millis", "remaining_delay", TimeValue.timeValueMillis(remainingDelayMillis));
|
||||
}
|
||||
builder.startObject("nodes");
|
||||
for (Map.Entry<DiscoveryNode, Float> entry : nodeWeights.entrySet()) {
|
||||
DiscoveryNode node = entry.getKey();
|
||||
builder.startObject(node.getId()); {
|
||||
builder.field("node_name", node.getName());
|
||||
builder.startObject("node_attributes"); {
|
||||
for (Map.Entry<String, String> attrEntry : node.getAttributes().entrySet()) {
|
||||
builder.field(attrEntry.getKey(), attrEntry.getValue());
|
||||
}
|
||||
}
|
||||
builder.endObject(); // end attributes
|
||||
Decision d = nodeToDecision.get(node);
|
||||
if (node.getId().equals(assignedNodeId)) {
|
||||
builder.field("final_decision", "CURRENTLY_ASSIGNED");
|
||||
} else {
|
||||
builder.field("final_decision", d.type().toString());
|
||||
}
|
||||
builder.field("weight", entry.getValue());
|
||||
d.toXContent(builder, params);
|
||||
}
|
||||
builder.endObject(); // end node <uuid>
|
||||
for (NodeExplanation explanation : nodeExplanations.values()) {
|
||||
explanation.toXContent(builder, params);
|
||||
}
|
||||
builder.endObject(); // end nodes
|
||||
}
|
||||
builder.endObject(); // end wrapping object
|
||||
return builder;
|
||||
}
|
||||
|
||||
/** An Enum representing the final decision for a shard allocation on a node */
|
||||
public enum FinalDecision {
|
||||
// Yes, the shard can be assigned
|
||||
YES((byte) 0),
|
||||
// No, the shard cannot be assigned
|
||||
NO((byte) 1),
|
||||
// The shard is already assigned to this node
|
||||
ALREADY_ASSIGNED((byte) 2);
|
||||
|
||||
private final byte id;
|
||||
|
||||
FinalDecision (byte id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
private static FinalDecision fromId(byte id) {
|
||||
switch (id) {
|
||||
case 0: return YES;
|
||||
case 1: return NO;
|
||||
case 2: return ALREADY_ASSIGNED;
|
||||
default:
|
||||
throw new IllegalArgumentException("unknown id for final decision: [" + id + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
switch (id) {
|
||||
case 0: return "YES";
|
||||
case 1: return "NO";
|
||||
case 2: return "ALREADY_ASSIGNED";
|
||||
default:
|
||||
throw new IllegalArgumentException("unknown id for final decision: [" + id + "]");
|
||||
}
|
||||
}
|
||||
|
||||
static FinalDecision readFrom(StreamInput in) throws IOException {
|
||||
return fromId(in.readByte());
|
||||
}
|
||||
|
||||
void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeByte(id);
|
||||
}
|
||||
}
|
||||
|
||||
/** An Enum representing the state of the shard store's copy of the data on a node */
|
||||
public enum StoreCopy {
|
||||
// No data for this shard is on the node
|
||||
NONE((byte) 0),
|
||||
// A copy of the data is available on this node
|
||||
AVAILABLE((byte) 1),
|
||||
// The copy of the data on the node is corrupt
|
||||
CORRUPT((byte) 2),
|
||||
// There was an error reading this node's copy of the data
|
||||
IO_ERROR((byte) 3),
|
||||
// The copy of the data on the node is stale
|
||||
STALE((byte) 4),
|
||||
// It's unknown what the copy of the data is
|
||||
UNKNOWN((byte) 5);
|
||||
|
||||
private final byte id;
|
||||
|
||||
StoreCopy (byte id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
private static StoreCopy fromId(byte id) {
|
||||
switch (id) {
|
||||
case 0: return NONE;
|
||||
case 1: return AVAILABLE;
|
||||
case 2: return CORRUPT;
|
||||
case 3: return IO_ERROR;
|
||||
case 4: return STALE;
|
||||
case 5: return UNKNOWN;
|
||||
default:
|
||||
throw new IllegalArgumentException("unknown id for store copy: [" + id + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
switch (id) {
|
||||
case 0: return "NONE";
|
||||
case 1: return "AVAILABLE";
|
||||
case 2: return "CORRUPT";
|
||||
case 3: return "IO_ERROR";
|
||||
case 4: return "STALE";
|
||||
case 5: return "UNKNOWN";
|
||||
default:
|
||||
throw new IllegalArgumentException("unknown id for store copy: [" + id + "]");
|
||||
}
|
||||
}
|
||||
|
||||
static StoreCopy readFrom(StreamInput in) throws IOException {
|
||||
return fromId(in.readByte());
|
||||
}
|
||||
|
||||
void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeByte(id);
|
||||
}
|
||||
}
|
||||
}
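Both enums above share one wire pattern: each constant carries a fixed byte id, writeTo emits that single byte, and readFrom/fromId map it back, rejecting unknown ids. Below is a minimal, self-contained sketch of that round trip; it uses plain java.io streams rather than Elasticsearch's StreamInput/StreamOutput, so the class and stream names are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class ByteIdEnumSketch {
    // Mirrors the FinalDecision pattern: a stable byte id per constant.
    enum FinalDecision {
        YES((byte) 0), NO((byte) 1), ALREADY_ASSIGNED((byte) 2);

        private final byte id;
        FinalDecision(byte id) { this.id = id; }

        static FinalDecision fromId(byte id) {
            switch (id) {
                case 0: return YES;
                case 1: return NO;
                case 2: return ALREADY_ASSIGNED;
                default: throw new IllegalArgumentException("unknown id for final decision: [" + id + "]");
            }
        }
    }

    public static void main(String[] args) throws IOException {
        // Write the byte id, then read it back and map it onto the constant.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeByte(FinalDecision.NO.id);
        }
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            FinalDecision decision = FinalDecision.fromId(in.readByte());
            System.out.println(decision); // NO
        }
    }
}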
|
||||
|
@ -0,0 +1,145 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.allocation;
|
||||
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
/** The cluster allocation explanation for a single node */
|
||||
public class NodeExplanation implements Writeable, ToXContent {
|
||||
private final DiscoveryNode node;
|
||||
private final Decision nodeDecision;
|
||||
private final Float nodeWeight;
|
||||
private final IndicesShardStoresResponse.StoreStatus storeStatus;
|
||||
private final ClusterAllocationExplanation.FinalDecision finalDecision;
|
||||
private final ClusterAllocationExplanation.StoreCopy storeCopy;
|
||||
private final String finalExplanation;
|
||||
|
||||
public NodeExplanation(final DiscoveryNode node, final Decision nodeDecision, final Float nodeWeight,
|
||||
final @Nullable IndicesShardStoresResponse.StoreStatus storeStatus,
|
||||
final ClusterAllocationExplanation.FinalDecision finalDecision,
|
||||
final String finalExplanation,
|
||||
final ClusterAllocationExplanation.StoreCopy storeCopy) {
|
||||
this.node = node;
|
||||
this.nodeDecision = nodeDecision;
|
||||
this.nodeWeight = nodeWeight;
|
||||
this.storeStatus = storeStatus;
|
||||
this.finalDecision = finalDecision;
|
||||
this.finalExplanation = finalExplanation;
|
||||
this.storeCopy = storeCopy;
|
||||
}
|
||||
|
||||
public NodeExplanation(StreamInput in) throws IOException {
|
||||
this.node = new DiscoveryNode(in);
|
||||
this.nodeDecision = Decision.readFrom(in);
|
||||
this.nodeWeight = in.readFloat();
|
||||
if (in.readBoolean()) {
|
||||
this.storeStatus = IndicesShardStoresResponse.StoreStatus.readStoreStatus(in);
|
||||
} else {
|
||||
this.storeStatus = null;
|
||||
}
|
||||
this.finalDecision = ClusterAllocationExplanation.FinalDecision.readFrom(in);
|
||||
this.finalExplanation = in.readString();
|
||||
this.storeCopy = ClusterAllocationExplanation.StoreCopy.readFrom(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
node.writeTo(out);
|
||||
Decision.writeTo(nodeDecision, out);
|
||||
out.writeFloat(nodeWeight);
|
||||
if (storeStatus == null) {
|
||||
out.writeBoolean(false);
|
||||
} else {
|
||||
out.writeBoolean(true);
|
||||
storeStatus.writeTo(out);
|
||||
}
|
||||
finalDecision.writeTo(out);
|
||||
out.writeString(finalExplanation);
|
||||
storeCopy.writeTo(out);
|
||||
}
|
||||
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(node.getId()); {
|
||||
builder.field("node_name", node.getName());
|
||||
builder.startObject("node_attributes"); {
|
||||
for (Map.Entry<String, String> attrEntry : node.getAttributes().entrySet()) {
|
||||
builder.field(attrEntry.getKey(), attrEntry.getValue());
|
||||
}
|
||||
}
|
||||
builder.endObject(); // end attributes
|
||||
builder.startObject("store"); {
|
||||
builder.field("shard_copy", storeCopy.toString());
|
||||
if (storeStatus != null) {
|
||||
final Throwable storeErr = storeStatus.getStoreException();
|
||||
if (storeErr != null) {
|
||||
builder.field("store_exception", ExceptionsHelper.detailedMessage(storeErr));
|
||||
}
|
||||
}
|
||||
}
|
||||
builder.endObject(); // end store
|
||||
builder.field("final_decision", finalDecision.toString());
|
||||
builder.field("final_explanation", finalExplanation.toString());
|
||||
builder.field("weight", nodeWeight);
|
||||
nodeDecision.toXContent(builder, params);
|
||||
}
|
||||
builder.endObject(); // end node <uuid>
|
||||
return builder;
|
||||
}
|
||||
|
||||
public DiscoveryNode getNode() {
|
||||
return this.node;
|
||||
}
|
||||
|
||||
public Decision getDecision() {
|
||||
return this.nodeDecision;
|
||||
}
|
||||
|
||||
public Float getWeight() {
|
||||
return this.nodeWeight;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public IndicesShardStoresResponse.StoreStatus getStoreStatus() {
|
||||
return this.storeStatus;
|
||||
}
|
||||
|
||||
public ClusterAllocationExplanation.FinalDecision getFinalDecision() {
|
||||
return this.finalDecision;
|
||||
}
|
||||
|
||||
public String getFinalExplanation() {
|
||||
return this.finalExplanation;
|
||||
}
|
||||
|
||||
public ClusterAllocationExplanation.StoreCopy getStoreCopy() {
|
||||
return this.storeCopy;
|
||||
}
|
||||
}
|
@ -20,8 +20,13 @@
|
||||
package org.elasticsearch.action.admin.cluster.allocation;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
import org.apache.lucene.index.CorruptIndexException;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresRequest;
|
||||
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse;
|
||||
import org.elasticsearch.action.admin.indices.shards.TransportIndicesShardStoresAction;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
|
||||
import org.elasticsearch.cluster.ClusterInfoService;
|
||||
@ -47,8 +52,10 @@ import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.collect.ImmutableOpenIntMap;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportService;
|
||||
|
||||
@ -56,6 +63,7 @@ import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* The {@code TransportClusterAllocationExplainAction} is responsible for actually executing the explanation of a shard's allocation on the
|
||||
@ -68,19 +76,22 @@ public class TransportClusterAllocationExplainAction
|
||||
private final ClusterInfoService clusterInfoService;
|
||||
private final AllocationDeciders allocationDeciders;
|
||||
private final ShardsAllocator shardAllocator;
|
||||
private final TransportIndicesShardStoresAction shardStoresAction;
|
||||
|
||||
@Inject
|
||||
public TransportClusterAllocationExplainAction(Settings settings, TransportService transportService, ClusterService clusterService,
|
||||
ThreadPool threadPool, ActionFilters actionFilters,
|
||||
IndexNameExpressionResolver indexNameExpressionResolver,
|
||||
AllocationService allocationService, ClusterInfoService clusterInfoService,
|
||||
AllocationDeciders allocationDeciders, ShardsAllocator shardAllocator) {
|
||||
AllocationDeciders allocationDeciders, ShardsAllocator shardAllocator,
|
||||
TransportIndicesShardStoresAction shardStoresAction) {
|
||||
super(settings, ClusterAllocationExplainAction.NAME, transportService, clusterService, threadPool, actionFilters,
|
||||
indexNameExpressionResolver, ClusterAllocationExplainRequest::new);
|
||||
this.allocationService = allocationService;
|
||||
this.clusterInfoService = clusterInfoService;
|
||||
this.allocationDeciders = allocationDeciders;
|
||||
this.shardAllocator = shardAllocator;
|
||||
this.shardStoresAction = shardStoresAction;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -118,12 +129,86 @@ public class TransportClusterAllocationExplainAction
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a {@code NodeExplanation} object for the given shard, given all the metadata. This also attempts to construct the human
|
||||
* readable FinalDecision and final explanation as part of the explanation.
|
||||
*/
|
||||
public static NodeExplanation calculateNodeExplanation(ShardRouting shard,
|
||||
IndexMetaData indexMetaData,
|
||||
DiscoveryNode node,
|
||||
Decision nodeDecision,
|
||||
Float nodeWeight,
|
||||
IndicesShardStoresResponse.StoreStatus storeStatus,
|
||||
String assignedNodeId,
|
||||
Set<String> activeAllocationIds) {
|
||||
final ClusterAllocationExplanation.FinalDecision finalDecision;
|
||||
final ClusterAllocationExplanation.StoreCopy storeCopy;
|
||||
final String finalExplanation;
|
||||
|
||||
if (storeStatus == null) {
|
||||
// No copies of the data
|
||||
storeCopy = ClusterAllocationExplanation.StoreCopy.NONE;
|
||||
} else {
|
||||
final Throwable storeErr = storeStatus.getStoreException();
|
||||
if (storeErr != null) {
|
||||
if (ExceptionsHelper.unwrapCause(storeErr) instanceof CorruptIndexException) {
|
||||
storeCopy = ClusterAllocationExplanation.StoreCopy.CORRUPT;
|
||||
} else {
|
||||
storeCopy = ClusterAllocationExplanation.StoreCopy.IO_ERROR;
|
||||
}
|
||||
} else if (activeAllocationIds.isEmpty()) {
|
||||
// The ids are only empty if dealing with a legacy index
|
||||
// TODO: fetch the shard state versions and display here?
|
||||
storeCopy = ClusterAllocationExplanation.StoreCopy.UNKNOWN;
|
||||
} else if (activeAllocationIds.contains(storeStatus.getAllocationId())) {
|
||||
storeCopy = ClusterAllocationExplanation.StoreCopy.AVAILABLE;
|
||||
} else {
|
||||
// Otherwise, this is a stale copy of the data (allocation ids don't match)
|
||||
storeCopy = ClusterAllocationExplanation.StoreCopy.STALE;
|
||||
}
|
||||
}
|
||||
|
||||
if (node.getId().equals(assignedNodeId)) {
|
||||
finalDecision = ClusterAllocationExplanation.FinalDecision.ALREADY_ASSIGNED;
|
||||
finalExplanation = "the shard is already assigned to this node";
|
||||
} else if (shard.primary() && shard.unassigned() && shard.allocatedPostIndexCreate(indexMetaData) &&
|
||||
storeCopy == ClusterAllocationExplanation.StoreCopy.STALE) {
|
||||
finalExplanation = "the copy of the shard is stale, allocation ids do not match";
|
||||
finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
|
||||
} else if (shard.primary() && shard.unassigned() && shard.allocatedPostIndexCreate(indexMetaData) &&
|
||||
storeCopy == ClusterAllocationExplanation.StoreCopy.NONE) {
|
||||
finalExplanation = "there is no copy of the shard available";
|
||||
finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
|
||||
} else if (shard.primary() && shard.unassigned() && storeCopy == ClusterAllocationExplanation.StoreCopy.CORRUPT) {
|
||||
finalExplanation = "the copy of the shard is corrupt";
|
||||
finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
|
||||
} else if (shard.primary() && shard.unassigned() && storeCopy == ClusterAllocationExplanation.StoreCopy.IO_ERROR) {
|
||||
finalExplanation = "the copy of the shard cannot be read";
|
||||
finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
|
||||
} else {
|
||||
if (nodeDecision.type() == Decision.Type.NO) {
|
||||
finalDecision = ClusterAllocationExplanation.FinalDecision.NO;
|
||||
finalExplanation = "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision";
|
||||
} else {
|
||||
finalDecision = ClusterAllocationExplanation.FinalDecision.YES;
|
||||
if (storeCopy == ClusterAllocationExplanation.StoreCopy.AVAILABLE) {
|
||||
finalExplanation = "the shard can be assigned and the node contains a valid copy of the shard data";
|
||||
} else {
|
||||
finalExplanation = "the shard can be assigned";
|
||||
}
|
||||
}
|
||||
}
|
||||
return new NodeExplanation(node, nodeDecision, nodeWeight, storeStatus, finalDecision, finalExplanation, storeCopy);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* For the given {@code ShardRouting}, return the explanation of the allocation for that shard on all nodes. If {@code
|
||||
* includeYesDecisions} is true, returns all decisions, otherwise returns only 'NO' and 'THROTTLE' decisions.
|
||||
*/
|
||||
public static ClusterAllocationExplanation explainShard(ShardRouting shard, RoutingAllocation allocation, RoutingNodes routingNodes,
|
||||
boolean includeYesDecisions, ShardsAllocator shardAllocator) {
|
||||
boolean includeYesDecisions, ShardsAllocator shardAllocator,
|
||||
List<IndicesShardStoresResponse.StoreStatus> shardStores) {
|
||||
// don't short circuit deciders, we want a full explanation
|
||||
allocation.debugDecision(true);
|
||||
// get the existing unassigned info if available
|
||||
@ -139,14 +224,35 @@ public class TransportClusterAllocationExplainAction
|
||||
nodeToDecision.put(discoNode, d);
|
||||
}
|
||||
}
|
||||
long remainingDelayNanos = 0;
|
||||
long remainingDelayMillis = 0;
|
||||
final MetaData metadata = allocation.metaData();
|
||||
final IndexMetaData indexMetaData = metadata.index(shard.index());
|
||||
if (ui != null) {
|
||||
final MetaData metadata = allocation.metaData();
|
||||
final Settings indexSettings = metadata.index(shard.index()).getSettings();
|
||||
remainingDelayNanos = ui.getRemainingDelay(System.nanoTime(), metadata.settings(), indexSettings);
|
||||
final Settings indexSettings = indexMetaData.getSettings();
|
||||
long remainingDelayNanos = ui.getRemainingDelay(System.nanoTime(), metadata.settings(), indexSettings);
|
||||
remainingDelayMillis = TimeValue.timeValueNanos(remainingDelayNanos).millis();
|
||||
}
|
||||
return new ClusterAllocationExplanation(shard.shardId(), shard.primary(), shard.currentNodeId(), ui, nodeToDecision,
|
||||
shardAllocator.weighShard(allocation, shard), remainingDelayNanos);
|
||||
|
||||
// Calculate weights for each of the nodes
|
||||
Map<DiscoveryNode, Float> weights = shardAllocator.weighShard(allocation, shard);
|
||||
|
||||
Map<DiscoveryNode, IndicesShardStoresResponse.StoreStatus> nodeToStatus = new HashMap<>(shardStores.size());
|
||||
for (IndicesShardStoresResponse.StoreStatus status : shardStores) {
|
||||
nodeToStatus.put(status.getNode(), status);
|
||||
}
|
||||
|
||||
Map<DiscoveryNode, NodeExplanation> explanations = new HashMap<>(shardStores.size());
|
||||
for (Map.Entry<DiscoveryNode, Decision> entry : nodeToDecision.entrySet()) {
|
||||
DiscoveryNode node = entry.getKey();
|
||||
Decision decision = entry.getValue();
|
||||
Float weight = weights.get(node);
|
||||
IndicesShardStoresResponse.StoreStatus storeStatus = nodeToStatus.get(node);
|
||||
NodeExplanation nodeExplanation = calculateNodeExplanation(shard, indexMetaData, node, decision, weight,
|
||||
storeStatus, shard.currentNodeId(), indexMetaData.activeAllocationIds(shard.getId()));
|
||||
explanations.put(node, nodeExplanation);
|
||||
}
|
||||
return new ClusterAllocationExplanation(shard.shardId(), shard.primary(),
|
||||
shard.currentNodeId(), remainingDelayMillis, ui, explanations);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -156,30 +262,30 @@ public class TransportClusterAllocationExplainAction
|
||||
final RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, routingNodes, state.nodes(),
|
||||
clusterInfoService.getClusterInfo(), System.nanoTime());
|
||||
|
||||
ShardRouting shardRouting = null;
|
||||
ShardRouting foundShard = null;
|
||||
if (request.useAnyUnassignedShard()) {
|
||||
// If we can use any shard, just pick the first unassigned one (if there are any)
|
||||
RoutingNodes.UnassignedShards.UnassignedIterator ui = routingNodes.unassigned().iterator();
|
||||
if (ui.hasNext()) {
|
||||
shardRouting = ui.next();
|
||||
foundShard = ui.next();
|
||||
}
|
||||
} else {
|
||||
String index = request.getIndex();
|
||||
int shard = request.getShard();
|
||||
if (request.isPrimary()) {
|
||||
// If we're looking for the primary shard, there's only one copy, so pick it directly
|
||||
shardRouting = allocation.routingTable().shardRoutingTable(index, shard).primaryShard();
|
||||
foundShard = allocation.routingTable().shardRoutingTable(index, shard).primaryShard();
|
||||
} else {
|
||||
// If looking for a replica, go through all the replica shards
|
||||
List<ShardRouting> replicaShardRoutings = allocation.routingTable().shardRoutingTable(index, shard).replicaShards();
|
||||
if (replicaShardRoutings.size() > 0) {
|
||||
// Pick the first replica at the very least
|
||||
shardRouting = replicaShardRoutings.get(0);
|
||||
foundShard = replicaShardRoutings.get(0);
|
||||
// In case there are multiple replicas where some are assigned and some aren't,
|
||||
// try to find one that is unassigned at least
|
||||
for (ShardRouting replica : replicaShardRoutings) {
|
||||
if (replica.unassigned()) {
|
||||
shardRouting = replica;
|
||||
foundShard = replica;
|
||||
break;
|
||||
}
|
||||
}
|
||||
@ -187,14 +293,34 @@ public class TransportClusterAllocationExplainAction
|
||||
}
|
||||
}
|
||||
|
||||
if (shardRouting == null) {
|
||||
if (foundShard == null) {
|
||||
listener.onFailure(new ElasticsearchException("unable to find any shards to explain [{}] in the routing table", request));
|
||||
return;
|
||||
}
|
||||
final ShardRouting shardRouting = foundShard;
|
||||
logger.debug("explaining the allocation for [{}], found shard [{}]", request, shardRouting);
|
||||
|
||||
ClusterAllocationExplanation cae = explainShard(shardRouting, allocation, routingNodes,
|
||||
request.includeYesDecisions(), shardAllocator);
|
||||
listener.onResponse(new ClusterAllocationExplainResponse(cae));
|
||||
getShardStores(shardRouting, new ActionListener<IndicesShardStoresResponse>() {
|
||||
@Override
|
||||
public void onResponse(IndicesShardStoresResponse shardStoreResponse) {
|
||||
ImmutableOpenIntMap<List<IndicesShardStoresResponse.StoreStatus>> shardStatuses =
|
||||
shardStoreResponse.getStoreStatuses().get(shardRouting.getIndexName());
|
||||
List<IndicesShardStoresResponse.StoreStatus> shardStoreStatus = shardStatuses.get(shardRouting.id());
|
||||
ClusterAllocationExplanation cae = explainShard(shardRouting, allocation, routingNodes,
|
||||
request.includeYesDecisions(), shardAllocator, shardStoreStatus);
|
||||
listener.onResponse(new ClusterAllocationExplainResponse(cae));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Throwable e) {
|
||||
listener.onFailure(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void getShardStores(ShardRouting shard, final ActionListener<IndicesShardStoresResponse> listener) {
|
||||
IndicesShardStoresRequest request = new IndicesShardStoresRequest(shard.getIndexName());
|
||||
request.shardStatuses("all");
|
||||
shardStoresAction.execute(request, listener);
|
||||
}
|
||||
}
|
||||
|
@ -164,7 +164,7 @@ public class IndicesShardStoresResponse extends ActionResponse implements ToXCon
|
||||
return allocationStatus;
|
||||
}
|
||||
|
||||
static StoreStatus readStoreStatus(StreamInput in) throws IOException {
|
||||
public static StoreStatus readStoreStatus(StreamInput in) throws IOException {
|
||||
StoreStatus storeStatus = new StoreStatus();
|
||||
storeStatus.readFrom(in);
|
||||
return storeStatus;
|
||||
|
@ -31,7 +31,6 @@ import org.elasticsearch.search.Scroll;
|
||||
import org.elasticsearch.search.aggregations.AggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.index.query.support.InnerHitsBuilder;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
@ -400,11 +399,6 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
|
||||
return this;
|
||||
}
|
||||
|
||||
public SearchRequestBuilder innerHits(InnerHitsBuilder innerHitsBuilder) {
|
||||
sourceBuilder().innerHits(innerHitsBuilder);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clears all rescorers on the builder and sets the first one. To use multiple rescore windows use
|
||||
* {@link #addRescorer(org.elasticsearch.search.rescore.RescoreBuilder, int)}.
|
||||
|
@ -150,7 +150,7 @@ public class MultiPhrasePrefixQuery extends Query {
|
||||
}
|
||||
}
|
||||
if (terms.isEmpty()) {
|
||||
return Queries.newMatchNoDocsQuery();
|
||||
return Queries.newMatchNoDocsQuery("No terms supplied for " + MultiPhrasePrefixQuery.class.getName());
|
||||
}
|
||||
query.add(terms.toArray(Term.class), position);
|
||||
return query.build();
|
||||
|
@ -44,8 +44,8 @@ public class Queries {
|
||||
}
|
||||
|
||||
/** Return a query that matches no document. */
|
||||
public static Query newMatchNoDocsQuery() {
|
||||
return new BooleanQuery.Builder().build();
|
||||
public static Query newMatchNoDocsQuery(String reason) {
|
||||
return new MatchNoDocsQuery(reason);
|
||||
}
|
||||
|
||||
public static Query newNestedFilter() {
|
||||
|
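The Queries change above swaps the empty BooleanQuery for Lucene's MatchNoDocsQuery and threads a human-readable reason into it. A small sketch of what callers gain; the field name and message are invented for the example.

import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;

public class MatchNoneReasonSketch {
    public static void main(String[] args) {
        // The reason is carried by the query and surfaces in toString()/explain output,
        // which makes "why did this match nothing?" much easier to debug.
        Query none = new MatchNoDocsQuery("No terms supplied for field [user_name]");
        System.out.println(none);
    }
}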
@ -344,7 +344,7 @@ public class Setting<T> extends ToXContentToBytes {
|
||||
return get(primary);
|
||||
}
|
||||
if (fallbackSetting == null) {
|
||||
return get(secondary);
|
||||
return get(secondary);
|
||||
}
|
||||
if (exists(secondary)) {
|
||||
return get(secondary);
|
||||
@ -599,7 +599,6 @@ public class Setting<T> extends ToXContentToBytes {
|
||||
|
||||
return new Setting<List<T>>(new ListKey(key),
|
||||
(s) -> arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser, properties) {
|
||||
private final Pattern pattern = Pattern.compile(Pattern.quote(key)+"(\\.\\d+)?");
|
||||
@Override
|
||||
public String getRaw(Settings settings) {
|
||||
String[] array = settings.getAsArray(getKey(), null);
|
||||
@ -610,6 +609,12 @@ public class Setting<T> extends ToXContentToBytes {
|
||||
boolean hasComplexMatcher() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean exists(Settings settings) {
|
||||
boolean exists = super.exists(settings);
|
||||
return exists || settings.get(getKey() + ".0") != null;
|
||||
}
|
||||
};
|
||||
}
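The new exists override accounts for array-style settings, which are stored under numbered child keys rather than the bare key. A hypothetical illustration of why the extra ".0" probe is needed, assuming the Settings.builder() entry point and using invented key names:

import org.elasticsearch.common.settings.Settings;

public class ListSettingExistsSketch {
    public static void main(String[] args) {
        // An array value for "foo.bar" is stored as foo.bar.0, foo.bar.1, ...
        Settings settings = Settings.builder()
                .put("foo.bar.0", "a")
                .put("foo.bar.1", "b")
                .build();
        System.out.println(settings.get("foo.bar"));   // null: nothing under the bare key
        System.out.println(settings.get("foo.bar.0")); // "a": hence the ".0" probe in exists()
    }
}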
|
||||
|
||||
|
@ -175,9 +175,7 @@ public class SizeValue implements Streamable {
|
||||
}
|
||||
long singles;
|
||||
try {
|
||||
if (sValue.endsWith("b")) {
|
||||
singles = Long.parseLong(sValue.substring(0, sValue.length() - 1));
|
||||
} else if (sValue.endsWith("k") || sValue.endsWith("K")) {
|
||||
if (sValue.endsWith("k") || sValue.endsWith("K")) {
|
||||
singles = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * SizeUnit.C1);
|
||||
} else if (sValue.endsWith("m") || sValue.endsWith("M")) {
|
||||
singles = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * SizeUnit.C2);
|
||||
@ -232,4 +230,4 @@ public class SizeValue implements Streamable {
|
||||
result = 31 * result + (sizeUnit != null ? sizeUnit.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -50,7 +50,7 @@ public class KeyedLock<T> {
|
||||
this(false);
|
||||
}
|
||||
|
||||
private final ConcurrentMap<T, KeyLock> map = ConcurrentCollections.newConcurrentMap();
|
||||
private final ConcurrentMap<T, KeyLock> map = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();
|
||||
|
||||
public Releasable acquire(T key) {
|
||||
assert isHeldByCurrentThread(key) == false : "lock for " + key + " is already held by this thread";
|
||||
|
@ -41,7 +41,6 @@ import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.InfoStream;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.cluster.routing.Murmur3HashFunction;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.lease.Releasable;
|
||||
import org.elasticsearch.common.logging.ESLogger;
|
||||
@ -51,6 +50,7 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
|
||||
import org.elasticsearch.common.lucene.uid.Versions;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
|
||||
import org.elasticsearch.common.util.concurrent.KeyedLock;
|
||||
import org.elasticsearch.common.util.concurrent.ReleasableLock;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
@ -100,7 +100,7 @@ public class InternalEngine extends Engine {
|
||||
// we use the hashed variant since we iterate over it and check removal and additions on existing keys
|
||||
private final LiveVersionMap versionMap;
|
||||
|
||||
private final Object[] dirtyLocks;
|
||||
private final KeyedLock<BytesRef> keyedLock = new KeyedLock<>();
|
||||
|
||||
private final AtomicBoolean versionMapRefreshPending = new AtomicBoolean();
|
||||
|
||||
@ -128,10 +128,6 @@ public class InternalEngine extends Engine {
|
||||
try {
|
||||
this.lastDeleteVersionPruneTimeMSec = engineConfig.getThreadPool().estimatedTimeInMillis();
|
||||
mergeScheduler = scheduler = new EngineMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings());
|
||||
this.dirtyLocks = new Object[Runtime.getRuntime().availableProcessors() * 10]; // we multiply it to have enough...
|
||||
for (int i = 0; i < dirtyLocks.length; i++) {
|
||||
dirtyLocks[i] = new Object();
|
||||
}
|
||||
throttle = new IndexThrottle();
|
||||
this.searcherFactory = new SearchFactory(logger, isClosed, engineConfig);
|
||||
try {
|
||||
@ -356,7 +352,7 @@ public class InternalEngine extends Engine {
|
||||
}
|
||||
|
||||
private boolean innerIndex(Index index) throws IOException {
|
||||
synchronized (dirtyLock(index.uid())) {
|
||||
try (Releasable ignored = acquireLock(index.uid())) {
|
||||
lastWriteNanos = index.startTime();
|
||||
final long currentVersion;
|
||||
final boolean deleted;
|
||||
@ -451,7 +447,7 @@ public class InternalEngine extends Engine {
|
||||
}
|
||||
|
||||
private void innerDelete(Delete delete) throws IOException {
|
||||
synchronized (dirtyLock(delete.uid())) {
|
||||
try (Releasable ignored = acquireLock(delete.uid())) {
|
||||
lastWriteNanos = delete.startTime();
|
||||
final long currentVersion;
|
||||
final boolean deleted;
|
||||
@ -708,7 +704,7 @@ public class InternalEngine extends Engine {
|
||||
// we only need to prune the deletes map; the current/old version maps are cleared on refresh:
|
||||
for (Map.Entry<BytesRef, VersionValue> entry : versionMap.getAllTombstones()) {
|
||||
BytesRef uid = entry.getKey();
|
||||
synchronized (dirtyLock(uid)) { // can we do it without this lock on each value? maybe batch to a set and get the lock once per set?
|
||||
try (Releasable ignored = acquireLock(uid)) { // can we do it without this lock on each value? maybe batch to a set and get the lock once per set?
|
||||
|
||||
// Must re-get it here, vs using entry.getValue(), in case the uid was indexed/deleted since we pulled the iterator:
|
||||
VersionValue versionValue = versionMap.getTombstoneUnderLock(uid);
|
||||
@ -908,13 +904,12 @@ public class InternalEngine extends Engine {
|
||||
return searcherManager;
|
||||
}
|
||||
|
||||
private Object dirtyLock(BytesRef uid) {
|
||||
int hash = Murmur3HashFunction.hash(uid.bytes, uid.offset, uid.length);
|
||||
return dirtyLocks[Math.floorMod(hash, dirtyLocks.length)];
|
||||
private Releasable acquireLock(BytesRef uid) {
|
||||
return keyedLock.acquire(uid);
|
||||
}
|
||||
|
||||
private Object dirtyLock(Term uid) {
|
||||
return dirtyLock(uid.bytes());
|
||||
private Releasable acquireLock(Term uid) {
|
||||
return acquireLock(uid.bytes());
|
||||
}
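InternalEngine now takes a per-uid lock through KeyedLock instead of hashing uids onto a fixed array of monitor objects. A minimal sketch of that pattern, assuming only the KeyedLock.acquire/Releasable API visible in this diff:

import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.util.concurrent.KeyedLock;

public class PerKeyLockingSketch {
    private final KeyedLock<String> keyedLock = new KeyedLock<>();

    public void update(String uid) {
        // acquire() blocks until the lock for this exact key is free and hands back a
        // Releasable, so try-with-resources releases it even if the body throws.
        try (Releasable ignored = keyedLock.acquire(uid)) {
            // ... perform the versioned update for this uid ...
        }
    }
}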
|
||||
|
||||
private long loadCurrentVersionFromIndex(Term uid) throws IOException {
|
||||
|
@ -144,7 +144,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
|
||||
if (isSameIndex(value, context.index().getName())) {
|
||||
return Queries.newMatchAllQuery();
|
||||
} else {
|
||||
return Queries.newMatchNoDocsQuery();
|
||||
return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName() + " vs. " + value);
|
||||
}
|
||||
}
|
||||
|
||||
@ -161,7 +161,7 @@ public class IndexFieldMapper extends MetadataFieldMapper {
|
||||
}
|
||||
}
|
||||
// None of the listed index names are this one
|
||||
return Queries.newMatchNoDocsQuery();
|
||||
return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName() + " vs. " + values);
|
||||
}
|
||||
|
||||
private boolean isSameIndex(Object value, String indexName) {
|
||||
|
@ -36,6 +36,7 @@ import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
@ -273,6 +274,15 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder<QB>>
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* For internal usage only!
|
||||
*
|
||||
* Extracts the inner hits from the query tree.
|
||||
* While it extracts inner hits, child inner hits are inlined into the inner hit builder they belong to.
|
||||
*/
|
||||
protected void extractInnerHitBuilders(Map<String, InnerHitBuilder> innerHits) {
|
||||
}
|
||||
|
||||
// Like Objects.requireNonNull(...) but instead throws an IllegalArgumentException
|
||||
protected static <T> T requireValue(T value, String message) {
|
||||
if (value == null) {
|
||||
|
@ -35,6 +35,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
@ -495,6 +496,17 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void extractInnerHitBuilders(Map<String, InnerHitBuilder> innerHits) {
|
||||
List<QueryBuilder<?>> clauses = new ArrayList<>(filter());
|
||||
clauses.addAll(must());
|
||||
clauses.addAll(should());
|
||||
// no need to include must_not (since there will be no hits for it)
|
||||
for (QueryBuilder<?> clause : clauses) {
|
||||
InnerHitBuilder.extractInnerHits(clause, innerHits);
|
||||
}
|
||||
}
|
||||
|
||||
private static boolean rewriteClauses(QueryRewriteContext queryRewriteContext, List<QueryBuilder<?>> builders,
|
||||
Consumer<QueryBuilder<?>> consumer) throws IOException {
|
||||
boolean changed = false;
|
||||
|
@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
@ -235,4 +236,10 @@ public class BoostingQueryBuilder extends AbstractQueryBuilder<BoostingQueryBuil
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void extractInnerHitBuilders(Map<String, InnerHitBuilder> innerHits) {
|
||||
InnerHitBuilder.extractInnerHits(positiveQuery, innerHits);
|
||||
InnerHitBuilder.extractInnerHits(negativeQuery, innerHits);
|
||||
}
|
||||
}
|
||||
|
@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
@ -169,4 +170,9 @@ public class ConstantScoreQueryBuilder extends AbstractQueryBuilder<ConstantScor
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void extractInnerHitBuilders(Map<String, InnerHitBuilder> innerHits) {
|
||||
InnerHitBuilder.extractInnerHits(filterBuilder, innerHits);
|
||||
}
|
||||
}
|
||||
|
@ -134,7 +134,7 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
|
||||
(FieldNamesFieldMapper.FieldNamesFieldType)context.getMapperService().fullName(FieldNamesFieldMapper.NAME);
|
||||
if (fieldNamesFieldType == null) {
|
||||
// can only happen when no types exist, so no docs exist either
|
||||
return Queries.newMatchNoDocsQuery();
|
||||
return Queries.newMatchNoDocsQuery("Missing types in \"" + NAME + "\" query.");
|
||||
}
|
||||
|
||||
final Collection<String> fields;
|
||||
|
@ -38,10 +38,10 @@ import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
|
||||
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
||||
import org.elasticsearch.index.query.support.InnerHitBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
@ -151,9 +151,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
|
||||
}
|
||||
|
||||
public HasChildQueryBuilder innerHit(InnerHitBuilder innerHit) {
|
||||
innerHit.setParentChildType(type);
|
||||
innerHit.setQuery(query);
|
||||
this.innerHitBuilder = innerHit;
|
||||
this.innerHitBuilder = new InnerHitBuilder(Objects.requireNonNull(innerHit), query, type);
|
||||
return this;
|
||||
}
|
||||
|
||||
@ -274,8 +272,11 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
|
||||
}
|
||||
}
|
||||
}
|
||||
HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(childType, iqb, minChildren, maxChildren,
|
||||
scoreMode, innerHitBuilder);
|
||||
HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder(childType, iqb, scoreMode);
|
||||
if (innerHitBuilder != null) {
|
||||
hasChildQueryBuilder.innerHit(innerHitBuilder);
|
||||
}
|
||||
hasChildQueryBuilder.minMaxChildren(minChildren, maxChildren);
|
||||
hasChildQueryBuilder.queryName(queryName);
|
||||
hasChildQueryBuilder.boost(boost);
|
||||
hasChildQueryBuilder.ignoreUnmapped(ignoreUnmapped);
|
||||
@ -337,10 +338,6 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
|
||||
if (parentFieldMapper.active() == false) {
|
||||
throw new QueryShardException(context, "[" + NAME + "] _parent field has no parent type configured");
|
||||
}
|
||||
if (innerHitBuilder != null) {
|
||||
context.addInnerHit(innerHitBuilder);
|
||||
}
|
||||
|
||||
String parentType = parentFieldMapper.type();
|
||||
DocumentMapper parentDocMapper = context.getMapperService().documentMapper(parentType);
|
||||
if (parentDocMapper == null) {
|
||||
@ -477,4 +474,11 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void extractInnerHitBuilders(Map<String, InnerHitBuilder> innerHits) {
|
||||
if (innerHitBuilder != null) {
|
||||
innerHitBuilder.inlineInnerHits(innerHits);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -33,10 +33,10 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
|
||||
import org.elasticsearch.index.query.support.InnerHitBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
|
||||
@ -127,9 +127,7 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
|
||||
}
|
||||
|
||||
public HasParentQueryBuilder innerHit(InnerHitBuilder innerHit) {
|
||||
innerHit.setParentChildType(type);
|
||||
innerHit.setQuery(query);
|
||||
this.innerHit = innerHit;
|
||||
this.innerHit = new InnerHitBuilder(innerHit, query, type);
|
||||
return this;
|
||||
}
|
||||
|
||||
@ -175,10 +173,6 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
|
||||
}
|
||||
}
|
||||
|
||||
if (innerHit != null) {
|
||||
context.addInnerHit(innerHit);
|
||||
}
|
||||
|
||||
Set<String> childTypes = new HashSet<>();
|
||||
ParentChildIndexFieldData parentChildIndexFieldData = null;
|
||||
for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
|
||||
@ -282,8 +276,14 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
|
||||
}
|
||||
}
|
||||
}
|
||||
return new HasParentQueryBuilder(parentType, iqb, score, innerHits).ignoreUnmapped(ignoreUnmapped).queryName(queryName)
|
||||
HasParentQueryBuilder queryBuilder = new HasParentQueryBuilder(parentType, iqb, score)
|
||||
.ignoreUnmapped(ignoreUnmapped)
|
||||
.queryName(queryName)
|
||||
.boost(boost);
|
||||
if (innerHits != null) {
|
||||
queryBuilder.innerHit(innerHits);
|
||||
}
|
||||
return queryBuilder;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -313,4 +313,11 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void extractInnerHitBuilders(Map<String, InnerHitBuilder> innerHits) {
|
||||
if (innerHit != null) {
|
||||
innerHit.inlineInnerHits(innerHits);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -204,7 +204,7 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
|
||||
protected Query doToQuery(QueryShardContext context) throws IOException {
|
||||
Query query;
|
||||
if (this.ids.isEmpty()) {
|
||||
query = Queries.newMatchNoDocsQuery();
|
||||
query = Queries.newMatchNoDocsQuery("Missing ids in \"" + this.getName() + "\" query.");
|
||||
} else {
|
||||
Collection<String> typesForQuery;
|
||||
if (types.length == 0) {
|
||||
|
@ -16,7 +16,7 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.query.support;
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.elasticsearch.action.support.ToXContentToBytes;
|
||||
@ -30,11 +30,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.index.query.MatchAllQueryBuilder;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
@ -62,15 +57,12 @@ import static org.elasticsearch.common.xcontent.XContentParser.Token.END_OBJECT;
|
||||
public final class InnerHitBuilder extends ToXContentToBytes implements Writeable {
|
||||
|
||||
public static final ParseField NAME_FIELD = new ParseField("name");
|
||||
public static final ParseField NESTED_PATH_FIELD = new ParseField("path");
|
||||
public static final ParseField PARENT_CHILD_TYPE_FIELD = new ParseField("type");
|
||||
public static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits");
|
||||
|
||||
private final static ObjectParser<InnerHitBuilder, QueryParseContext> PARSER = new ObjectParser<>("inner_hits", InnerHitBuilder::new);
|
||||
|
||||
static {
|
||||
PARSER.declareString(InnerHitBuilder::setName, NAME_FIELD);
|
||||
PARSER.declareString(InnerHitBuilder::setNestedPath, NESTED_PATH_FIELD);
|
||||
PARSER.declareString(InnerHitBuilder::setParentChildType, PARENT_CHILD_TYPE_FIELD);
|
||||
PARSER.declareInt(InnerHitBuilder::setFrom, SearchSourceBuilder.FROM_FIELD);
|
||||
PARSER.declareInt(InnerHitBuilder::setSize, SearchSourceBuilder.SIZE_FIELD);
|
||||
PARSER.declareBoolean(InnerHitBuilder::setExplain, SearchSourceBuilder.EXPLAIN_FIELD);
|
||||
@ -100,20 +92,30 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
||||
}, SearchSourceBuilder._SOURCE_FIELD, ObjectParser.ValueType.OBJECT_OR_BOOLEAN);
|
||||
PARSER.declareObject(InnerHitBuilder::setHighlightBuilder, (p, c) -> HighlightBuilder.fromXContent(c),
|
||||
SearchSourceBuilder.HIGHLIGHT_FIELD);
|
||||
PARSER.declareObject(InnerHitBuilder::setQuery, (p, c) ->{
|
||||
PARSER.declareObject(InnerHitBuilder::setChildInnerHits, (p, c) -> {
|
||||
try {
|
||||
return c.parseInnerQueryBuilder();
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
String innerHitName = null;
|
||||
for (XContentParser.Token token = p.nextToken(); token != XContentParser.Token.END_OBJECT; token = p.nextToken()) {
|
||||
switch (token) {
|
||||
case START_OBJECT:
|
||||
InnerHitBuilder innerHitBuilder = InnerHitBuilder.fromXContent(c);
|
||||
innerHitBuilder.setName(innerHitName);
|
||||
innerHitBuilders.put(innerHitName, innerHitBuilder);
|
||||
break;
|
||||
case FIELD_NAME:
|
||||
innerHitName = p.currentName();
|
||||
break;
|
||||
default:
|
||||
throw new ParsingException(p.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] in ["
|
||||
+ p.currentName() + "] but found [" + token + "]", p.getTokenLocation());
|
||||
}
|
||||
}
|
||||
return innerHitBuilders;
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(p.getTokenLocation(), "Could not parse inner query definition", e);
|
||||
}
|
||||
}, SearchSourceBuilder.QUERY_FIELD);
|
||||
PARSER.declareObject(InnerHitBuilder::setInnerHitsBuilder, (p, c) -> {
|
||||
try {
|
||||
return InnerHitsBuilder.fromXContent(c);
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(p.getTokenLocation(), "Could not parse inner query definition", e);
|
||||
}
|
||||
}, SearchSourceBuilder.INNER_HITS_FIELD);
|
||||
}, INNER_HITS_FIELD);
|
||||
}
|
||||
|
||||
private String name;
|
||||
@ -132,8 +134,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
||||
private List<String> fieldDataFields;
|
||||
private List<ScriptField> scriptFields;
|
||||
private HighlightBuilder highlightBuilder;
|
||||
private InnerHitsBuilder innerHitsBuilder;
|
||||
private FetchSourceContext fetchSourceContext;
|
||||
private Map<String, InnerHitBuilder> childInnerHits;
|
||||
|
||||
public InnerHitBuilder() {
|
||||
}
|
||||
@ -165,7 +167,62 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
||||
}
|
||||
highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
|
||||
query = in.readNamedWriteable(QueryBuilder.class);
|
||||
innerHitsBuilder = in.readOptionalWriteable(InnerHitsBuilder::new);
|
||||
if (in.readBoolean()) {
|
||||
int size = in.readVInt();
|
||||
childInnerHits = new HashMap<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
childInnerHits.put(in.readString(), new InnerHitBuilder(in));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private InnerHitBuilder(InnerHitBuilder other) {
|
||||
name = other.name;
|
||||
from = other.from;
|
||||
size = other.size;
|
||||
explain = other.explain;
|
||||
version = other.version;
|
||||
trackScores = other.trackScores;
|
||||
if (other.fieldNames != null) {
|
||||
fieldNames = new ArrayList<>(other.fieldNames);
|
||||
}
|
||||
if (other.fieldDataFields != null) {
|
||||
fieldDataFields = new ArrayList<>(other.fieldDataFields);
|
||||
}
|
||||
if (other.scriptFields != null) {
|
||||
scriptFields = new ArrayList<>(other.scriptFields);
|
||||
}
|
||||
if (other.fetchSourceContext != null) {
|
||||
fetchSourceContext = new FetchSourceContext(
|
||||
other.fetchSourceContext.fetchSource(), other.fetchSourceContext.includes(), other.fetchSourceContext.excludes()
|
||||
);
|
||||
}
|
||||
if (other.sorts != null) {
|
||||
sorts = new ArrayList<>(other.sorts);
|
||||
}
|
||||
highlightBuilder = other.highlightBuilder;
|
||||
if (other.childInnerHits != null) {
|
||||
childInnerHits = new HashMap<>(other.childInnerHits);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
InnerHitBuilder(InnerHitBuilder other, String nestedPath, QueryBuilder query) {
|
||||
this(other);
|
||||
this.query = query;
|
||||
this.nestedPath = nestedPath;
|
||||
if (name == null) {
|
||||
this.name = nestedPath;
|
||||
}
|
||||
}
|
||||
|
||||
InnerHitBuilder(InnerHitBuilder other, QueryBuilder query, String parentChildType) {
|
||||
this(other);
|
||||
this.query = query;
|
||||
this.parentChildType = parentChildType;
|
||||
if (name == null) {
|
||||
this.name = parentChildType;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -196,17 +253,15 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
||||
}
|
||||
out.writeOptionalWriteable(highlightBuilder);
|
||||
out.writeNamedWriteable(query);
|
||||
out.writeOptionalWriteable(innerHitsBuilder);
|
||||
}
|
||||
|
||||
public InnerHitBuilder setParentChildType(String parentChildType) {
|
||||
this.parentChildType = parentChildType;
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setNestedPath(String nestedPath) {
|
||||
this.nestedPath = nestedPath;
|
||||
return this;
|
||||
boolean hasChildInnerHits = childInnerHits != null;
|
||||
out.writeBoolean(hasChildInnerHits);
|
||||
if (hasChildInnerHits) {
|
||||
out.writeVInt(childInnerHits.size());
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : childInnerHits.entrySet()) {
|
||||
out.writeString(entry.getKey());
|
||||
entry.getValue().writeTo(out);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
@ -347,72 +402,53 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
||||
return this;
|
||||
}
|
||||
|
||||
public QueryBuilder<?> getQuery() {
|
||||
QueryBuilder<?> getQuery() {
|
||||
return query;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setQuery(QueryBuilder<?> query) {
|
||||
this.query = Objects.requireNonNull(query);
|
||||
return this;
|
||||
void setChildInnerHits(Map<String, InnerHitBuilder> childInnerHits) {
|
||||
this.childInnerHits = childInnerHits;
|
||||
}
|
||||
|
||||
public InnerHitBuilder setInnerHitsBuilder(InnerHitsBuilder innerHitsBuilder) {
|
||||
this.innerHitsBuilder = innerHitsBuilder;
|
||||
return this;
|
||||
String getParentChildType() {
|
||||
return parentChildType;
|
||||
}
|
||||
|
||||
public InnerHitsContext.BaseInnerHits buildInline(SearchContext parentSearchContext, QueryShardContext context) throws IOException {
|
||||
InnerHitsContext.BaseInnerHits innerHitsContext;
|
||||
if (nestedPath != null) {
|
||||
ObjectMapper nestedObjectMapper = context.getObjectMapper(nestedPath);
|
||||
ObjectMapper parentObjectMapper = context.nestedScope().getObjectMapper();
|
||||
innerHitsContext = new InnerHitsContext.NestedInnerHits(
|
||||
name, parentSearchContext, parentObjectMapper, nestedObjectMapper
|
||||
);
|
||||
} else if (parentChildType != null) {
|
||||
DocumentMapper documentMapper = context.getMapperService().documentMapper(parentChildType);
|
||||
innerHitsContext = new InnerHitsContext.ParentChildInnerHits(
|
||||
name, parentSearchContext, context.getMapperService(), documentMapper
|
||||
);
|
||||
} else {
|
||||
throw new IllegalStateException("Neither a nested or parent/child inner hit");
|
||||
String getNestedPath() {
|
||||
return nestedPath;
|
||||
}
|
||||
|
||||
void addChildInnerHit(InnerHitBuilder innerHitBuilder) {
|
||||
if (childInnerHits == null) {
|
||||
childInnerHits = new HashMap<>();
|
||||
}
|
||||
setupInnerHitsContext(context, innerHitsContext);
|
||||
return innerHitsContext;
|
||||
this.childInnerHits.put(innerHitBuilder.getName(), innerHitBuilder);
|
||||
}
|
||||
|
||||
/**
 * Top level inner hits are different from inline inner hits:
 * 1) Nesting. Top level inner hits can hold nested inner hits, which is why this method is recursive (via buildChildInnerHits).
 * 2) The top level inner hits query is optional, whereas with inline inner hits it is based on the nested, has_child
 *    or has_parent inner query.
 *
 * Because of these differences there are separate methods for building inline (which is simpler) and top level inner
 * hits. Also, top level inner hits will soon be deprecated.
 */
public InnerHitsContext.BaseInnerHits buildTopLevel(SearchContext parentSearchContext, QueryShardContext context,
|
||||
InnerHitsContext innerHitsContext) throws IOException {
|
||||
public InnerHitsContext.BaseInnerHits build(SearchContext parentSearchContext,
|
||||
InnerHitsContext innerHitsContext) throws IOException {
|
||||
QueryShardContext queryShardContext = parentSearchContext.getQueryShardContext();
|
||||
if (nestedPath != null) {
|
||||
ObjectMapper nestedObjectMapper = context.getObjectMapper(nestedPath);
|
||||
ObjectMapper parentObjectMapper = context.nestedScope().nextLevel(nestedObjectMapper);
|
||||
ObjectMapper nestedObjectMapper = queryShardContext.getObjectMapper(nestedPath);
|
||||
ObjectMapper parentObjectMapper = queryShardContext.nestedScope().nextLevel(nestedObjectMapper);
|
||||
InnerHitsContext.NestedInnerHits nestedInnerHits = new InnerHitsContext.NestedInnerHits(
|
||||
name, parentSearchContext, parentObjectMapper, nestedObjectMapper
|
||||
);
|
||||
setupInnerHitsContext(context, nestedInnerHits);
|
||||
if (innerHitsBuilder != null) {
|
||||
buildChildInnerHits(parentSearchContext, context, nestedInnerHits);
|
||||
setupInnerHitsContext(queryShardContext, nestedInnerHits);
|
||||
if (childInnerHits != null) {
|
||||
buildChildInnerHits(parentSearchContext, nestedInnerHits);
|
||||
}
|
||||
context.nestedScope().previousLevel();
|
||||
queryShardContext.nestedScope().previousLevel();
|
||||
innerHitsContext.addInnerHitDefinition(nestedInnerHits);
|
||||
return nestedInnerHits;
|
||||
} else if (parentChildType != null) {
|
||||
DocumentMapper documentMapper = context.getMapperService().documentMapper(parentChildType);
|
||||
DocumentMapper documentMapper = queryShardContext.getMapperService().documentMapper(parentChildType);
|
||||
InnerHitsContext.ParentChildInnerHits parentChildInnerHits = new InnerHitsContext.ParentChildInnerHits(
|
||||
name, parentSearchContext, context.getMapperService(), documentMapper
|
||||
name, parentSearchContext, queryShardContext.getMapperService(), documentMapper
|
||||
);
|
||||
setupInnerHitsContext(context, parentChildInnerHits);
|
||||
if (innerHitsBuilder != null) {
|
||||
buildChildInnerHits(parentSearchContext, context, parentChildInnerHits);
|
||||
setupInnerHitsContext(queryShardContext, parentChildInnerHits);
|
||||
if (childInnerHits != null) {
|
||||
buildChildInnerHits(parentSearchContext, parentChildInnerHits);
|
||||
}
|
||||
innerHitsContext.addInnerHitDefinition(parentChildInnerHits);
|
||||
return parentChildInnerHits;
|
||||
@ -421,12 +457,11 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
||||
}
|
||||
}
|
||||
|
||||
private void buildChildInnerHits(SearchContext parentSearchContext, QueryShardContext context,
|
||||
InnerHitsContext.BaseInnerHits innerHits) throws IOException {
|
||||
private void buildChildInnerHits(SearchContext parentSearchContext, InnerHitsContext.BaseInnerHits innerHits) throws IOException {
|
||||
Map<String, InnerHitsContext.BaseInnerHits> childInnerHits = new HashMap<>();
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : innerHitsBuilder.getInnerHitsBuilders().entrySet()) {
|
||||
InnerHitsContext.BaseInnerHits childInnerHit = entry.getValue().buildTopLevel(
|
||||
parentSearchContext, context, new InnerHitsContext()
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : this.childInnerHits.entrySet()) {
|
||||
InnerHitsContext.BaseInnerHits childInnerHit = entry.getValue().build(
|
||||
parentSearchContext, new InnerHitsContext()
|
||||
);
|
||||
childInnerHits.put(entry.getKey(), childInnerHit);
|
||||
}
|
||||
@ -480,16 +515,23 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
||||
innerHitsContext.parsedQuery(parsedQuery);
|
||||
}
|
||||
|
||||
public void inlineInnerHits(Map<String, InnerHitBuilder> innerHits) {
|
||||
InnerHitBuilder copy = new InnerHitBuilder(this);
|
||||
copy.parentChildType = this.parentChildType;
|
||||
copy.nestedPath = this.nestedPath;
|
||||
copy.query = this.query;
|
||||
innerHits.put(copy.getName(), copy);
|
||||
|
||||
Map<String, InnerHitBuilder> childInnerHits = new HashMap<>();
|
||||
extractInnerHits(query, childInnerHits);
|
||||
if (childInnerHits.size() > 0) {
|
||||
copy.setChildInnerHits(childInnerHits);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
|
||||
if (nestedPath != null) {
|
||||
builder.field(NESTED_PATH_FIELD.getPreferredName(), nestedPath);
|
||||
}
|
||||
if (parentChildType != null) {
|
||||
builder.field(PARENT_CHILD_TYPE_FIELD.getPreferredName(), parentChildType);
|
||||
}
|
||||
if (name != null) {
|
||||
builder.field(NAME_FIELD.getPreferredName(), name);
|
||||
}
|
||||
@ -536,9 +578,12 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
||||
if (highlightBuilder != null) {
|
||||
builder.field(SearchSourceBuilder.HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder, params);
|
||||
}
|
||||
builder.field(SearchSourceBuilder.QUERY_FIELD.getPreferredName(), query, params);
|
||||
if (innerHitsBuilder != null) {
|
||||
builder.field(SearchSourceBuilder.INNER_HITS_FIELD.getPreferredName(), innerHitsBuilder, params);
|
||||
if (childInnerHits != null) {
|
||||
builder.startObject(INNER_HITS_FIELD.getPreferredName());
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : childInnerHits.entrySet()) {
|
||||
builder.field(entry.getKey(), entry.getValue(), params);
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
@ -565,17 +610,26 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
|
||||
Objects.equals(sorts, that.sorts) &&
|
||||
Objects.equals(highlightBuilder, that.highlightBuilder) &&
|
||||
Objects.equals(query, that.query) &&
|
||||
Objects.equals(innerHitsBuilder, that.innerHitsBuilder);
|
||||
Objects.equals(childInnerHits, that.childInnerHits);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(name, nestedPath, parentChildType, from, size, explain, version, trackScores, fieldNames,
|
||||
fieldDataFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, innerHitsBuilder);
|
||||
fieldDataFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, childInnerHits);
|
||||
}
|
||||
|
||||
public static InnerHitBuilder fromXContent(QueryParseContext context) throws IOException {
|
||||
return PARSER.parse(context.parser(), new InnerHitBuilder(), context);
|
||||
}
|
||||
|
||||
public static void extractInnerHits(QueryBuilder<?> query, Map<String, InnerHitBuilder> innerHitBuilders) {
|
||||
if (query instanceof AbstractQueryBuilder) {
|
||||
((AbstractQueryBuilder) query).extractInnerHitBuilders(innerHitBuilders);
|
||||
} else {
|
||||
throw new IllegalStateException("provided query builder [" + query.getClass() +
|
||||
"] class should inherit from AbstractQueryBuilder, but it doesn't");
|
||||
}
|
||||
}
|
||||
|
||||
}
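A brief usage sketch of the public extractInnerHits hook defined above; it is illustrative only, and the queryBuilder instance is assumed to be any query built elsewhere:

    // Collect every inner hit definition declared anywhere inside a query tree.
    Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
    InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
    // Each collected entry can then be turned into an InnerHitsContext entry,
    // which is what SearchService does later in this change.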
@ -93,7 +93,7 @@ public class MatchNoneQueryBuilder extends AbstractQueryBuilder<MatchNoneQueryBu
|
||||
|
||||
@Override
|
||||
protected Query doToQuery(QueryShardContext context) throws IOException {
|
||||
return Queries.newMatchNoDocsQuery();
|
||||
return Queries.newMatchNoDocsQuery("User requested \"" + this.getName() + "\" query.");
|
||||
}
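A small, hedged sketch of the overload used above; the reason string travels with the returned query and is intended to make toString and explain output more informative:

    // The message is free-form and purely diagnostic.
    Query none = Queries.newMatchNoDocsQuery("User requested \"match_none\" query.");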
|
||||
|
||||
@Override
|
||||
|
@ -32,9 +32,9 @@ import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.index.query.support.InnerHitBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder> {
|
||||
@ -109,9 +109,7 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
|
||||
}
|
||||
|
||||
public NestedQueryBuilder innerHit(InnerHitBuilder innerHit) {
|
||||
innerHit.setNestedPath(path);
|
||||
innerHit.setQuery(query);
|
||||
this.innerHitBuilder = innerHit;
|
||||
this.innerHitBuilder = new InnerHitBuilder(innerHit, path, query);
|
||||
return this;
|
||||
}
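A hedged usage sketch, not part of the change itself: the inner hit definition is now copied and bound to the nested path and inner query here, so callers no longer mutate the builder they pass in. The field names, the ScoreMode value and the three-argument constructor are taken from the surrounding code and are assumed to be public:

    MatchQueryBuilder innerQuery = QueryBuilders.matchQuery("comments.message", "elasticsearch");
    NestedQueryBuilder nested = new NestedQueryBuilder("comments", innerQuery, ScoreMode.Avg)
            .innerHit(new InnerHitBuilder().setName("top_comments"));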
@ -196,8 +194,14 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
|
||||
}
|
||||
}
|
||||
}
|
||||
return new NestedQueryBuilder(path, query, scoreMode, innerHitBuilder).ignoreUnmapped(ignoreUnmapped).queryName(queryName)
|
||||
NestedQueryBuilder queryBuilder = new NestedQueryBuilder(path, query, scoreMode)
|
||||
.ignoreUnmapped(ignoreUnmapped)
|
||||
.queryName(queryName)
|
||||
.boost(boost);
|
||||
if (innerHitBuilder != null) {
|
||||
queryBuilder.innerHit(innerHitBuilder);
|
||||
}
|
||||
return queryBuilder;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -236,9 +240,6 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
|
||||
final Query childFilter;
|
||||
final Query innerQuery;
|
||||
ObjectMapper objectMapper = context.nestedScope().getObjectMapper();
|
||||
if (innerHitBuilder != null) {
|
||||
context.addInnerHit(innerHitBuilder);
|
||||
}
|
||||
if (objectMapper == null) {
|
||||
parentFilter = context.bitsetFilter(Queries.newNonNestedFilter());
|
||||
} else {
|
||||
@ -265,4 +266,11 @@ public class NestedQueryBuilder extends AbstractQueryBuilder<NestedQueryBuilder>
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void extractInnerHitBuilders(Map<String, InnerHitBuilder> innerHits) {
|
||||
if (innerHitBuilder != null) {
|
||||
innerHitBuilder.inlineInnerHits(innerHits);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -57,12 +57,10 @@ import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.core.TextFieldMapper;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.index.percolator.PercolatorQueryCache;
|
||||
import org.elasticsearch.index.query.support.InnerHitBuilder;
|
||||
import org.elasticsearch.index.query.support.NestedScope;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.lookup.SearchLookup;
|
||||
|
||||
@ -185,16 +183,6 @@ public class QueryShardContext extends QueryRewriteContext {
|
||||
return isFilter;
|
||||
}
|
||||
|
||||
public void addInnerHit(InnerHitBuilder innerHitBuilder) throws IOException {
|
||||
SearchContext sc = SearchContext.current();
|
||||
if (sc == null) {
|
||||
throw new QueryShardException(this, "inner_hits unsupported");
|
||||
}
|
||||
|
||||
InnerHitsContext innerHitsContext = sc.innerHits();
|
||||
innerHitsContext.addInnerHitDefinition(innerHitBuilder.buildInline(sc, this));
|
||||
}
|
||||
|
||||
public Collection<String> simpleMatchToIndexNames(String pattern) {
|
||||
return mapperService.simpleMatchToIndexNames(pattern);
|
||||
}
|
||||
@ -373,7 +361,7 @@ public class QueryShardContext extends QueryRewriteContext {
|
||||
private static Query toQuery(final QueryBuilder<?> queryBuilder, final QueryShardContext context) throws IOException {
|
||||
final Query query = QueryBuilder.rewriteQuery(queryBuilder, context).toQuery(context);
|
||||
if (query == null) {
|
||||
return Queries.newMatchNoDocsQuery();
|
||||
return Queries.newMatchNoDocsQuery("No query left after rewrite.");
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
@ -317,7 +317,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
|
||||
throw new UnsupportedOperationException("query must be rewritten first");
|
||||
}
|
||||
if (values == null || values.isEmpty()) {
|
||||
return Queries.newMatchNoDocsQuery();
|
||||
return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query.");
|
||||
}
|
||||
return handleTermsQuery(values, fieldName, context);
|
||||
}
|
||||
|
@ -42,12 +42,14 @@ import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.InnerHitBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;

/**
|
||||
@ -429,8 +431,15 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
protected void extractInnerHitBuilders(Map<String, InnerHitBuilder> innerHits) {
|
||||
InnerHitBuilder.extractInnerHits(query(), innerHits);
|
||||
}
|
||||
|
||||
public static FunctionScoreQueryBuilder fromXContent(ParseFieldRegistry<ScoreFunctionParser<?>> scoreFunctionsRegistry,
|
||||
QueryParseContext parseContext) throws IOException {
|
||||
QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
QueryBuilder<?> query = null;
|
||||
|
@ -1,126 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.query.support;
|
||||
|
||||
import org.elasticsearch.action.support.ToXContentToBytes;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public final class InnerHitsBuilder extends ToXContentToBytes implements Writeable {
|
||||
private final Map<String, InnerHitBuilder> innerHitsBuilders;
|
||||
|
||||
public InnerHitsBuilder() {
|
||||
this.innerHitsBuilders = new HashMap<>();
|
||||
}
|
||||
|
||||
public InnerHitsBuilder(Map<String, InnerHitBuilder> innerHitsBuilders) {
|
||||
this.innerHitsBuilders = Objects.requireNonNull(innerHitsBuilders);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read from a stream.
|
||||
*/
|
||||
public InnerHitsBuilder(StreamInput in) throws IOException {
|
||||
int size = in.readVInt();
|
||||
innerHitsBuilders = new HashMap<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
innerHitsBuilders.put(in.readString(), new InnerHitBuilder(in));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(innerHitsBuilders.size());
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : innerHitsBuilders.entrySet()) {
|
||||
out.writeString(entry.getKey());
|
||||
entry.getValue().writeTo(out);
|
||||
}
|
||||
}
|
||||
|
||||
public InnerHitsBuilder addInnerHit(String name, InnerHitBuilder builder) {
|
||||
Objects.requireNonNull(name);
|
||||
Objects.requireNonNull(builder);
|
||||
this.innerHitsBuilders.put(name, builder.setName(name));
|
||||
return this;
|
||||
}
|
||||
|
||||
public Map<String, InnerHitBuilder> getInnerHitsBuilders() {
|
||||
return innerHitsBuilders;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : innerHitsBuilders.entrySet()) {
|
||||
builder.field(entry.getKey(), entry.getValue(), params);
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (o == null || getClass() != o.getClass()) return false;
|
||||
|
||||
InnerHitsBuilder that = (InnerHitsBuilder) o;
|
||||
return innerHitsBuilders.equals(that.innerHitsBuilders);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return innerHitsBuilders.hashCode();
|
||||
}
|
||||
|
||||
public static InnerHitsBuilder fromXContent(QueryParseContext context) throws IOException {
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
String innerHitName = null;
|
||||
XContentParser parser = context.parser();
|
||||
for (Token token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) {
|
||||
switch (token) {
|
||||
case START_OBJECT:
|
||||
InnerHitBuilder innerHitBuilder = InnerHitBuilder.fromXContent(context);
|
||||
innerHitBuilder.setName(innerHitName);
|
||||
innerHitBuilders.put(innerHitName, innerHitBuilder);
|
||||
break;
|
||||
case FIELD_NAME:
|
||||
innerHitName = parser.currentName();
|
||||
break;
|
||||
default:
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] in ["
|
||||
+ parser.currentName() + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
return new InnerHitsBuilder(innerHitBuilders);
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -286,7 +286,11 @@ public class MatchQuery {
|
||||
}
|
||||
|
||||
protected Query zeroTermsQuery() {
|
||||
return zeroTermsQuery == DEFAULT_ZERO_TERMS_QUERY ? Queries.newMatchNoDocsQuery() : Queries.newMatchAllQuery();
|
||||
if (zeroTermsQuery == DEFAULT_ZERO_TERMS_QUERY) {
|
||||
return Queries.newMatchNoDocsQuery("Matching no documents because no terms present.");
|
||||
}
|
||||
|
||||
return Queries.newMatchAllQuery();
|
||||
}
|
||||
|
||||
private class MatchQueryBuilder extends QueryBuilder {
|
||||
|
@ -681,8 +681,8 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
|
||||
*/
|
||||
@Nullable
|
||||
public IndexMetaData verifyIndexIsDeleted(final Index index, final ClusterState clusterState) {
|
||||
// this method should only be called when we know the index is not part of the cluster state
|
||||
if (clusterState.metaData().hasIndex(index.getName())) {
|
||||
// this method should only be called when we know the index (name + uuid) is not part of the cluster state
|
||||
if (clusterState.metaData().index(index) != null) {
|
||||
throw new IllegalStateException("Cannot delete index [" + index + "], it is still part of the cluster state.");
|
||||
}
|
||||
if (nodeEnv.hasNodeFile() && FileSystemUtils.exists(nodeEnv.indexPaths(index))) {
|
||||
|
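The stricter check above matters when an index has been deleted and re-created under the same name; a hedged illustration follows, with invented values and assuming the name-plus-uuid Index construction used elsewhere in this code base:

    Index deleted = new Index("logs", "old-uuid");          // tombstone of the removed incarnation
    // clusterState.metaData().hasIndex("logs")  -> may still be true if a new "logs" with a
    //                                               different UUID exists
    // clusterState.metaData().index(deleted)    -> null, so verifyIndexIsDeleted can safely
    //                                               treat the old incarnation as gone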
@ -83,6 +83,28 @@ public final class ConfigurationUtils {
                value.getClass().getName() + "]");
    }

    public static Boolean readBooleanProperty(String processorType, String processorTag, Map<String, Object> configuration,
                                              String propertyName, boolean defaultValue) {
        Object value = configuration.remove(propertyName);
        if (value == null) {
            return defaultValue;
        } else {
            return readBoolean(processorType, processorTag, propertyName, value).booleanValue();
        }
    }

    private static Boolean readBoolean(String processorType, String processorTag, String propertyName, Object value) {
        if (value == null) {
            return null;
        }
        if (value instanceof Boolean) {
            return (Boolean) value;
        }
        throw newConfigurationException(processorType, processorTag, propertyName, "property isn't a boolean, but of type [" +
                value.getClass().getName() + "]");
    }
|
||||
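A minimal usage sketch of the helper added above, illustrative rather than part of the change; note that the property is removed from the configuration map as a side effect:

    Map<String, Object> config = new HashMap<>();
    config.put("override", false);
    boolean override = ConfigurationUtils.readBooleanProperty("set", "tag1", config, "override", true);
    // override == false, and the "override" key is no longer present in config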
|
||||
|
||||
/**
|
||||
* Returns and removes the specified property from the specified configuration map.
|
||||
*
|
||||
|
@ -116,6 +116,18 @@ public final class IngestDocument {
|
||||
return cast(path, context, clazz);
|
||||
}
|
||||
|
||||
/**
 * Returns the value contained in the document with the provided templated path
 * @param pathTemplate The path within the document in dot-notation
 * @param clazz The expected class of the field value
 * @return the value for the provided path if existing, null otherwise
 * @throws IllegalArgumentException if the pathTemplate is null, empty, invalid, if the field doesn't exist,
 * or if the field that is found at the provided path is not of the expected type.
 */
|
||||
public <T> T getFieldValue(TemplateService.Template pathTemplate, Class<T> clazz) {
|
||||
return getFieldValue(renderTemplate(pathTemplate), clazz);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value contained in the document for the provided path as a byte array.
|
||||
* If the path value is a string, a base64 decode operation will happen.
|
||||
@ -141,6 +153,16 @@ public final class IngestDocument {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether the document contains a value for the provided templated path
|
||||
* @param fieldPathTemplate the template for the path within the document in dot-notation
|
||||
* @return true if the document contains a value for the field, false otherwise
|
||||
* @throws IllegalArgumentException if the path is null, empty or invalid
|
||||
*/
|
||||
public boolean hasField(TemplateService.Template fieldPathTemplate) {
|
||||
return hasField(renderTemplate(fieldPathTemplate));
|
||||
}
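A hedged sketch of the templated variants added above; the templateService instance, the mustache-style placeholder and the document contents are assumed for illustration, and the placeholder is resolved from the ingest document itself before the path is looked up:

    TemplateService.Template pathTemplate = templateService.compile("user.{{suffix}}");
    if (ingestDocument.hasField(pathTemplate)) {
        String value = ingestDocument.getFieldValue(pathTemplate, String.class);
    }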
|
||||
|
||||
/**
|
||||
* Checks whether the document contains a value for the provided path
|
||||
* @param path The path within the document in dot-notation
|
||||
|
@ -0,0 +1,156 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.ingest.processor;
|
||||
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.ingest.core.AbstractProcessor;
|
||||
import org.elasticsearch.ingest.core.AbstractProcessorFactory;
|
||||
import org.elasticsearch.ingest.core.ConfigurationUtils;
|
||||
import org.elasticsearch.ingest.core.IngestDocument;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.format.DateTimeFormat;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.IllformedLocaleException;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.function.Function;
|
||||
|
||||
public final class DateIndexNameProcessor extends AbstractProcessor {
|
||||
|
||||
public static final String TYPE = "date_index_name";
|
||||
|
||||
private final String field;
|
||||
private final String indexNamePrefix;
|
||||
private final String dateRounding;
|
||||
private final String indexNameFormat;
|
||||
private final DateTimeZone timezone;
|
||||
private final List<Function<String, DateTime>> dateFormats;
|
||||
|
||||
DateIndexNameProcessor(String tag, String field, List<Function<String, DateTime>> dateFormats, DateTimeZone timezone,
|
||||
String indexNamePrefix, String dateRounding, String indexNameFormat) {
|
||||
super(tag);
|
||||
this.field = field;
|
||||
this.timezone = timezone;
|
||||
this.dateFormats = dateFormats;
|
||||
this.indexNamePrefix = indexNamePrefix;
|
||||
this.dateRounding = dateRounding;
|
||||
this.indexNameFormat = indexNameFormat;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void execute(IngestDocument ingestDocument) throws Exception {
|
||||
String date = ingestDocument.getFieldValue(field, String.class);
|
||||
|
||||
DateTime dateTime = null;
|
||||
Exception lastException = null;
|
||||
for (Function<String, DateTime> dateParser : dateFormats) {
|
||||
try {
|
||||
dateTime = dateParser.apply(date);
|
||||
} catch (Exception e) {
|
||||
//try the next parser and keep track of the exceptions
|
||||
lastException = ExceptionsHelper.useOrSuppress(lastException, e);
|
||||
}
|
||||
}
|
||||
|
||||
if (dateTime == null) {
|
||||
throw new IllegalArgumentException("unable to parse date [" + date + "]", lastException);
|
||||
}
|
||||
|
||||
DateTimeFormatter formatter = DateTimeFormat.forPattern(indexNameFormat);
|
||||
StringBuilder builder = new StringBuilder()
|
||||
.append('<')
|
||||
.append(indexNamePrefix)
|
||||
.append('{')
|
||||
.append(formatter.print(dateTime)).append("||/").append(dateRounding)
|
||||
.append('{').append(indexNameFormat).append('|').append(timezone).append('}')
|
||||
.append('}')
|
||||
.append('>');
|
||||
String dynamicIndexName = builder.toString();
|
||||
ingestDocument.setFieldValue(IngestDocument.MetaData.INDEX.getFieldName(), dynamicIndexName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return TYPE;
|
||||
}
|
||||
|
||||
String getField() {
|
||||
return field;
|
||||
}
|
||||
|
||||
String getIndexNamePrefix() {
|
||||
return indexNamePrefix;
|
||||
}
|
||||
|
||||
String getDateRounding() {
|
||||
return dateRounding;
|
||||
}
|
||||
|
||||
String getIndexNameFormat() {
|
||||
return indexNameFormat;
|
||||
}
|
||||
|
||||
DateTimeZone getTimezone() {
|
||||
return timezone;
|
||||
}
|
||||
|
||||
List<Function<String, DateTime>> getDateFormats() {
|
||||
return dateFormats;
|
||||
}
|
||||
|
||||
public static final class Factory extends AbstractProcessorFactory<DateIndexNameProcessor> {
|
||||
|
||||
@Override
|
||||
protected DateIndexNameProcessor doCreate(String tag, Map<String, Object> config) throws Exception {
|
||||
String localeString = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "locale");
|
||||
String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "timezone");
|
||||
DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString);
|
||||
Locale locale = Locale.ENGLISH;
|
||||
if (localeString != null) {
|
||||
try {
|
||||
locale = (new Locale.Builder()).setLanguageTag(localeString).build();
|
||||
} catch (IllformedLocaleException e) {
|
||||
throw new IllegalArgumentException("Invalid language tag specified: " + localeString);
|
||||
}
|
||||
}
|
||||
List<String> dateFormatStrings = ConfigurationUtils.readOptionalList(TYPE, tag, config, "date_formats");
|
||||
if (dateFormatStrings == null) {
|
||||
dateFormatStrings = Collections.singletonList("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
|
||||
}
|
||||
List<Function<String, DateTime>> dateFormats = new ArrayList<>(dateFormatStrings.size());
|
||||
for (String format : dateFormatStrings) {
|
||||
DateFormat dateFormat = DateFormat.fromString(format);
|
||||
dateFormats.add(dateFormat.getFunction(format, timezone, locale));
|
||||
}
|
||||
|
||||
String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field");
|
||||
String indexNamePrefix = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_prefix", "");
|
||||
String dateRounding = ConfigurationUtils.readStringProperty(TYPE, tag, config, "date_rounding");
|
||||
String indexNameFormat = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_format", "yyyy-MM-dd");
|
||||
return new DateIndexNameProcessor(tag, field, dateFormats, timezone, indexNamePrefix, dateRounding, indexNameFormat);
|
||||
}
|
||||
}
|
||||
|
||||
}
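To make the string building in execute() concrete, here is a hedged example of the date-math expression it produces, using the same Joda types the processor imports; the prefix, rounding and format values are invented:

    String indexNamePrefix = "myindex-";
    String dateRounding = "M";                  // round down to the month
    String indexNameFormat = "yyyy-MM-dd";
    DateTimeZone timezone = DateTimeZone.UTC;
    DateTime dateTime = new DateTime(2016, 4, 25, 12, 2, DateTimeZone.UTC);

    String dynamicIndexName = "<" + indexNamePrefix + "{"
            + DateTimeFormat.forPattern(indexNameFormat).print(dateTime)
            + "||/" + dateRounding
            + "{" + indexNameFormat + "|" + timezone + "}"
            + "}>";
    // dynamicIndexName is "<myindex-{2016-04-25||/M{yyyy-MM-dd|UTC}}>", which index name
    // date math later resolves to a concrete index such as "myindex-2016-04-01".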
|
@ -36,15 +36,25 @@ public final class SetProcessor extends AbstractProcessor {
|
||||
|
||||
public static final String TYPE = "set";
|
||||
|
||||
private final boolean overrideEnabled;
|
||||
private final TemplateService.Template field;
|
||||
private final ValueSource value;
|
||||
|
||||
SetProcessor(String tag, TemplateService.Template field, ValueSource value) {
|
||||
SetProcessor(String tag, TemplateService.Template field, ValueSource value) {
|
||||
this(tag, field, value, true);
|
||||
}
|
||||
|
||||
SetProcessor(String tag, TemplateService.Template field, ValueSource value, boolean overrideEnabled) {
|
||||
super(tag);
|
||||
this.overrideEnabled = overrideEnabled;
|
||||
this.field = field;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public boolean isOverrideEnabled() {
|
||||
return overrideEnabled;
|
||||
}
|
||||
|
||||
public TemplateService.Template getField() {
|
||||
return field;
|
||||
}
|
||||
@ -55,7 +65,9 @@ public final class SetProcessor extends AbstractProcessor {
|
||||
|
||||
@Override
|
||||
public void execute(IngestDocument document) {
|
||||
document.setFieldValue(field, value);
|
||||
if (overrideEnabled || document.hasField(field) == false || document.getFieldValue(field, Object.class) == null) {
|
||||
document.setFieldValue(field, value);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -75,7 +87,12 @@ public final class SetProcessor extends AbstractProcessor {
|
||||
public SetProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
|
||||
String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
|
||||
Object value = ConfigurationUtils.readObject(TYPE, processorTag, config, "value");
|
||||
return new SetProcessor(processorTag, templateService.compile(field), ValueSource.wrap(value, templateService));
|
||||
boolean overrideEnabled = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "override", true);
|
||||
return new SetProcessor(
|
||||
processorTag,
|
||||
templateService.compile(field),
|
||||
ValueSource.wrap(value, templateService),
|
||||
overrideEnabled);
|
||||
}
|
||||
}
|
||||
}
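A hedged sketch of the configuration the factory above consumes; the factory instance and the values are illustrative, and doCreate is normally invoked for you by the ingest pipeline machinery rather than called directly:

    Map<String, Object> config = new HashMap<>();
    config.put("field", "host");
    config.put("value", "web-01");
    config.put("override", false);   // keep an existing non-null value instead of replacing it
    SetProcessor processor = factory.doCreate("my-tag", config);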
|
||||
|
@ -28,6 +28,7 @@ import org.elasticsearch.ingest.core.TemplateService;
|
||||
import org.elasticsearch.ingest.processor.AppendProcessor;
|
||||
import org.elasticsearch.ingest.processor.ConvertProcessor;
|
||||
import org.elasticsearch.ingest.processor.DateProcessor;
|
||||
import org.elasticsearch.ingest.processor.DateIndexNameProcessor;
|
||||
import org.elasticsearch.ingest.processor.FailProcessor;
|
||||
import org.elasticsearch.ingest.processor.ForEachProcessor;
|
||||
import org.elasticsearch.ingest.processor.GsubProcessor;
|
||||
@ -76,6 +77,7 @@ public class NodeModule extends AbstractModule {
|
||||
registerProcessor(GsubProcessor.TYPE, (templateService, registry) -> new GsubProcessor.Factory());
|
||||
registerProcessor(FailProcessor.TYPE, (templateService, registry) -> new FailProcessor.Factory(templateService));
|
||||
registerProcessor(ForEachProcessor.TYPE, (templateService, registry) -> new ForEachProcessor.Factory(registry));
|
||||
registerProcessor(DateIndexNameProcessor.TYPE, (templateService, registry) -> new DateIndexNameProcessor.Factory());
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -303,15 +303,15 @@ public class RestTable {
|
||||
String resolution = request.param("bytes");
|
||||
if ("b".equals(resolution)) {
|
||||
return Long.toString(v.bytes());
|
||||
} else if ("k".equals(resolution)) {
|
||||
} else if ("k".equals(resolution) || "kb".equals(resolution)) {
|
||||
return Long.toString(v.kb());
|
||||
} else if ("m".equals(resolution)) {
|
||||
} else if ("m".equals(resolution) || "mb".equals(resolution)) {
|
||||
return Long.toString(v.mb());
|
||||
} else if ("g".equals(resolution)) {
|
||||
} else if ("g".equals(resolution) || "gb".equals(resolution)) {
|
||||
return Long.toString(v.gb());
|
||||
} else if ("t".equals(resolution)) {
|
||||
} else if ("t".equals(resolution) || "tb".equals(resolution)) {
|
||||
return Long.toString(v.tb());
|
||||
} else if ("p".equals(resolution)) {
|
||||
} else if ("p".equals(resolution) || "pb".equals(resolution)) {
|
||||
return Long.toString(v.pb());
|
||||
} else {
|
||||
return v.toString();
|
||||
@ -320,7 +320,7 @@ public class RestTable {
|
||||
if (value instanceof SizeValue) {
|
||||
SizeValue v = (SizeValue) value;
|
||||
String resolution = request.param("size");
|
||||
if ("b".equals(resolution)) {
|
||||
if ("".equals(resolution)) {
|
||||
return Long.toString(v.singles());
|
||||
} else if ("k".equals(resolution)) {
|
||||
return Long.toString(v.kilo());
|
||||
@ -339,7 +339,11 @@ public class RestTable {
|
||||
if (value instanceof TimeValue) {
|
||||
TimeValue v = (TimeValue) value;
|
||||
String resolution = request.param("time");
|
||||
if ("ms".equals(resolution)) {
|
||||
if ("nanos".equals(resolution)) {
|
||||
return Long.toString(v.nanos());
|
||||
} else if ("micros".equals(resolution)) {
|
||||
return Long.toString(v.micros());
|
||||
} else if ("ms".equals(resolution)) {
|
||||
return Long.toString(v.millis());
|
||||
} else if ("s".equals(resolution)) {
|
||||
return Long.toString(v.seconds());
|
||||
@ -347,6 +351,8 @@ public class RestTable {
|
||||
return Long.toString(v.minutes());
|
||||
} else if ("h".equals(resolution)) {
|
||||
return Long.toString(v.hours());
|
||||
} else if ("d".equals(resolution)) {
|
||||
return Long.toString(v.days());
|
||||
} else {
|
||||
return v.toString();
|
||||
}
|
||||
|
@ -82,12 +82,6 @@ public final class ScriptMetaData implements MetaData.Custom {
|
||||
parser.nextToken();
|
||||
switch (parser.currentName()) {
|
||||
case "script":
|
||||
if (parser.nextToken() == Token.VALUE_STRING) {
|
||||
return parser.text();
|
||||
} else {
|
||||
builder.copyCurrentStructure(parser);
|
||||
}
|
||||
break;
|
||||
case "template":
|
||||
if (parser.nextToken() == Token.VALUE_STRING) {
|
||||
return parser.text();
|
||||
|
@ -62,7 +62,7 @@ import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.engine.Engine;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.support.InnerHitBuilder;
|
||||
import org.elasticsearch.index.query.InnerHitBuilder;
|
||||
import org.elasticsearch.index.search.stats.StatsGroupsParseElement;
|
||||
import org.elasticsearch.index.shard.IndexEventListener;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
@ -88,7 +88,6 @@ import org.elasticsearch.search.fetch.ShardFetchRequest;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsContext.FieldDataField;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsFetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
|
||||
import org.elasticsearch.search.fetch.script.ScriptFieldsContext.ScriptField;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilder;
|
||||
import org.elasticsearch.search.internal.DefaultSearchContext;
|
||||
@ -679,12 +678,24 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
||||
context.queryBoost(indexBoost);
|
||||
}
|
||||
}
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
if (source.query() != null) {
|
||||
InnerHitBuilder.extractInnerHits(source.query(), innerHitBuilders);
|
||||
context.parsedQuery(queryShardContext.toQuery(source.query()));
|
||||
}
|
||||
if (source.postFilter() != null) {
|
||||
InnerHitBuilder.extractInnerHits(source.postFilter(), innerHitBuilders);
|
||||
context.parsedPostFilter(queryShardContext.toQuery(source.postFilter()));
|
||||
}
|
||||
if (innerHitBuilders.size() > 0) {
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : innerHitBuilders.entrySet()) {
|
||||
try {
|
||||
entry.getValue().build(context, context.innerHits());
|
||||
} catch (IOException e) {
|
||||
throw new SearchContextException(context, "failed to build inner_hits", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (source.sorts() != null) {
|
||||
try {
|
||||
Optional<Sort> optionalSort = SortBuilder.buildSort(source.sorts(), context.getQueryShardContext());
|
||||
@ -754,25 +765,6 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
||||
throw new SearchContextException(context, "failed to create SearchContextHighlighter", e);
|
||||
}
|
||||
}
|
||||
if (source.innerHits() != null) {
|
||||
for (Map.Entry<String, InnerHitBuilder> entry : source.innerHits().getInnerHitsBuilders().entrySet()) {
|
||||
try {
|
||||
// This is the same logic in QueryShardContext#toQuery() where we reset also twice.
|
||||
// Personally I think a reset at the end is sufficient, but I kept the logic consistent with this method.
|
||||
|
||||
// The reason we need to invoke reset at all here is because inner hits may modify the QueryShardContext#nestedScope,
|
||||
// so we need to reset at the end.
|
||||
queryShardContext.reset();
|
||||
InnerHitBuilder innerHitBuilder = entry.getValue();
|
||||
InnerHitsContext innerHitsContext = context.innerHits();
|
||||
innerHitBuilder.buildTopLevel(context, queryShardContext, innerHitsContext);
|
||||
} catch (IOException e) {
|
||||
throw new SearchContextException(context, "failed to create InnerHitsContext", e);
|
||||
} finally {
|
||||
queryShardContext.reset();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (source.scriptFields() != null) {
|
||||
for (org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) {
|
||||
SearchScript searchScript = context.scriptService().search(context.lookup(), field.script(), ScriptContext.Standard.SEARCH,
|
||||
|
@ -40,7 +40,6 @@ import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.query.support.InnerHitsBuilder;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.aggregations.AggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
@ -93,7 +92,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
public static final ParseField INDICES_BOOST_FIELD = new ParseField("indices_boost");
|
||||
public static final ParseField AGGREGATIONS_FIELD = new ParseField("aggregations", "aggs");
|
||||
public static final ParseField HIGHLIGHT_FIELD = new ParseField("highlight");
|
||||
public static final ParseField INNER_HITS_FIELD = new ParseField("inner_hits");
|
||||
public static final ParseField SUGGEST_FIELD = new ParseField("suggest");
|
||||
public static final ParseField RESCORE_FIELD = new ParseField("rescore");
|
||||
public static final ParseField STATS_FIELD = new ParseField("stats");
|
||||
@ -156,8 +154,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
|
||||
private SuggestBuilder suggestBuilder;
|
||||
|
||||
private InnerHitsBuilder innerHitsBuilder;
|
||||
|
||||
private List<RescoreBuilder<?>> rescoreBuilders;
|
||||
|
||||
private ObjectFloatHashMap<String> indexBoost = null;
|
||||
@ -205,14 +201,11 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
boolean hasIndexBoost = in.readBoolean();
|
||||
if (hasIndexBoost) {
|
||||
int size = in.readVInt();
|
||||
indexBoost = new ObjectFloatHashMap<String>(size);
|
||||
indexBoost = new ObjectFloatHashMap<>(size);
|
||||
for (int i = 0; i < size; i++) {
|
||||
indexBoost.put(in.readString(), in.readFloat());
|
||||
}
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
innerHitsBuilder = new InnerHitsBuilder(in);
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
minScore = in.readFloat();
|
||||
}
|
||||
@ -303,11 +296,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
out.writeFloat(indexBoost.get(key.value));
|
||||
}
|
||||
}
|
||||
boolean hasInnerHitsBuilder = innerHitsBuilder != null;
|
||||
out.writeBoolean(hasInnerHitsBuilder);
|
||||
if (hasInnerHitsBuilder) {
|
||||
innerHitsBuilder.writeTo(out);
|
||||
}
|
||||
boolean hasMinScore = minScore != null;
|
||||
out.writeBoolean(hasMinScore);
|
||||
if (hasMinScore) {
|
||||
@ -653,15 +641,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
return highlightBuilder;
|
||||
}
|
||||
|
||||
public SearchSourceBuilder innerHits(InnerHitsBuilder innerHitsBuilder) {
|
||||
this.innerHitsBuilder = innerHitsBuilder;
|
||||
return this;
|
||||
}
|
||||
|
||||
public InnerHitsBuilder innerHits() {
|
||||
return innerHitsBuilder;
|
||||
}
|
||||
|
||||
public SearchSourceBuilder suggest(SuggestBuilder suggestBuilder) {
|
||||
this.suggestBuilder = suggestBuilder;
|
||||
return this;
|
||||
@ -957,7 +936,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
rewrittenBuilder.from = from;
|
||||
rewrittenBuilder.highlightBuilder = highlightBuilder;
|
||||
rewrittenBuilder.indexBoost = indexBoost;
|
||||
rewrittenBuilder.innerHitsBuilder = innerHitsBuilder;
|
||||
rewrittenBuilder.minScore = minScore;
|
||||
rewrittenBuilder.postQueryBuilder = postQueryBuilder;
|
||||
rewrittenBuilder.profile = profile;
|
||||
@ -1051,8 +1029,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
aggregations = aggParsers.parseAggregators(context);
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, HIGHLIGHT_FIELD)) {
|
||||
highlightBuilder = HighlightBuilder.fromXContent(context);
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, INNER_HITS_FIELD)) {
|
||||
innerHitsBuilder = InnerHitsBuilder.fromXContent(context);
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, SUGGEST_FIELD)) {
|
||||
suggestBuilder = SuggestBuilder.fromXContent(context, suggesters);
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
|
||||
@ -1235,10 +1211,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
builder.field(HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder);
|
||||
}
|
||||
|
||||
if (innerHitsBuilder != null) {
|
||||
builder.field(INNER_HITS_FIELD.getPreferredName(), innerHitsBuilder, params);
|
||||
}
|
||||
|
||||
if (suggestBuilder != null) {
|
||||
builder.field(SUGGEST_FIELD.getPreferredName(), suggestBuilder);
|
||||
}
|
||||
@ -1379,7 +1351,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from,
|
||||
highlightBuilder, indexBoost, innerHitsBuilder, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields,
|
||||
highlightBuilder, indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields,
|
||||
size, sorts, searchAfterBuilder, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile);
|
||||
}
|
||||
|
||||
@ -1400,7 +1372,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
&& Objects.equals(from, other.from)
|
||||
&& Objects.equals(highlightBuilder, other.highlightBuilder)
|
||||
&& Objects.equals(indexBoost, other.indexBoost)
|
||||
&& Objects.equals(innerHitsBuilder, other.innerHitsBuilder)
|
||||
&& Objects.equals(minScore, other.minScore)
|
||||
&& Objects.equals(postQueryBuilder, other.postQueryBuilder)
|
||||
&& Objects.equals(queryBuilder, other.queryBuilder)
|
||||
|
@ -19,6 +19,7 @@
|
||||
package org.elasticsearch.snapshots;
|
||||
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
|
||||
@ -43,7 +44,7 @@ public class SnapshotUtils {
|
||||
* @return filtered out indices
|
||||
*/
|
||||
public static List<String> filterIndices(List<String> availableIndices, String[] selectedIndices, IndicesOptions indicesOptions) {
|
||||
if (selectedIndices == null || selectedIndices.length == 0) {
|
||||
if (IndexNameExpressionResolver.isAllIndices(Arrays.asList(selectedIndices))) {
|
||||
return availableIndices;
|
||||
}
|
||||
Set<String> result = null;
|
||||
|
@ -20,14 +20,21 @@
|
||||
package org.elasticsearch.action.admin.cluster.allocation;
|
||||
|
||||
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
|
||||
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse;
|
||||
import org.elasticsearch.client.Requests;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.routing.UnassignedInfo;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
|
||||
@ -68,6 +75,101 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase {
|
||||
assertThat(cae.getShard().getIndexName(), equalTo("test"));
|
||||
assertFalse(cae.isPrimary());
|
||||
assertFalse(cae.isAssigned());
|
||||
assertThat("expecting a remaining delay, got: " + cae.getRemainingDelayNanos(), cae.getRemainingDelayNanos(), greaterThan(0L));
|
||||
assertThat("expecting a remaining delay, got: " + cae.getRemainingDelayMillis(), cae.getRemainingDelayMillis(), greaterThan(0L));
|
||||
}
|
||||
|
||||
public void testUnassignedShards() throws Exception {
|
||||
logger.info("--> starting 3 nodes");
|
||||
String noAttrNode = internalCluster().startNode();
|
||||
String barAttrNode = internalCluster().startNode(Settings.builder().put("node.attr.bar", "baz"));
|
||||
String fooBarAttrNode = internalCluster().startNode(Settings.builder()
|
||||
.put("node.attr.foo", "bar")
|
||||
.put("node.attr.bar", "baz"));
|
||||
|
||||
// Wait for all 3 nodes to be up
|
||||
logger.info("--> waiting for 3 nodes to be up");
|
||||
client().admin().cluster().health(Requests.clusterHealthRequest().waitForNodes("3")).actionGet();
|
||||
|
||||
client().admin().indices().prepareCreate("anywhere")
|
||||
.setSettings(Settings.builder()
|
||||
.put("index.number_of_shards", 5)
|
||||
.put("index.number_of_replicas", 1))
|
||||
.get();
|
||||
|
||||
client().admin().indices().prepareCreate("only-baz")
|
||||
.setSettings(Settings.builder()
|
||||
.put("index.routing.allocation.include.bar", "baz")
|
||||
.put("index.number_of_shards", 5)
|
||||
.put("index.number_of_replicas", 1))
|
||||
.get();
|
||||
|
||||
client().admin().indices().prepareCreate("only-foo")
|
||||
.setSettings(Settings.builder()
|
||||
.put("index.routing.allocation.include.foo", "bar")
|
||||
.put("index.number_of_shards", 1)
|
||||
.put("index.number_of_replicas", 1))
|
||||
.get();
|
||||
|
||||
ensureGreen("anywhere", "only-baz");
|
||||
ensureYellow("only-foo");
|
||||
|
||||
ClusterAllocationExplainResponse resp = client().admin().cluster().prepareAllocationExplain()
|
||||
.setIndex("only-foo")
|
||||
.setShard(0)
|
||||
.setPrimary(false)
|
||||
.get();
|
||||
ClusterAllocationExplanation cae = resp.getExplanation();
|
||||
assertThat(cae.getShard().getIndexName(), equalTo("only-foo"));
|
||||
assertFalse(cae.isPrimary());
|
||||
assertFalse(cae.isAssigned());
|
||||
assertThat(UnassignedInfo.Reason.INDEX_CREATED, equalTo(cae.getUnassignedInfo().getReason()));
|
||||
assertThat("expecting no remaining delay: " + cae.getRemainingDelayMillis(), cae.getRemainingDelayMillis(), equalTo(0L));
|
||||
|
||||
Map<DiscoveryNode, NodeExplanation> explanations = cae.getNodeExplanations();
|
||||
|
||||
Float noAttrWeight = -1f;
|
||||
Float barAttrWeight = -1f;
|
||||
Float fooBarAttrWeight = -1f;
|
||||
for (Map.Entry<DiscoveryNode, NodeExplanation> entry : explanations.entrySet()) {
|
||||
DiscoveryNode node = entry.getKey();
|
||||
String nodeName = node.getName();
|
||||
NodeExplanation explanation = entry.getValue();
|
||||
ClusterAllocationExplanation.FinalDecision finalDecision = explanation.getFinalDecision();
|
||||
String finalExplanation = explanation.getFinalExplanation();
|
||||
ClusterAllocationExplanation.StoreCopy storeCopy = explanation.getStoreCopy();
|
||||
Decision d = explanation.getDecision();
|
||||
float weight = explanation.getWeight();
|
||||
IndicesShardStoresResponse.StoreStatus storeStatus = explanation.getStoreStatus();
|
||||
|
||||
assertEquals(d.type(), Decision.Type.NO);
|
||||
if (noAttrNode.equals(nodeName)) {
|
||||
assertThat(d.toString(), containsString("node does not match index include filters [foo:\"bar\"]"));
|
||||
noAttrWeight = weight;
|
||||
assertNull(storeStatus);
|
||||
assertEquals("the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
|
||||
explanation.getFinalExplanation());
|
||||
assertEquals(ClusterAllocationExplanation.FinalDecision.NO, finalDecision);
|
||||
} else if (barAttrNode.equals(nodeName)) {
|
||||
assertThat(d.toString(), containsString("node does not match index include filters [foo:\"bar\"]"));
|
||||
barAttrWeight = weight;
|
||||
assertNull(storeStatus);
|
||||
assertEquals("the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
|
||||
explanation.getFinalExplanation());
|
||||
assertEquals(ClusterAllocationExplanation.FinalDecision.NO, finalDecision);
|
||||
} else if (fooBarAttrNode.equals(nodeName)) {
|
||||
assertThat(d.toString(), containsString("the shard cannot be allocated on the same node id"));
|
||||
fooBarAttrWeight = weight;
|
||||
assertEquals(storeStatus.getAllocationStatus(),
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY);
|
||||
assertEquals(ClusterAllocationExplanation.FinalDecision.NO, finalDecision);
|
||||
assertEquals(ClusterAllocationExplanation.StoreCopy.AVAILABLE, storeCopy);
|
||||
assertEquals("the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
|
||||
explanation.getFinalExplanation());
|
||||
} else {
|
||||
fail("unexpected node with name: " + nodeName +
|
||||
", I have: " + noAttrNode + ", " + barAttrNode + ", " + fooBarAttrNode);
|
||||
}
|
||||
}
|
||||
assertFalse(barAttrWeight == fooBarAttrWeight);
|
||||
}
|
||||
}
|
||||
|
@ -43,16 +43,22 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase {
|
||||
assertEquals(false, cae.isPrimary());
|
||||
assertNull(cae.getAssignedNodeId());
|
||||
assertNotNull(cae.getUnassignedInfo());
|
||||
Decision d = cae.getNodeDecisions().values().iterator().next();
|
||||
NodeExplanation explanation = cae.getNodeExplanations().values().iterator().next();
|
||||
ClusterAllocationExplanation.FinalDecision fd = explanation.getFinalDecision();
|
||||
ClusterAllocationExplanation.StoreCopy storeCopy = explanation.getStoreCopy();
|
||||
String finalExplanation = explanation.getFinalExplanation();
|
||||
Decision d = explanation.getDecision();
|
||||
assertNotNull("should have a decision", d);
|
||||
assertEquals(Decision.Type.NO, d.type());
|
||||
assertEquals(ClusterAllocationExplanation.FinalDecision.NO, fd);
|
||||
assertEquals(ClusterAllocationExplanation.StoreCopy.AVAILABLE, storeCopy);
|
||||
assertTrue(d.toString(), d.toString().contains("NO(the shard cannot be allocated on the same node id"));
|
||||
assertTrue(d instanceof Decision.Multi);
|
||||
Decision.Multi md = (Decision.Multi) d;
|
||||
Decision ssd = md.getDecisions().get(0);
|
||||
assertEquals(Decision.Type.NO, ssd.type());
|
||||
assertTrue(ssd.toString(), ssd.toString().contains("NO(the shard cannot be allocated on the same node id"));
|
||||
Float weight = cae.getNodeWeights().values().iterator().next();
|
||||
Float weight = explanation.getWeight();
|
||||
assertNotNull("should have a weight", weight);
|
||||
|
||||
resp = client().admin().cluster().prepareAllocationExplain().setIndex("test").setShard(0).setPrimary(true).get();
|
||||
@ -64,16 +70,22 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase {
|
||||
assertEquals(true, cae.isPrimary());
|
||||
assertNotNull("shard should have assigned node id", cae.getAssignedNodeId());
|
||||
assertNull("assigned shard should not have unassigned info", cae.getUnassignedInfo());
|
||||
d = cae.getNodeDecisions().values().iterator().next();
|
||||
explanation = cae.getNodeExplanations().values().iterator().next();
|
||||
d = explanation.getDecision();
|
||||
fd = explanation.getFinalDecision();
|
||||
storeCopy = explanation.getStoreCopy();
|
||||
finalExplanation = explanation.getFinalExplanation();
|
||||
assertNotNull("should have a decision", d);
|
||||
assertEquals(Decision.Type.NO, d.type());
|
||||
assertEquals(ClusterAllocationExplanation.FinalDecision.ALREADY_ASSIGNED, fd);
|
||||
assertEquals(ClusterAllocationExplanation.StoreCopy.AVAILABLE, storeCopy);
|
||||
assertTrue(d.toString(), d.toString().contains("NO(the shard cannot be allocated on the same node id"));
|
||||
assertTrue(d instanceof Decision.Multi);
|
||||
md = (Decision.Multi) d;
|
||||
ssd = md.getDecisions().get(0);
|
||||
assertEquals(Decision.Type.NO, ssd.type());
|
||||
assertTrue(ssd.toString(), ssd.toString().contains("NO(the shard cannot be allocated on the same node id"));
|
||||
weight = cae.getNodeWeights().values().iterator().next();
|
||||
weight = explanation.getWeight();
|
||||
assertNotNull("should have a weight", weight);
|
||||
|
||||
resp = client().admin().cluster().prepareAllocationExplain().useAnyUnassignedShard().get();
|
||||
|
@ -19,17 +19,36 @@
|
||||
|
||||
package org.elasticsearch.action.admin.cluster.allocation;
|
||||
|
||||
import org.apache.lucene.index.CorruptIndexException;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.cluster.routing.ShardRouting;
|
||||
import org.elasticsearch.cluster.routing.ShardRoutingHelper;
|
||||
import org.elasticsearch.cluster.routing.UnassignedInfo;
|
||||
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.DummyTransportAddress;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static java.util.Collections.emptySet;
|
||||
@ -39,6 +58,131 @@ import static java.util.Collections.emptySet;
|
||||
*/
|
||||
public final class ClusterAllocationExplanationTests extends ESTestCase {
|
||||
|
||||
private Index i = new Index("foo", "uuid");
|
||||
private ShardRouting primaryShard = ShardRouting.newUnassigned(i, 0, null, true,
|
||||
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
|
||||
private ShardRouting replicaShard = ShardRouting.newUnassigned(i, 0, null, false,
|
||||
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
|
||||
private IndexMetaData indexMetaData = IndexMetaData.builder("foo")
|
||||
.settings(Settings.builder()
|
||||
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
|
||||
.put(IndexMetaData.SETTING_INDEX_UUID, "uuid"))
|
||||
.putActiveAllocationIds(0, new HashSet<String>(Arrays.asList("aid1", "aid2")))
|
||||
.numberOfShards(1)
|
||||
.numberOfReplicas(1)
|
||||
.build();
|
||||
private DiscoveryNode node = new DiscoveryNode("node-0", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT);
|
||||
private static Decision.Multi yesDecision = new Decision.Multi();
|
||||
private static Decision.Multi noDecision = new Decision.Multi();
|
||||
|
||||
static {
|
||||
yesDecision.add(Decision.single(Decision.Type.YES, "yes label", "yes please"));
|
||||
noDecision.add(Decision.single(Decision.Type.NO, "no label", "no thanks"));
|
||||
}
|
||||
|
||||
|
||||
private NodeExplanation makeNodeExplanation(boolean primary, boolean isAssigned, boolean hasErr, boolean hasActiveId) {
|
||||
Float nodeWeight = randomFloat();
|
||||
Exception e = hasErr ? new ElasticsearchException("stuff's broke, yo") : null;
|
||||
IndicesShardStoresResponse.StoreStatus storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant",
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, e);
|
||||
String assignedNodeId;
|
||||
if (isAssigned) {
|
||||
assignedNodeId = "node-0";
|
||||
} else {
|
||||
assignedNodeId = "node-9";
|
||||
}
|
||||
Set<String> activeAllocationIds = new HashSet<>();
|
||||
if (hasActiveId) {
|
||||
activeAllocationIds.add("eggplant");
|
||||
}
|
||||
|
||||
return TransportClusterAllocationExplainAction.calculateNodeExplanation(primary ? primaryShard : replicaShard,
|
||||
indexMetaData, node, noDecision, nodeWeight, storeStatus, assignedNodeId, activeAllocationIds);
|
||||
}
|
||||
|
||||
private void assertExplanations(NodeExplanation ne, String finalExplanation, ClusterAllocationExplanation.FinalDecision finalDecision,
|
||||
ClusterAllocationExplanation.StoreCopy storeCopy) {
|
||||
assertEquals(finalExplanation, ne.getFinalExplanation());
|
||||
assertEquals(finalDecision, ne.getFinalDecision());
|
||||
assertEquals(storeCopy, ne.getStoreCopy());
|
||||
}
|
||||
|
||||
public void testDecisionAndExplanation() {
|
||||
Exception e = new IOException("stuff's broke, yo");
|
||||
Exception corruptE = new CorruptIndexException("stuff's corrupt, yo", "");
|
||||
Float nodeWeight = randomFloat();
|
||||
Set<String> activeAllocationIds = new HashSet<>();
|
||||
activeAllocationIds.add("eggplant");
|
||||
ShardRouting primaryStartedShard = ShardRouting.newUnassigned(i, 0, null, true,
|
||||
new UnassignedInfo(UnassignedInfo.Reason.INDEX_REOPENED, "foo"));
|
||||
assertTrue(primaryStartedShard.allocatedPostIndexCreate(indexMetaData));
|
||||
|
||||
IndicesShardStoresResponse.StoreStatus storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant",
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, e);
|
||||
NodeExplanation ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node,
|
||||
yesDecision, nodeWeight, storeStatus, "", activeAllocationIds);
|
||||
assertExplanations(ne, "the copy of the shard cannot be read",
|
||||
ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.IO_ERROR);
|
||||
|
||||
ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, yesDecision, nodeWeight,
|
||||
null, "", activeAllocationIds);
|
||||
assertExplanations(ne, "the shard can be assigned",
|
||||
ClusterAllocationExplanation.FinalDecision.YES, ClusterAllocationExplanation.StoreCopy.NONE);
|
||||
|
||||
ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryStartedShard, indexMetaData, node, yesDecision,
|
||||
nodeWeight, null, "", activeAllocationIds);
|
||||
assertExplanations(ne, "there is no copy of the shard available",
|
||||
ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.NONE);
|
||||
|
||||
ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, noDecision, nodeWeight,
|
||||
null, "", activeAllocationIds);
|
||||
assertExplanations(ne, "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
|
||||
ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.NONE);
|
||||
|
||||
storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant",
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null);
|
||||
ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, noDecision, nodeWeight,
|
||||
storeStatus, "", activeAllocationIds);
|
||||
assertExplanations(ne, "the shard cannot be assigned because one or more allocation decider returns a 'NO' decision",
|
||||
ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.AVAILABLE);
|
||||
|
||||
storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant",
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, corruptE);
|
||||
ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, yesDecision, nodeWeight,
|
||||
storeStatus, "", activeAllocationIds);
|
||||
assertExplanations(ne, "the copy of the shard is corrupt",
|
||||
ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.CORRUPT);
|
||||
|
||||
storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "banana",
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null);
|
||||
ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, yesDecision, nodeWeight,
|
||||
storeStatus, "", activeAllocationIds);
|
||||
assertExplanations(ne, "the shard can be assigned",
|
||||
ClusterAllocationExplanation.FinalDecision.YES, ClusterAllocationExplanation.StoreCopy.STALE);
|
||||
|
||||
storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "banana",
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null);
|
||||
ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryStartedShard, indexMetaData, node, yesDecision,
|
||||
nodeWeight, storeStatus, "", activeAllocationIds);
|
||||
assertExplanations(ne, "the copy of the shard is stale, allocation ids do not match",
|
||||
ClusterAllocationExplanation.FinalDecision.NO, ClusterAllocationExplanation.StoreCopy.STALE);
|
||||
|
||||
storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant",
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null);
|
||||
ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, yesDecision, nodeWeight,
|
||||
storeStatus, "node-0", activeAllocationIds);
|
||||
assertExplanations(ne, "the shard is already assigned to this node",
|
||||
ClusterAllocationExplanation.FinalDecision.ALREADY_ASSIGNED, ClusterAllocationExplanation.StoreCopy.AVAILABLE);
|
||||
|
||||
storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant",
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null);
|
||||
ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node, yesDecision, nodeWeight,
|
||||
storeStatus, "", activeAllocationIds);
|
||||
assertExplanations(ne, "the shard can be assigned and the node contains a valid copy of the shard data",
|
||||
ClusterAllocationExplanation.FinalDecision.YES, ClusterAllocationExplanation.StoreCopy.AVAILABLE);
|
||||
}
|
||||
|
||||
public void testDecisionEquality() {
|
||||
Decision.Multi d = new Decision.Multi();
|
||||
Decision.Multi d2 = new Decision.Multi();
|
||||
@ -53,21 +197,19 @@ public final class ClusterAllocationExplanationTests extends ESTestCase {
|
||||
|
||||
public void testExplanationSerialization() throws Exception {
|
||||
ShardId shard = new ShardId("test", "uuid", 0);
|
||||
Map<DiscoveryNode, Decision> nodeToDecisions = new HashMap<>();
|
||||
Map<DiscoveryNode, Float> nodeToWeight = new HashMap<>();
|
||||
for (int i = randomIntBetween(2, 5); i > 0; i--) {
|
||||
DiscoveryNode dn = new DiscoveryNode("node-" + i, DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT);
|
||||
Decision.Multi d = new Decision.Multi();
|
||||
d.add(Decision.single(Decision.Type.NO, "no label", "because I said no"));
|
||||
d.add(Decision.single(Decision.Type.YES, "yes label", "yes please"));
|
||||
d.add(Decision.single(Decision.Type.THROTTLE, "throttle label", "wait a sec"));
|
||||
nodeToDecisions.put(dn, d);
|
||||
nodeToWeight.put(dn, randomFloat());
|
||||
}
|
||||
|
||||
long remainingDelay = randomIntBetween(0, 500);
|
||||
ClusterAllocationExplanation cae = new ClusterAllocationExplanation(shard, true, "assignedNode", null,
|
||||
nodeToDecisions, nodeToWeight, remainingDelay);
|
||||
Map<DiscoveryNode, NodeExplanation> nodeExplanations = new HashMap<>(1);
|
||||
Float nodeWeight = randomFloat();
|
||||
Set<String> activeAllocationIds = new HashSet<>();
|
||||
activeAllocationIds.add("eggplant");
|
||||
|
||||
IndicesShardStoresResponse.StoreStatus storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant",
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null);
|
||||
NodeExplanation ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node,
|
||||
yesDecision, nodeWeight, storeStatus, "", activeAllocationIds);
|
||||
nodeExplanations.put(ne.getNode(), ne);
|
||||
ClusterAllocationExplanation cae = new ClusterAllocationExplanation(shard, true,
|
||||
"assignedNode", remainingDelay, null, nodeExplanations);
|
||||
BytesStreamOutput out = new BytesStreamOutput();
|
||||
cae.writeTo(out);
|
||||
StreamInput in = StreamInput.wrap(out.bytes());
|
||||
@ -77,10 +219,45 @@ public final class ClusterAllocationExplanationTests extends ESTestCase {
|
||||
assertTrue(cae2.isAssigned());
|
||||
assertEquals("assignedNode", cae2.getAssignedNodeId());
|
||||
assertNull(cae2.getUnassignedInfo());
|
||||
for (Map.Entry<DiscoveryNode, Decision> entry : cae2.getNodeDecisions().entrySet()) {
|
||||
assertEquals(nodeToDecisions.get(entry.getKey()), entry.getValue());
|
||||
assertEquals(remainingDelay, cae2.getRemainingDelayMillis());
|
||||
for (Map.Entry<DiscoveryNode, NodeExplanation> entry : cae2.getNodeExplanations().entrySet()) {
|
||||
DiscoveryNode node = entry.getKey();
|
||||
NodeExplanation explanation = entry.getValue();
|
||||
IndicesShardStoresResponse.StoreStatus status = explanation.getStoreStatus();
|
||||
assertNotNull(explanation.getStoreStatus());
|
||||
assertNotNull(explanation.getDecision());
|
||||
assertEquals(nodeWeight, explanation.getWeight());
|
||||
}
|
||||
assertEquals(nodeToWeight, cae2.getNodeWeights());
|
||||
assertEquals(remainingDelay, cae2.getRemainingDelayNanos());
|
||||
}
|
||||
|
||||
public void testExplanationToXContent() throws Exception {
|
||||
ShardId shardId = new ShardId("foo", "uuid", 0);
|
||||
long remainingDelay = 42;
|
||||
Decision.Multi d = new Decision.Multi();
|
||||
d.add(Decision.single(Decision.Type.NO, "no label", "because I said no"));
|
||||
d.add(Decision.single(Decision.Type.YES, "yes label", "yes please"));
|
||||
d.add(Decision.single(Decision.Type.THROTTLE, "throttle label", "wait a sec"));
|
||||
Float nodeWeight = 1.5f;
|
||||
Set<String> allocationIds = new HashSet<>();
|
||||
allocationIds.add("bar");
|
||||
IndicesShardStoresResponse.StoreStatus storeStatus = new IndicesShardStoresResponse.StoreStatus(node, 42, "eggplant",
|
||||
IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, new ElasticsearchException("stuff's broke, yo"));
|
||||
NodeExplanation ne = TransportClusterAllocationExplainAction.calculateNodeExplanation(primaryShard, indexMetaData, node,
|
||||
d, nodeWeight, storeStatus, "node-0", allocationIds);
|
||||
Map<DiscoveryNode, NodeExplanation> nodeExplanations = new HashMap<>(1);
|
||||
nodeExplanations.put(ne.getNode(), ne);
|
||||
ClusterAllocationExplanation cae = new ClusterAllocationExplanation(shardId, true,
|
||||
"assignedNode", remainingDelay, null, nodeExplanations);
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
cae.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
assertEquals("{\"shard\":{\"index\":\"foo\",\"index_uuid\":\"uuid\",\"id\":0,\"primary\":true},\"assigned\":true," +
|
||||
"\"assigned_node_id\":\"assignedNode\",\"nodes\":{\"node-0\":{\"node_name\":\"\",\"node_attribute" +
|
||||
"s\":{},\"store\":{\"shard_copy\":\"IO_ERROR\",\"store_exception\":\"ElasticsearchException[stuff" +
|
||||
"'s broke, yo]\"},\"final_decision\":\"ALREADY_ASSIGNED\",\"final_explanation\":\"the shard is al" +
|
||||
"ready assigned to this node\",\"weight\":1.5,\"decisions\":[{\"decider\":\"no label\",\"decision" +
|
||||
"\":\"NO\",\"explanation\":\"because I said no\"},{\"decider\":\"yes label\",\"decision\":\"YES\"" +
|
||||
",\"explanation\":\"yes please\"},{\"decider\":\"throttle label\",\"decision\":\"THROTTLE\",\"exp" +
|
||||
"lanation\":\"wait a sec\"}]}}}",
|
||||
builder.string());
|
||||
}
|
||||
}
|
||||
|
@ -294,11 +294,13 @@ public class SettingTests extends ESTestCase {
|
||||
Setting<List<String>> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(),
|
||||
Property.Dynamic, Property.NodeScope);
|
||||
List<String> value = listSetting.get(Settings.EMPTY);
|
||||
assertFalse(listSetting.exists(Settings.EMPTY));
|
||||
assertEquals(1, value.size());
|
||||
assertEquals("foo,bar", value.get(0));
|
||||
|
||||
List<String> input = Arrays.asList("test", "test1, test2", "test", ",,,,");
|
||||
Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0]));
|
||||
assertTrue(listSetting.exists(builder.build()));
|
||||
value = listSetting.get(builder.build());
|
||||
assertEquals(input.size(), value.size());
|
||||
assertArrayEquals(value.toArray(new String[0]), input.toArray(new String[0]));
|
||||
@ -311,6 +313,7 @@ public class SettingTests extends ESTestCase {
|
||||
value = listSetting.get(builder.build());
|
||||
assertEquals(input.size(), value.size());
|
||||
assertArrayEquals(value.toArray(new String[0]), input.toArray(new String[0]));
|
||||
assertTrue(listSetting.exists(builder.build()));
|
||||
|
||||
AtomicReference<List<String>> ref = new AtomicReference<>();
|
||||
AbstractScopedSettings.SettingUpdater<List<String>> settingUpdater = listSetting.newUpdater(ref::set, logger);
|
||||
|
@ -24,7 +24,6 @@ import com.fasterxml.jackson.core.JsonParseException;
|
||||
import com.fasterxml.jackson.core.io.JsonStringEncoder;
|
||||
|
||||
import org.apache.lucene.search.BoostQuery;
|
||||
import org.apache.lucene.search.PrefixQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.search.spans.SpanBoostQuery;
|
||||
|
@ -24,10 +24,12 @@ import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.ConstantScoreQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
import static org.hamcrest.CoreMatchers.instanceOf;
|
||||
|
||||
@ -56,9 +58,9 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase<ExistsQueryBu
|
||||
String fieldPattern = queryBuilder.fieldName();
|
||||
Collection<String> fields = context.simpleMatchToIndexNames(fieldPattern);
|
||||
if (getCurrentTypes().length == 0) {
|
||||
assertThat(query, instanceOf(BooleanQuery.class));
|
||||
BooleanQuery booleanQuery = (BooleanQuery) query;
|
||||
assertThat(booleanQuery.clauses().size(), equalTo(0));
|
||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||
MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query;
|
||||
assertThat(matchNoDocsQuery.toString(null), containsString("Missing types in \"exists\" query."));
|
||||
} else {
|
||||
assertThat(query, instanceOf(ConstantScoreQuery.class));
|
||||
ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) query;
|
||||
@ -79,11 +81,11 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase<ExistsQueryBu
|
||||
|
||||
public void testFromJson() throws IOException {
|
||||
String json =
|
||||
"{\n" +
|
||||
" \"exists\" : {\n" +
|
||||
" \"field\" : \"user\",\n" +
|
||||
" \"boost\" : 42.0\n" +
|
||||
" }\n" +
|
||||
"{\n" +
|
||||
" \"exists\" : {\n" +
|
||||
" \"field\" : \"user\",\n" +
|
||||
" \"boost\" : 42.0\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
|
||||
ExistsQueryBuilder parsed = (ExistsQueryBuilder) parseQuery(json);
|
||||
|
@ -42,7 +42,6 @@ import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
|
||||
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
|
||||
import org.elasticsearch.index.query.support.InnerHitBuilder;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.script.Script.ScriptParseException;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
|
||||
@ -53,6 +52,8 @@ import org.junit.BeforeClass;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
@ -125,18 +126,24 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
|
||||
assertEquals(queryBuilder.scoreMode(), lpq.getScoreMode()); // WTF is this why do we have two?
|
||||
}
|
||||
if (queryBuilder.innerHit() != null) {
|
||||
assertNotNull(SearchContext.current());
|
||||
SearchContext searchContext = SearchContext.current();
|
||||
assertNotNull(searchContext);
|
||||
if (query != null) {
|
||||
assertNotNull(SearchContext.current().innerHits());
|
||||
assertEquals(1, SearchContext.current().innerHits().getInnerHits().size());
|
||||
assertTrue(SearchContext.current().innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName()));
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
||||
for (InnerHitBuilder builder : innerHitBuilders.values()) {
|
||||
builder.build(searchContext, searchContext.innerHits());
|
||||
}
|
||||
assertNotNull(searchContext.innerHits());
|
||||
assertEquals(1, searchContext.innerHits().getInnerHits().size());
|
||||
assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName()));
|
||||
InnerHitsContext.BaseInnerHits innerHits =
|
||||
SearchContext.current().innerHits().getInnerHits().get(queryBuilder.innerHit().getName());
|
||||
searchContext.innerHits().getInnerHits().get(queryBuilder.innerHit().getName());
|
||||
assertEquals(innerHits.size(), queryBuilder.innerHit().getSize());
|
||||
assertEquals(innerHits.sort().getSort().length, 1);
|
||||
assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME_2);
|
||||
} else {
|
||||
assertThat(SearchContext.current().innerHits().getInnerHits().size(), equalTo(0));
|
||||
assertThat(searchContext.innerHits().getInnerHits().size(), equalTo(0));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -188,7 +195,6 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
|
||||
" \"boost\" : 2.0,\n" +
|
||||
" \"_name\" : \"WNzYMJKRwePuRBh\",\n" +
|
||||
" \"inner_hits\" : {\n" +
|
||||
" \"type\" : \"child\",\n" +
|
||||
" \"name\" : \"inner_hits_name\",\n" +
|
||||
" \"from\" : 0,\n" +
|
||||
" \"size\" : 100,\n" +
|
||||
@ -199,18 +205,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
|
||||
" \"mapped_string\" : {\n" +
|
||||
" \"order\" : \"asc\"\n" +
|
||||
" }\n" +
|
||||
" } ],\n" +
|
||||
" \"query\" : {\n" +
|
||||
" \"range\" : {\n" +
|
||||
" \"mapped_string\" : {\n" +
|
||||
" \"from\" : \"agJhRET\",\n" +
|
||||
" \"to\" : \"zvqIq\",\n" +
|
||||
" \"include_lower\" : true,\n" +
|
||||
" \"include_upper\" : true,\n" +
|
||||
" \"boost\" : 1.0\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" } ]\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
@ -223,11 +218,11 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
|
||||
assertEquals(query, queryBuilder.childType(), "child");
|
||||
assertEquals(query, queryBuilder.scoreMode(), ScoreMode.Avg);
|
||||
assertNotNull(query, queryBuilder.innerHit());
|
||||
assertEquals(query, queryBuilder.innerHit(), new InnerHitBuilder().setParentChildType("child")
|
||||
InnerHitBuilder expected = new InnerHitBuilder(new InnerHitBuilder(), queryBuilder.query(), "child")
|
||||
.setName("inner_hits_name")
|
||||
.setSize(100)
|
||||
.addSort(new FieldSortBuilder("mapped_string").order(SortOrder.ASC))
|
||||
.setQuery(queryBuilder.query()));
|
||||
.addSort(new FieldSortBuilder("mapped_string").order(SortOrder.ASC));
|
||||
assertEquals(query, queryBuilder.innerHit(), expected);
|
||||
|
||||
}
|
||||
public void testToQueryInnerQueryType() throws IOException {
|
||||
|
@ -19,7 +19,6 @@
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
|
||||
import com.fasterxml.jackson.core.JsonParseException;
|
||||
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
@ -34,7 +33,6 @@ import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.support.InnerHitBuilder;
|
||||
import org.elasticsearch.script.Script.ScriptParseException;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
@ -43,7 +41,8 @@ import org.elasticsearch.search.sort.SortOrder;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
@ -108,18 +107,24 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
|
||||
assertEquals(queryBuilder.score() ? ScoreMode.Max : ScoreMode.None, lpq.getScoreMode());
|
||||
}
|
||||
if (queryBuilder.innerHit() != null) {
|
||||
assertNotNull(SearchContext.current());
|
||||
SearchContext searchContext = SearchContext.current();
|
||||
assertNotNull(searchContext);
|
||||
if (query != null) {
|
||||
assertNotNull(SearchContext.current().innerHits());
|
||||
assertEquals(1, SearchContext.current().innerHits().getInnerHits().size());
|
||||
assertTrue(SearchContext.current().innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName()));
|
||||
InnerHitsContext.BaseInnerHits innerHits = SearchContext.current().innerHits()
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
|
||||
for (InnerHitBuilder builder : innerHitBuilders.values()) {
|
||||
builder.build(searchContext, searchContext.innerHits());
|
||||
}
|
||||
assertNotNull(searchContext.innerHits());
|
||||
assertEquals(1, searchContext.innerHits().getInnerHits().size());
|
||||
assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName()));
|
||||
InnerHitsContext.BaseInnerHits innerHits = searchContext.innerHits()
|
||||
.getInnerHits().get(queryBuilder.innerHit().getName());
|
||||
assertEquals(innerHits.size(), queryBuilder.innerHit().getSize());
|
||||
assertEquals(innerHits.sort().getSort().length, 1);
|
||||
assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME_2);
|
||||
} else {
|
||||
assertThat(SearchContext.current().innerHits().getInnerHits().size(), equalTo(0));
|
||||
assertThat(searchContext.innerHits().getInnerHits().size(), equalTo(0));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -21,16 +21,15 @@ package org.elasticsearch.index.query;
|
||||
|
||||
|
||||
import org.apache.lucene.queries.TermsQuery;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.cluster.metadata.MetaData;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
import static org.hamcrest.CoreMatchers.instanceOf;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
@ -88,8 +87,7 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
if (queryBuilder.ids().size() == 0) {
|
||||
assertThat(query, instanceOf(BooleanQuery.class));
|
||||
assertThat(((BooleanQuery)query).clauses().size(), equalTo(0));
|
||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||
} else {
|
||||
assertThat(query, instanceOf(TermsQuery.class));
|
||||
}
|
||||
|
@ -16,11 +16,13 @@
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.index.query.support;
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.Matchers.notNullValue;
|
||||
import static org.hamcrest.Matchers.sameInstance;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
@ -29,6 +31,7 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import org.apache.lucene.search.join.ScoreMode;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
|
||||
@ -41,8 +44,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.query.MatchQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
@ -87,7 +89,7 @@ public class InnerHitBuilderTests extends ESTestCase {
|
||||
|
||||
public void testFromAndToXContent() throws Exception {
|
||||
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
|
||||
InnerHitBuilder innerHit = randomInnerHits();
|
||||
InnerHitBuilder innerHit = randomInnerHits(true, false);
|
||||
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
|
||||
if (randomBoolean()) {
|
||||
builder.prettyPrint();
|
||||
@ -111,7 +113,7 @@ public class InnerHitBuilderTests extends ESTestCase {
|
||||
assertTrue("inner it is not equal to self", firstInnerHit.equals(firstInnerHit));
|
||||
assertThat("same inner hit's hashcode returns different values if called multiple times", firstInnerHit.hashCode(),
|
||||
equalTo(firstInnerHit.hashCode()));
|
||||
assertThat("different inner hits should not be equal", mutate(firstInnerHit), not(equalTo(firstInnerHit)));
|
||||
assertThat("different inner hits should not be equal", mutate(serializedCopy(firstInnerHit)), not(equalTo(firstInnerHit)));
|
||||
|
||||
InnerHitBuilder secondBuilder = serializedCopy(firstInnerHit);
|
||||
assertTrue("inner hit is not equal to self", secondBuilder.equals(secondBuilder));
|
||||
@ -133,18 +135,83 @@ public class InnerHitBuilderTests extends ESTestCase {
|
||||
}
|
||||
}
|
||||
|
||||
public static InnerHitBuilder randomInnerHits() {
|
||||
return randomInnerHits(true);
|
||||
public void testInlineLeafInnerHitsNestedQuery() throws Exception {
|
||||
InnerHitBuilder leafInnerHits = randomInnerHits();
|
||||
NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None);
|
||||
nestedQueryBuilder.innerHit(leafInnerHits);
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
nestedQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
|
||||
assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue());
|
||||
}
|
||||
|
||||
public static InnerHitBuilder randomInnerHits(boolean recursive) {
|
||||
InnerHitBuilder innerHits = new InnerHitBuilder();
|
||||
if (randomBoolean()) {
|
||||
innerHits.setNestedPath(randomAsciiOfLengthBetween(1, 16));
|
||||
} else {
|
||||
innerHits.setParentChildType(randomAsciiOfLengthBetween(1, 16));
|
||||
}
|
||||
public void testInlineLeafInnerHitsHasChildQuery() throws Exception {
|
||||
InnerHitBuilder leafInnerHits = randomInnerHits();
|
||||
HasChildQueryBuilder hasChildQueryBuilder = new HasChildQueryBuilder("type", new MatchAllQueryBuilder(), ScoreMode.None)
|
||||
.innerHit(leafInnerHits);
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
hasChildQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
|
||||
assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue());
|
||||
}
|
||||
|
||||
public void testInlineLeafInnerHitsHasParentQuery() throws Exception {
|
||||
InnerHitBuilder leafInnerHits = randomInnerHits();
|
||||
HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder("type", new MatchAllQueryBuilder(), false)
|
||||
.innerHit(leafInnerHits);
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
hasParentQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
|
||||
assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue());
|
||||
}
|
||||
|
||||
public void testInlineLeafInnerHitsNestedQueryViaBoolQuery() {
|
||||
InnerHitBuilder leafInnerHits = randomInnerHits();
|
||||
NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
|
||||
.innerHit(leafInnerHits);
|
||||
BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder().should(nestedQueryBuilder);
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
boolQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
|
||||
assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue());
|
||||
}
|
||||
|
||||
public void testInlineLeafInnerHitsNestedQueryViaConstantScoreQuery() {
|
||||
InnerHitBuilder leafInnerHits = randomInnerHits();
|
||||
NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
|
||||
.innerHit(leafInnerHits);
|
||||
ConstantScoreQueryBuilder constantScoreQueryBuilder = new ConstantScoreQueryBuilder(nestedQueryBuilder);
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
constantScoreQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
|
||||
assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue());
|
||||
}
|
||||
|
||||
public void testInlineLeafInnerHitsNestedQueryViaBoostingQuery() {
|
||||
InnerHitBuilder leafInnerHits1 = randomInnerHits();
|
||||
NestedQueryBuilder nestedQueryBuilder1 = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
|
||||
.innerHit(leafInnerHits1);
|
||||
InnerHitBuilder leafInnerHits2 = randomInnerHits();
|
||||
NestedQueryBuilder nestedQueryBuilder2 = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
|
||||
.innerHit(leafInnerHits2);
|
||||
BoostingQueryBuilder constantScoreQueryBuilder = new BoostingQueryBuilder(nestedQueryBuilder1, nestedQueryBuilder2);
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
constantScoreQueryBuilder.extractInnerHitBuilders(innerHitBuilders);
|
||||
assertThat(innerHitBuilders.get(leafInnerHits1.getName()), notNullValue());
|
||||
assertThat(innerHitBuilders.get(leafInnerHits2.getName()), notNullValue());
|
||||
}
|
||||
|
||||
public void testInlineLeafInnerHitsNestedQueryViaFunctionScoreQuery() {
|
||||
InnerHitBuilder leafInnerHits = randomInnerHits();
|
||||
NestedQueryBuilder nestedQueryBuilder = new NestedQueryBuilder("path", new MatchAllQueryBuilder(), ScoreMode.None)
|
||||
.innerHit(leafInnerHits);
|
||||
FunctionScoreQueryBuilder functionScoreQueryBuilder = new FunctionScoreQueryBuilder(nestedQueryBuilder);
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
((AbstractQueryBuilder) functionScoreQueryBuilder).extractInnerHitBuilders(innerHitBuilders);
|
||||
assertThat(innerHitBuilders.get(leafInnerHits.getName()), notNullValue());
|
||||
}
|
||||
|
||||
public static InnerHitBuilder randomInnerHits() {
|
||||
return randomInnerHits(true, true);
|
||||
}
|
||||
|
||||
public static InnerHitBuilder randomInnerHits(boolean recursive, boolean includeQueryTypeOrPath) {
|
||||
InnerHitBuilder innerHits = new InnerHitBuilder();
|
||||
innerHits.setName(randomAsciiOfLengthBetween(1, 16));
|
||||
innerHits.setFrom(randomIntBetween(0, 128));
|
||||
innerHits.setSize(randomIntBetween(0, 128));
|
||||
@ -170,54 +237,76 @@ public class InnerHitBuilderTests extends ESTestCase {
|
||||
);
|
||||
}
|
||||
innerHits.setHighlightBuilder(HighlightBuilderTests.randomHighlighterBuilder());
|
||||
if (randomBoolean()) {
|
||||
innerHits.setQuery(new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 16), randomAsciiOfLengthBetween(1, 16)));
|
||||
}
|
||||
if (recursive && randomBoolean()) {
|
||||
InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
|
||||
int size = randomIntBetween(1, 16);
|
||||
for (int i = 0; i < size; i++) {
|
||||
innerHitsBuilder.addInnerHit(randomAsciiOfLengthBetween(1, 16), randomInnerHits(false));
|
||||
innerHits.addChildInnerHit(randomInnerHits(false, includeQueryTypeOrPath));
|
||||
}
|
||||
innerHits.setInnerHitsBuilder(innerHitsBuilder);
|
||||
}
|
||||
|
||||
return innerHits;
|
||||
if (includeQueryTypeOrPath) {
|
||||
QueryBuilder query = new MatchQueryBuilder(randomAsciiOfLengthBetween(1, 16), randomAsciiOfLengthBetween(1, 16));
|
||||
if (randomBoolean()) {
|
||||
return new InnerHitBuilder(innerHits, randomAsciiOfLength(8), query);
|
||||
} else {
|
||||
return new InnerHitBuilder(innerHits, query, randomAsciiOfLength(8));
|
||||
}
|
||||
} else {
|
||||
return innerHits;
|
||||
}
|
||||
}
|
||||
|
||||
static InnerHitBuilder mutate(InnerHitBuilder innerHits) throws IOException {
|
||||
InnerHitBuilder copy = serializedCopy(innerHits);
|
||||
int surprise = randomIntBetween(0, 10);
|
||||
public void testCopyConstructor() throws Exception {
|
||||
InnerHitBuilder original = randomInnerHits();
|
||||
InnerHitBuilder copy = original.getNestedPath() != null ?
|
||||
new InnerHitBuilder(original, original.getNestedPath(), original.getQuery()) :
|
||||
new InnerHitBuilder(original, original.getQuery(), original.getParentChildType());
|
||||
assertThat(copy, equalTo(original));
|
||||
copy = mutate(copy);
|
||||
assertThat(copy, not(equalTo(original)));
|
||||
}
|
||||
|
||||
static InnerHitBuilder mutate(InnerHitBuilder instance) throws IOException {
|
||||
int surprise = randomIntBetween(0, 11);
|
||||
switch (surprise) {
|
||||
case 0:
|
||||
copy.setFrom(randomValueOtherThan(innerHits.getFrom(), () -> randomIntBetween(0, 128)));
|
||||
instance.setFrom(randomValueOtherThan(instance.getFrom(), () -> randomIntBetween(0, 128)));
|
||||
break;
|
||||
case 1:
|
||||
copy.setSize(randomValueOtherThan(innerHits.getSize(), () -> randomIntBetween(0, 128)));
|
||||
instance.setSize(randomValueOtherThan(instance.getSize(), () -> randomIntBetween(0, 128)));
|
||||
break;
|
||||
case 2:
|
||||
copy.setExplain(!copy.isExplain());
|
||||
instance.setExplain(!instance.isExplain());
|
||||
break;
|
||||
case 3:
|
||||
copy.setVersion(!copy.isVersion());
|
||||
instance.setVersion(!instance.isVersion());
|
||||
break;
|
||||
case 4:
|
||||
copy.setTrackScores(!copy.isTrackScores());
|
||||
instance.setTrackScores(!instance.isTrackScores());
|
||||
break;
|
||||
case 5:
|
||||
copy.setName(randomValueOtherThan(innerHits.getName(), () -> randomAsciiOfLengthBetween(1, 16)));
|
||||
instance.setName(randomValueOtherThan(instance.getName(), () -> randomAsciiOfLengthBetween(1, 16)));
|
||||
break;
|
||||
case 6:
|
||||
copy.setFieldDataFields(randomValueOtherThan(copy.getFieldDataFields(), () -> {
|
||||
return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16));
|
||||
}));
|
||||
if (randomBoolean()) {
|
||||
instance.setFieldDataFields(randomValueOtherThan(instance.getFieldDataFields(), () -> {
|
||||
return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16));
|
||||
}));
|
||||
} else {
|
||||
instance.addFieldDataField(randomAsciiOfLengthBetween(1, 16));
|
||||
}
|
||||
break;
|
||||
case 7:
|
||||
copy.setScriptFields(randomValueOtherThan(copy.getScriptFields(), () -> {
|
||||
return randomListStuff(16, InnerHitBuilderTests::randomScript);}));
|
||||
if (randomBoolean()) {
|
||||
instance.setScriptFields(randomValueOtherThan(instance.getScriptFields(), () -> {
|
||||
return randomListStuff(16, InnerHitBuilderTests::randomScript);}));
|
||||
} else {
|
||||
SearchSourceBuilder.ScriptField script = randomScript();
|
||||
instance.addScriptField(script.fieldName(), script.script());
|
||||
}
|
||||
break;
|
||||
case 8:
|
||||
copy.setFetchSourceContext(randomValueOtherThan(copy.getFetchSourceContext(), () -> {
|
||||
instance.setFetchSourceContext(randomValueOtherThan(instance.getFetchSourceContext(), () -> {
|
||||
FetchSourceContext randomFetchSourceContext;
|
||||
if (randomBoolean()) {
|
||||
randomFetchSourceContext = new FetchSourceContext(randomBoolean());
|
||||
@ -231,21 +320,34 @@ public class InnerHitBuilderTests extends ESTestCase {
|
||||
}));
|
||||
break;
|
||||
case 9:
|
||||
final List<SortBuilder<?>> sortBuilders = randomValueOtherThan(copy.getSorts(), () -> {
|
||||
List<SortBuilder<?>> builders = randomListStuff(16,
|
||||
() -> SortBuilders.fieldSort(randomAsciiOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values())));
|
||||
return builders;
|
||||
});
|
||||
copy.setSorts(sortBuilders);
|
||||
if (randomBoolean()) {
|
||||
final List<SortBuilder<?>> sortBuilders = randomValueOtherThan(instance.getSorts(), () -> {
|
||||
List<SortBuilder<?>> builders = randomListStuff(16,
|
||||
() -> SortBuilders.fieldSort(randomAsciiOfLengthBetween(5, 20)).order(randomFrom(SortOrder.values())));
|
||||
return builders;
|
||||
});
|
||||
instance.setSorts(sortBuilders);
|
||||
} else {
|
||||
instance.addSort(SortBuilders.fieldSort(randomAsciiOfLengthBetween(5, 20)));
|
||||
}
|
||||
break;
|
||||
case 10:
|
||||
copy.setHighlightBuilder(randomValueOtherThan(copy.getHighlightBuilder(),
|
||||
instance.setHighlightBuilder(randomValueOtherThan(instance.getHighlightBuilder(),
|
||||
HighlightBuilderTests::randomHighlighterBuilder));
|
||||
break;
|
||||
case 11:
|
||||
if (instance.getFieldNames() == null || randomBoolean()) {
|
||||
instance.setFieldNames(randomValueOtherThan(instance.getFieldNames(), () -> {
|
||||
return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16));
|
||||
}));
|
||||
} else {
|
||||
instance.getFieldNames().add(randomAsciiOfLengthBetween(1, 16));
|
||||
}
|
||||
break;
|
||||
default:
|
||||
throw new IllegalStateException("unexpected surprise [" + surprise + "]");
|
||||
}
|
||||
return copy;
|
||||
return instance;
|
||||
}
|
||||
|
||||
static SearchSourceBuilder.ScriptField randomScript() {
|
@ -19,12 +19,11 @@
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
import static org.hamcrest.CoreMatchers.instanceOf;
|
||||
|
||||
public class MatchNoneQueryBuilderTests extends AbstractQueryTestCase<MatchNoneQueryBuilder> {
|
||||
@ -36,9 +35,7 @@ public class MatchNoneQueryBuilderTests extends AbstractQueryTestCase<MatchNoneQ
|
||||
|
||||
@Override
|
||||
protected void doAssertLuceneQuery(MatchNoneQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
assertThat(query, instanceOf(BooleanQuery.class));
|
||||
BooleanQuery booleanQuery = (BooleanQuery) query;
|
||||
assertThat(booleanQuery.clauses().size(), equalTo(0));
|
||||
assertThat(query, instanceOf(MatchNoDocsQuery.class));
|
||||
}
|
||||
|
||||
public void testFromJson() throws IOException {
|
||||
|
@ -23,6 +23,7 @@ import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.PointRangeQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||
import java.io.IOException;
|
||||
import static org.hamcrest.CoreMatchers.either;
|
||||
@ -72,7 +73,7 @@ public class MatchPhrasePrefixQueryBuilderTests extends AbstractQueryTestCase<Ma
|
||||
assertThat(query, notNullValue());
|
||||
assertThat(query,
|
||||
either(instanceOf(BooleanQuery.class)).or(instanceOf(MultiPhrasePrefixQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)));
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)).or(instanceOf(MatchNoDocsQuery.class)));
|
||||
}
|
||||
|
||||
public void testIllegalValues() {
|
||||
|
@ -24,6 +24,7 @@ import org.apache.lucene.search.PhraseQuery;
|
||||
import org.apache.lucene.search.PointRangeQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
@ -68,7 +69,7 @@ public class MatchPhraseQueryBuilderTests extends AbstractQueryTestCase<MatchPhr
|
||||
protected void doAssertLuceneQuery(MatchPhraseQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
|
||||
assertThat(query, notNullValue());
|
||||
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)));
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)).or(instanceOf(MatchNoDocsQuery.class)));
|
||||
}
|
||||
|
||||
public void testIllegalValues() {
|
||||
|
@ -30,7 +30,7 @@ import org.apache.lucene.search.PointRangeQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
@ -127,7 +127,7 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
|
||||
switch (queryBuilder.type()) {
|
||||
case BOOLEAN:
|
||||
assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(ExtendedCommonTermsQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class))
|
||||
.or(instanceOf(TermQuery.class)).or(instanceOf(FuzzyQuery.class)).or(instanceOf(MatchNoDocsQuery.class))
|
||||
.or(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class)));
|
||||
break;
|
||||
case PHRASE:
|
||||
|
@ -27,12 +27,12 @@ import org.apache.lucene.search.DisjunctionMaxQuery;
|
||||
import org.apache.lucene.search.FuzzyQuery;
|
||||
import org.apache.lucene.search.LegacyNumericRangeQuery;
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.PhraseQuery;
|
||||
import org.apache.lucene.search.PointRangeQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.common.lucene.all.AllTermQuery;
|
||||
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
|
||||
import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
|
||||
import org.elasticsearch.index.search.MatchQuery;
|
||||
|
||||
|
@ -21,20 +21,25 @@ package org.elasticsearch.index.query;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonParseException;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.join.ScoreMode;
|
||||
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.compress.CompressedXContent;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.query.support.InnerHitBuilder;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
import org.elasticsearch.search.sort.FieldSortBuilder;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.hamcrest.CoreMatchers.containsString;
|
||||
import static org.hamcrest.CoreMatchers.equalTo;
|
||||
@ -66,11 +71,11 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
|
||||
protected NestedQueryBuilder doCreateTestQueryBuilder() {
|
||||
NestedQueryBuilder nqb = new NestedQueryBuilder("nested1", RandomQueryBuilder.createQuery(random()),
|
||||
RandomPicks.randomFrom(random(), ScoreMode.values()));
|
||||
if (SearchContext.current() != null) {
|
||||
if (randomBoolean()) {
|
||||
nqb.innerHit(new InnerHitBuilder()
|
||||
.setName(randomAsciiOfLengthBetween(1, 10))
|
||||
.setSize(randomIntBetween(0, 100))
|
||||
.addSort(new FieldSortBuilder(STRING_FIELD_NAME).order(SortOrder.ASC)));
|
||||
.addSort(new FieldSortBuilder(INT_FIELD_NAME).order(SortOrder.ASC)));
|
||||
}
|
||||
nqb.ignoreUnmapped(randomBoolean());
|
||||
return nqb;
|
||||
@ -87,17 +92,23 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
//TODO how to assert this?
}
if (queryBuilder.innerHit() != null) {
assertNotNull(SearchContext.current());
SearchContext searchContext = SearchContext.current();
assertNotNull(searchContext);
if (query != null) {
assertNotNull(SearchContext.current().innerHits());
assertEquals(1, SearchContext.current().innerHits().getInnerHits().size());
assertTrue(SearchContext.current().innerHits().getInnerHits().containsKey("inner_hits_name"));
InnerHitsContext.BaseInnerHits innerHits = SearchContext.current().innerHits().getInnerHits().get("inner_hits_name");
assertEquals(innerHits.size(), 100);
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
for (InnerHitBuilder builder : innerHitBuilders.values()) {
builder.build(searchContext, searchContext.innerHits());
}
assertNotNull(searchContext.innerHits());
assertEquals(1, searchContext.innerHits().getInnerHits().size());
assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName()));
InnerHitsContext.BaseInnerHits innerHits = searchContext.innerHits().getInnerHits().get(queryBuilder.innerHit().getName());
assertEquals(innerHits.size(), queryBuilder.innerHit().getSize());
assertEquals(innerHits.sort().getSort().length, 1);
assertEquals(innerHits.sort().getSort()[0].getField(), STRING_FIELD_NAME);
assertEquals(innerHits.sort().getSort()[0].getField(), INT_FIELD_NAME);
} else {
assertThat(SearchContext.current().innerHits().getInnerHits().size(), equalTo(0));
assertThat(searchContext.innerHits().getInnerHits().size(), equalTo(0));
}
}
}
@ -163,6 +174,36 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase<NestedQueryBu
assertEquals(json, ScoreMode.Avg, parsed.scoreMode());
}

/**
* override superclass test, because here we need to take care that mutation doesn't happen inside
* `inner_hits` structure, because we don't parse them yet and so no exception will be triggered
* for any mutation there.
*/
@Override
public void testUnknownObjectException() throws IOException {
String validQuery = createTestQueryBuilder().toString();
assertThat(validQuery, containsString("{"));
int endPosition = validQuery.indexOf("inner_hits");
if (endPosition == -1) {
endPosition = validQuery.length() - 1;
}
for (int insertionPosition = 0; insertionPosition < endPosition; insertionPosition++) {
if (validQuery.charAt(insertionPosition) == '{') {
String testQuery = validQuery.substring(0, insertionPosition) + "{ \"newField\" : " +
validQuery.substring(insertionPosition) + "}";
try {
parseQuery(testQuery);
fail("some parsing exception expected for query: " + testQuery);
} catch (ParsingException | Script.ScriptParseException | ElasticsearchParseException e) {
// different kinds of exception wordings depending on location
// of mutation, so no simple asserts possible here
} catch (JsonParseException e) {
// mutation produced invalid json
}
}
}
}

public void testIgnoreUnmapped() throws IOException {
final NestedQueryBuilder queryBuilder = new NestedQueryBuilder("unmapped", new MatchAllQueryBuilder(), ScoreMode.None);
queryBuilder.ignoreUnmapped(true);
@ -31,6 +31,7 @@ import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.lucene.search.MatchNoDocsQuery;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.get.GetResult;
@ -93,41 +94,51 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil

@Override
protected void doAssertLuceneQuery(TermsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
assertThat(query, instanceOf(BooleanQuery.class));
BooleanQuery booleanQuery = (BooleanQuery) query;

// we only do the check below for string fields (otherwise we'd have to decode the values)
if (queryBuilder.fieldName().equals(INT_FIELD_NAME) || queryBuilder.fieldName().equals(DOUBLE_FIELD_NAME)
|| queryBuilder.fieldName().equals(BOOLEAN_FIELD_NAME) || queryBuilder.fieldName().equals(DATE_FIELD_NAME)) {
return;
}

// expected returned terms depending on whether we have a terms query or a terms lookup query
List<Object> terms;
if (queryBuilder.termsLookup() != null) {
terms = randomTerms;
if (queryBuilder.termsLookup() == null && (queryBuilder.values() == null || queryBuilder.values().isEmpty())) {
assertThat(query, instanceOf(MatchNoDocsQuery.class));
MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query;
assertThat(matchNoDocsQuery.toString(), containsString("No terms supplied for \"terms\" query."));
} else if (queryBuilder.termsLookup() != null && randomTerms.size() == 0){
assertThat(query, instanceOf(MatchNoDocsQuery.class));
MatchNoDocsQuery matchNoDocsQuery = (MatchNoDocsQuery) query;
assertThat(matchNoDocsQuery.toString(), containsString("No terms supplied for \"terms\" query."));
} else {
terms = queryBuilder.values();
}
assertThat(query, instanceOf(BooleanQuery.class));
BooleanQuery booleanQuery = (BooleanQuery) query;

// compare whether we have the expected list of terms returned
final List<Term> booleanTerms = new ArrayList<>();
for (BooleanClause booleanClause : booleanQuery) {
assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD));
assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class));
Term term = ((TermQuery) booleanClause.getQuery()).getTerm();
booleanTerms.add(term);
}
CollectionUtil.timSort(booleanTerms);
List<Term> expectedTerms = new ArrayList<>();
for (Object term : terms) {
if (term != null) { // terms lookup filters this out
expectedTerms.add(new Term(queryBuilder.fieldName(), term.toString()));
// we only do the check below for string fields (otherwise we'd have to decode the values)
if (queryBuilder.fieldName().equals(INT_FIELD_NAME) || queryBuilder.fieldName().equals(DOUBLE_FIELD_NAME)
|| queryBuilder.fieldName().equals(BOOLEAN_FIELD_NAME) || queryBuilder.fieldName().equals(DATE_FIELD_NAME)) {
return;
}

// expected returned terms depending on whether we have a terms query or a terms lookup query
List<Object> terms;
if (queryBuilder.termsLookup() != null) {
terms = randomTerms;
} else {
terms = queryBuilder.values();
}

// compare whether we have the expected list of terms returned
final List<Term> booleanTerms = new ArrayList<>();
for (BooleanClause booleanClause : booleanQuery) {
assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD));
assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class));
Term term = ((TermQuery) booleanClause.getQuery()).getTerm();
booleanTerms.add(term);
}
CollectionUtil.timSort(booleanTerms);
List<Term> expectedTerms = new ArrayList<>();
for (Object term : terms) {
if (term != null) { // terms lookup filters this out
expectedTerms.add(new Term(queryBuilder.fieldName(), term.toString()));
}
}
CollectionUtil.timSort(expectedTerms);
assertEquals(expectedTerms + " vs. " + booleanTerms, expectedTerms.size(), booleanTerms.size());
assertEquals(expectedTerms + " vs. " + booleanTerms, expectedTerms, booleanTerms);
}
CollectionUtil.timSort(expectedTerms);
assertEquals(expectedTerms + " vs. " + booleanTerms, expectedTerms.size(), booleanTerms.size());
assertEquals(expectedTerms + " vs. " + booleanTerms, expectedTerms, booleanTerms);
}

public void testEmtpyFieldName() {
@ -1,140 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query.support;

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import java.io.IOException;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.sameInstance;

public class InnerHitsBuilderTests extends ESTestCase {

private static final int NUMBER_OF_TESTBUILDERS = 20;
private static NamedWriteableRegistry namedWriteableRegistry;
private static IndicesQueriesRegistry indicesQueriesRegistry;

@BeforeClass
public static void init() {
namedWriteableRegistry = new NamedWriteableRegistry();
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).getQueryParserRegistry();
}

@AfterClass
public static void afterClass() throws Exception {
namedWriteableRegistry = null;
indicesQueriesRegistry = null;
}

public void testSerialization() throws Exception {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
InnerHitsBuilder original = randomInnerHits();
InnerHitsBuilder deserialized = serializedCopy(original);
assertEquals(deserialized, original);
assertEquals(deserialized.hashCode(), original.hashCode());
assertNotSame(deserialized, original);
}
}

public void testFromAndToXContent() throws Exception {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
InnerHitsBuilder innerHits = randomInnerHits();
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
if (randomBoolean()) {
builder.prettyPrint();
}
innerHits.toXContent(builder, ToXContent.EMPTY_PARAMS);

XContentParser parser = XContentHelper.createParser(builder.bytes());
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY);
parser.nextToken();
InnerHitsBuilder secondInnerHits = InnerHitsBuilder.fromXContent(context);
assertThat(innerHits, not(sameInstance(secondInnerHits)));
assertThat(innerHits, equalTo(secondInnerHits));
assertThat(innerHits.hashCode(), equalTo(secondInnerHits.hashCode()));
}
}

public void testEqualsAndHashcode() throws IOException {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
InnerHitsBuilder firstInnerHits = randomInnerHits();
assertFalse("inner hit is equal to null", firstInnerHits.equals(null));
assertFalse("inner hit is equal to incompatible type", firstInnerHits.equals(""));
assertTrue("inner it is not equal to self", firstInnerHits.equals(firstInnerHits));
assertThat("same inner hit's hashcode returns different values if called multiple times", firstInnerHits.hashCode(),
equalTo(firstInnerHits.hashCode()));

InnerHitsBuilder secondBuilder = serializedCopy(firstInnerHits);
assertTrue("inner hit is not equal to self", secondBuilder.equals(secondBuilder));
assertTrue("inner hit is not equal to its copy", firstInnerHits.equals(secondBuilder));
assertTrue("equals is not symmetric", secondBuilder.equals(firstInnerHits));
assertThat("inner hits copy's hashcode is different from original hashcode", secondBuilder.hashCode(),
equalTo(firstInnerHits.hashCode()));

InnerHitsBuilder thirdBuilder = serializedCopy(secondBuilder);
assertTrue("inner hit is not equal to self", thirdBuilder.equals(thirdBuilder));
assertTrue("inner hit is not equal to its copy", secondBuilder.equals(thirdBuilder));
assertThat("inner hit copy's hashcode is different from original hashcode", secondBuilder.hashCode(),
equalTo(thirdBuilder.hashCode()));
assertTrue("equals is not transitive", firstInnerHits.equals(thirdBuilder));
assertThat("inner hit copy's hashcode is different from original hashcode", firstInnerHits.hashCode(),
equalTo(thirdBuilder.hashCode()));
assertTrue("equals is not symmetric", thirdBuilder.equals(secondBuilder));
assertTrue("equals is not symmetric", thirdBuilder.equals(firstInnerHits));
}
}

public static InnerHitsBuilder randomInnerHits() {
InnerHitsBuilder innerHits = new InnerHitsBuilder();
int numInnerHits = randomIntBetween(0, 12);
for (int i = 0; i < numInnerHits; i++) {
innerHits.addInnerHit(randomAsciiOfLength(5), InnerHitBuilderTests.randomInnerHits());
}
return innerHits;
}

private static InnerHitsBuilder serializedCopy(InnerHitsBuilder original) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
original.writeTo(output);
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
return new InnerHitsBuilder(in);
}
}
}

}
@ -21,8 +21,10 @@ package org.elasticsearch.indices;
import org.apache.lucene.store.LockObtainFailedException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.cluster.metadata.IndexGraveyard;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
@ -283,6 +285,36 @@ public class IndicesServiceTests extends ESSingleNodeTestCase {
indicesService.deleteIndex(test.index(), "finished with test");
}

/**
* This test checks an edge case where, if a node had an index (lets call it A with UUID 1), then
* deleted it (so a tombstone entry for A will exist in the cluster state), then created
* a new index A with UUID 2, then shutdown, when the node comes back online, it will look at the
* tombstones for deletions, and it should proceed with trying to delete A with UUID 1 and not
* throw any errors that the index still exists in the cluster state. This is a case of ensuring
* that tombstones that have the same name as current valid indices don't cause confusion by
* trying to delete an index that exists.
* See https://github.com/elastic/elasticsearch/issues/18054
*/
public void testIndexAndTombstoneWithSameNameOnStartup() throws Exception {
final String indexName = "test";
final Index index = new Index(indexName, UUIDs.randomBase64UUID());
final IndicesService indicesService = getIndicesService();
final Settings idxSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())
.build();
final IndexMetaData indexMetaData = new IndexMetaData.Builder(index.getName())
.settings(idxSettings)
.numberOfShards(1)
.numberOfReplicas(0)
.build();
final Index tombstonedIndex = new Index(indexName, UUIDs.randomBase64UUID());
final IndexGraveyard graveyard = IndexGraveyard.builder().addTombstone(tombstonedIndex).build();
final MetaData metaData = MetaData.builder().put(indexMetaData, true).indexGraveyard(graveyard).build();
final ClusterState clusterState = new ClusterState.Builder(new ClusterName("testCluster")).metaData(metaData).build();
// if all goes well, this won't throw an exception, otherwise, it will throw an IllegalStateException
indicesService.verifyIndexIsDeleted(tombstonedIndex, clusterState);
}

private static class DanglingListener implements LocalAllocateDangledIndices.Listener {
final CountDownLatch latch = new CountDownLatch(1);

@ -34,7 +34,6 @@ import java.util.Map;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;

@ -45,6 +44,8 @@ public class ConfigurationUtilsTests extends ESTestCase {
public void setConfig() {
config = new HashMap<>();
config.put("foo", "bar");
config.put("boolVal", true);
config.put("null", null);
config.put("arr", Arrays.asList("1", "2", "3"));
List<Integer> list = new ArrayList<>();
list.add(2);
@ -68,6 +69,24 @@ public class ConfigurationUtilsTests extends ESTestCase {
}
}

public void testReadBooleanProperty() {
Boolean val = ConfigurationUtils.readBooleanProperty(null, null, config, "boolVal", false);
assertThat(val, equalTo(true));
}

public void testReadNullBooleanProperty() {
Boolean val = ConfigurationUtils.readBooleanProperty(null, null, config, "null", false);
assertThat(val, equalTo(false));
}

public void testReadBooleanPropertyInvalidType() {
try {
ConfigurationUtils.readBooleanProperty(null, null, config, "arr", true);
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("[arr] property isn't a boolean, but of type [java.util.Arrays$ArrayList]"));
}
}

// TODO(talevy): Issue with generics. This test should fail, "int" is of type List<Integer>
public void testOptional_InvalidType() {
List<String> val = ConfigurationUtils.readList(null, null, config, "int");
@ -199,7 +199,7 @@ public class IngestDocumentTests extends ESTestCase {

public void testGetFieldValueNull() {
try {
ingestDocument.getFieldValue(null, String.class);
ingestDocument.getFieldValue((String) null, String.class);
fail("get field value should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
@ -263,7 +263,7 @@ public class IngestDocumentTests extends ESTestCase {

public void testHasFieldNull() {
try {
ingestDocument.hasField(null);
ingestDocument.hasField((String) null);
fail("has field should have failed");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), equalTo("path cannot be null nor empty"));
@ -0,0 +1,99 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.ingest.processor;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import org.joda.time.DateTimeZone;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class DateIndexNameFactoryTests extends ESTestCase {

public void testDefaults() throws Exception {
DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("field", "_field");
config.put("date_rounding", "y");

DateIndexNameProcessor processor = factory.create(config);
assertThat(processor.getDateFormats().size(), Matchers.equalTo(1));
assertThat(processor.getField(), Matchers.equalTo("_field"));
assertThat(processor.getIndexNamePrefix(), Matchers.equalTo(""));
assertThat(processor.getDateRounding(), Matchers.equalTo("y"));
assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyy-MM-dd"));
assertThat(processor.getTimezone(), Matchers.equalTo(DateTimeZone.UTC));
}

public void testSpecifyOptionalSettings() throws Exception {
DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("field", "_field");
config.put("index_name_prefix", "_prefix");
config.put("date_rounding", "y");
config.put("date_formats", Arrays.asList("UNIX", "UNIX_MS"));

DateIndexNameProcessor processor = factory.create(config);
assertThat(processor.getDateFormats().size(), Matchers.equalTo(2));

config = new HashMap<>();
config.put("field", "_field");
config.put("index_name_prefix", "_prefix");
config.put("date_rounding", "y");
config.put("index_name_format", "yyyyMMdd");

processor = factory.create(config);
assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyyMMdd"));

config = new HashMap<>();
config.put("field", "_field");
config.put("index_name_prefix", "_prefix");
config.put("date_rounding", "y");
config.put("timezone", "+02:00");

processor = factory.create(config);
assertThat(processor.getTimezone(), Matchers.equalTo(DateTimeZone.forOffsetHours(2)));

config = new HashMap<>();
config.put("field", "_field");
config.put("index_name_prefix", "_prefix");
config.put("date_rounding", "y");

processor = factory.create(config);
assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("_prefix"));
}

public void testRequiredFields() throws Exception {
DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("date_rounding", "y");
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config));
assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing"));

config.clear();
config.put("field", "_field");
e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config));
assertThat(e.getMessage(), Matchers.equalTo("[date_rounding] required property is missing"));
}

}
@ -0,0 +1,77 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.ingest.processor;

import org.elasticsearch.ingest.core.IngestDocument;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.util.Collections;
import java.util.Locale;
import java.util.function.Function;

import static org.hamcrest.CoreMatchers.equalTo;

public class DateIndexNameProcessorTests extends ESTestCase {

public void testJodaPattern() throws Exception {
Function<String, DateTime> function = DateFormat.Joda.getFunction("yyyy-MM-dd'T'HH:mm:ss.SSSZ", DateTimeZone.UTC, Locale.ROOT);
DateIndexNameProcessor processor = new DateIndexNameProcessor(
"_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC,
"events-", "y", "yyyyMMdd"
);

IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null,
Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z"));
processor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{20160425||/y{yyyyMMdd|UTC}}>"));
}

public void testTAI64N()throws Exception {
Function<String, DateTime> function = DateFormat.Tai64n.getFunction(null, DateTimeZone.UTC, null);
DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null,
Collections.singletonMap("_field", (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{20121222||/m{yyyyMMdd|UTC}}>"));
}

public void testUnixMs()throws Exception {
Function<String, DateTime> function = DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null);
DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null,
Collections.singletonMap("_field", "1000500"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{19700101||/m{yyyyMMdd|UTC}}>"));
}

public void testUnix()throws Exception {
Function<String, DateTime> function = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null);
DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null,
Collections.singletonMap("_field", "1000.5"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{19700101||/m{yyyyMMdd|UTC}}>"));
}

}
@ -22,7 +22,6 @@ package org.elasticsearch.ingest.processor;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.ingest.core.AbstractProcessorFactory;
import org.elasticsearch.ingest.core.Processor;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

@ -51,6 +50,22 @@ public class SetProcessorFactoryTests extends ESTestCase {
assertThat(setProcessor.getTag(), equalTo(processorTag));
assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1"));
assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo("value1"));
assertThat(setProcessor.isOverrideEnabled(), equalTo(true));
}

public void testCreateWithOverride() throws Exception {
boolean overrideEnabled = randomBoolean();
Map<String, Object> config = new HashMap<>();
config.put("field", "field1");
config.put("value", "value1");
config.put("override", overrideEnabled);
String processorTag = randomAsciiOfLength(10);
config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
SetProcessor setProcessor = factory.create(config);
assertThat(setProcessor.getTag(), equalTo(processorTag));
assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1"));
assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo("value1"));
assertThat(setProcessor.isOverrideEnabled(), equalTo(overrideEnabled));
}

public void testCreateNoFieldPresent() throws Exception {
@ -38,7 +38,7 @@ public class SetProcessorTests extends ESTestCase {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument);
Object fieldValue = RandomDocumentPicks.randomFieldValue(random());
Processor processor = createSetProcessor(fieldName, fieldValue);
Processor processor = createSetProcessor(fieldName, fieldValue, true);
processor.execute(ingestDocument);
assertThat(ingestDocument.hasField(fieldName), equalTo(true));
assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(fieldValue));
@ -50,7 +50,7 @@ public class SetProcessorTests extends ESTestCase {
IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
Object fieldValue = RandomDocumentPicks.randomFieldValue(random());
String fieldName = RandomDocumentPicks.addRandomField(random(), testIngestDocument, fieldValue);
Processor processor = createSetProcessor(fieldName, fieldValue);
Processor processor = createSetProcessor(fieldName, fieldValue, true);
processor.execute(ingestDocument);
assertThat(ingestDocument.hasField(fieldName), equalTo(true));
assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(fieldValue));
@ -59,7 +59,7 @@ public class SetProcessorTests extends ESTestCase {
public void testSetFieldsTypeMismatch() throws Exception {
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
ingestDocument.setFieldValue("field", "value");
Processor processor = createSetProcessor("field.inner", "value");
Processor processor = createSetProcessor("field.inner", "value", true);
try {
processor.execute(ingestDocument);
fail("processor execute should have failed");
@ -68,16 +68,47 @@ public class SetProcessorTests extends ESTestCase {
}
}

public void testSetNewFieldWithOverrideDisabled() throws Exception {
IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>());
String fieldName = RandomDocumentPicks.randomFieldName(random());
Object fieldValue = RandomDocumentPicks.randomFieldValue(random());
Processor processor = createSetProcessor(fieldName, fieldValue, false);
processor.execute(ingestDocument);
assertThat(ingestDocument.hasField(fieldName), equalTo(true));
assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(fieldValue));
}

public void testSetExistingFieldWithOverrideDisabled() throws Exception {
IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>());
Object fieldValue = "foo";
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
Processor processor = createSetProcessor(fieldName, "bar", false);
processor.execute(ingestDocument);
assertThat(ingestDocument.hasField(fieldName), equalTo(true));
assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(fieldValue));
}

public void testSetExistingNullFieldWithOverrideDisabled() throws Exception {
IngestDocument ingestDocument = new IngestDocument(new HashMap<>(), new HashMap<>());
Object fieldValue = null;
Object newValue = "bar";
String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, fieldValue);
Processor processor = createSetProcessor(fieldName, newValue, false);
processor.execute(ingestDocument);
assertThat(ingestDocument.hasField(fieldName), equalTo(true));
assertThat(ingestDocument.getFieldValue(fieldName, Object.class), equalTo(newValue));
}

public void testSetMetadata() throws Exception {
IngestDocument.MetaData randomMetaData = randomFrom(IngestDocument.MetaData.values());
Processor processor = createSetProcessor(randomMetaData.getFieldName(), "_value");
Processor processor = createSetProcessor(randomMetaData.getFieldName(), "_value", true);
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
processor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue(randomMetaData.getFieldName(), String.class), Matchers.equalTo("_value"));
}

private static Processor createSetProcessor(String fieldName, Object fieldValue) {
private static Processor createSetProcessor(String fieldName, Object fieldValue, boolean overrideEnabled) {
TemplateService templateService = TestTemplateService.instance();
return new SetProcessor(randomAsciiOfLength(10), templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService));
return new SetProcessor(randomAsciiOfLength(10), templateService.compile(fieldName), ValueSource.wrap(fieldValue, templateService), overrideEnabled);
}
}
@ -43,7 +43,7 @@ import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.functionscore.WeightBuilder;
import org.elasticsearch.index.query.support.InnerHitBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.search.highlight.HighlightBuilder;
import org.elasticsearch.test.ESIntegTestCase;

@ -1827,35 +1827,6 @@ public class PercolatorIT extends ESIntegTestCase {
assertThat(response1.getMatches()[0].getId().string(), equalTo("1"));
}

public void testFailNicelyWithInnerHits() throws Exception {
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject()
.startObject("mapping")
.startObject("properties")
.startObject("nested")
.field("type", "nested")
.startObject("properties")
.startObject("name")
.field("type", "text")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();

assertAcked(prepareCreate(INDEX_NAME)
.addMapping(TYPE_NAME, "query", "type=percolator")
.addMapping("mapping", mapping));
try {
client().prepareIndex(INDEX_NAME, TYPE_NAME, "1")
.setSource(jsonBuilder().startObject().field("query", nestedQuery("nested", matchQuery("nested.name", "value"), ScoreMode.Avg).innerHit(new InnerHitBuilder())).endObject())
.execute().actionGet();
fail("Expected a parse error, because inner_hits isn't supported in the percolate api");
} catch (Exception e) {
assertThat(e.getCause(), instanceOf(QueryShardException.class));
assertThat(e.getCause().getMessage(), containsString("inner_hits unsupported"));
}
}

public void testParentChild() throws Exception {
// We don't fail p/c queries, but those queries are unusable because only a single document can be provided in
// the percolate api
@ -52,8 +52,6 @@ import org.elasticsearch.index.query.AbstractQueryTestCase;
import org.elasticsearch.index.query.EmptyQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.support.InnerHitBuilderTests;
import org.elasticsearch.index.query.support.InnerHitsBuilder;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
@ -410,14 +408,6 @@ public class SearchSourceBuilderTests extends ESTestCase {
if (randomBoolean()) {
builder.suggest(SuggestBuilderTests.randomSuggestBuilder());
}
if (randomBoolean()) {
InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
int num = randomIntBetween(0, 3);
for (int i = 0; i < num; i++) {
innerHitsBuilder.addInnerHit(randomAsciiOfLengthBetween(5, 20), InnerHitBuilderTests.randomInnerHits());
}
builder.innerHits(innerHitsBuilder);
}
if (randomBoolean()) {
int numRescores = randomIntBetween(1, 5);
for (int i = 0; i < numRescores; i++) {
@ -22,14 +22,11 @@ package org.elasticsearch.search.innerhits;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.ArrayUtil;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.support.InnerHitBuilder;
import org.elasticsearch.index.query.support.InnerHitsBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
@ -68,8 +65,6 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
*/
public class InnerHitsIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
@ -112,105 +107,62 @@ public class InnerHitsIT extends ESIntegTestCase {
.endObject()));
indexRandom(true, requests);

InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
innerHitsBuilder.addInnerHit("comment", new InnerHitBuilder()
.setNestedPath("comments")
.setQuery(matchQuery("comments.message", "fox"))
);
// Inner hits can be defined in two ways: 1) with the query 2) as separate inner_hit definition
SearchRequest[] searchRequests = new SearchRequest[]{
client().prepareSearch("articles").setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(
new InnerHitBuilder().setName("comment"))).request(),
client().prepareSearch("articles").setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg))
.innerHits(innerHitsBuilder).request()
};
for (SearchRequest searchRequest : searchRequests) {
SearchResponse response = client().search(searchRequest).actionGet();
assertNoFailures(response);
assertHitCount(response, 1);
assertSearchHit(response, 1, hasId("1"));
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.totalHits(), equalTo(2L));
assertThat(innerHits.getHits().length, equalTo(2));
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
assertThat(innerHits.getAt(1).getId(), equalTo("1"));
assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1));
}
SearchResponse response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg)
.innerHit(new InnerHitBuilder().setName("comment"))
).get();
assertNoFailures(response);
assertHitCount(response, 1);
assertSearchHit(response, 1, hasId("1"));
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.totalHits(), equalTo(2L));
assertThat(innerHits.getHits().length, equalTo(2));
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
assertThat(innerHits.getAt(1).getId(), equalTo("1"));
assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1));

innerHitsBuilder = new InnerHitsBuilder();
innerHitsBuilder.addInnerHit("comment", new InnerHitBuilder()
.setQuery(matchQuery("comments.message", "elephant")).setNestedPath("comments")
);
// Inner hits can be defined in two ways: 1) with the query 2) as
// separate inner_hit definition
searchRequests = new SearchRequest[] {
client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant"), ScoreMode.Avg))
.innerHits(innerHitsBuilder).request(),
client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant"), ScoreMode.Avg).innerHit(new InnerHitBuilder().setName("comment"))).request(),
client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant"), ScoreMode.Avg).innerHit(new InnerHitBuilder().setName("comment").addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC)))).request()
};
for (SearchRequest searchRequest : searchRequests) {
SearchResponse response = client().search(searchRequest).actionGet();
assertNoFailures(response);
assertHitCount(response, 1);
assertSearchHit(response, 1, hasId("2"));
assertThat(response.getHits().getAt(0).getShard(), notNullValue());
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.totalHits(), equalTo(3L));
assertThat(innerHits.getHits().length, equalTo(3));
assertThat(innerHits.getAt(0).getId(), equalTo("2"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
assertThat(innerHits.getAt(1).getId(), equalTo("2"));
assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1));
assertThat(innerHits.getAt(2).getId(), equalTo("2"));
assertThat(innerHits.getAt(2).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(2).getNestedIdentity().getOffset(), equalTo(2));
}
InnerHitBuilder innerHit = new InnerHitBuilder();
innerHit.setNestedPath("comments");
innerHit.setQuery(matchQuery("comments.message", "fox"));
innerHit.setHighlightBuilder(new HighlightBuilder().field("comments.message"));
innerHit.setExplain(true);
innerHit.addFieldDataField("comments.message");
innerHit.addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap()));
innerHit.setSize(1);
innerHitsBuilder = new InnerHitsBuilder();
innerHitsBuilder.addInnerHit("comments", innerHit);
searchRequests = new SearchRequest[] {
client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg))
.innerHits(innerHitsBuilder).request(),
client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(
new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message"))
.setExplain(true)
.addFieldDataField("comments.message")
.addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap()))
.setSize(1)
)).request()
};
response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "elephant"), ScoreMode.Avg)
.innerHit(new InnerHitBuilder().setName("comment"))
).get();
assertNoFailures(response);
assertHitCount(response, 1);
assertSearchHit(response, 1, hasId("2"));
assertThat(response.getHits().getAt(0).getShard(), notNullValue());
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.totalHits(), equalTo(3L));
assertThat(innerHits.getHits().length, equalTo(3));
assertThat(innerHits.getAt(0).getId(), equalTo("2"));
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
assertThat(innerHits.getAt(1).getId(), equalTo("2"));
assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1));
assertThat(innerHits.getAt(2).getId(), equalTo("2"));
assertThat(innerHits.getAt(2).getNestedIdentity().getField().string(), equalTo("comments"));
assertThat(innerHits.getAt(2).getNestedIdentity().getOffset(), equalTo(2));

for (SearchRequest searchRequest : searchRequests) {
SearchResponse response = client().search(searchRequest).actionGet();
assertNoFailures(response);
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
assertThat(innerHits.getTotalHits(), equalTo(2L));
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(), equalTo("<em>fox</em> eat quick"));
assertThat(innerHits.getAt(0).explanation().toString(), containsString("weight(comments.message:fox in"));
assertThat(innerHits.getAt(0).getFields().get("comments.message").getValue().toString(), equalTo("eat"));
assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
}
response = client().prepareSearch("articles")
.setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(
new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message"))
.setExplain(true)
.addFieldDataField("comments.message")
.addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap()))
.setSize(1)
)).get();
assertNoFailures(response);
innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
assertThat(innerHits.getTotalHits(), equalTo(2L));
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(), equalTo("<em>fox</em> eat quick"));
assertThat(innerHits.getAt(0).explanation().toString(), containsString("weight(comments.message:fox in"));
assertThat(innerHits.getAt(0).getFields().get("comments.message").getValue().toString(), equalTo("eat"));
assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
}

public void testRandomNested() throws Exception {
@ -237,38 +189,16 @@ public class InnerHitsIT extends ESIntegTestCase {
indexRandom(true, requestBuilders);

int size = randomIntBetween(0, numDocs);
SearchResponse searchResponse;
if (randomBoolean()) {
InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
innerHitsBuilder.addInnerHit("a", new InnerHitBuilder().setNestedPath("field1")
// Sort order is DESC, because we reverse the inner objects during indexing!
.addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC)).setSize(size));
innerHitsBuilder.addInnerHit("b", new InnerHitBuilder().setNestedPath("field2")
.addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC)).setSize(size));
searchResponse = client().prepareSearch("idx")
.setSize(numDocs)
.addSort("_uid", SortOrder.ASC)
.innerHits(innerHitsBuilder)
.get();
} else {
BoolQueryBuilder boolQuery = new BoolQueryBuilder();
if (randomBoolean()) {
boolQuery.should(nestedQuery("field1", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder().setName("a").setSize(size)
.addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC))));
boolQuery.should(nestedQuery("field2", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder().setName("b")
.addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC)).setSize(size)));
} else {
boolQuery.should(constantScoreQuery(nestedQuery("field1", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder().setName("a")
.setSize(size).addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC)))));
boolQuery.should(constantScoreQuery(nestedQuery("field2", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder().setName("b")
.setSize(size).addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC)))));
}
searchResponse = client().prepareSearch("idx")
.setQuery(boolQuery)
.setSize(numDocs)
.addSort("_uid", SortOrder.ASC)
.get();
}
BoolQueryBuilder boolQuery = new BoolQueryBuilder();
boolQuery.should(nestedQuery("field1", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder().setName("a").setSize(size)
.addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC))));
boolQuery.should(nestedQuery("field2", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder().setName("b")
.addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC)).setSize(size)));
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(boolQuery)
.setSize(numDocs)
.addSort("_uid", SortOrder.ASC)
.get();

assertNoFailures(searchResponse);
assertHitCount(searchResponse, numDocs);
@ -313,102 +243,59 @@ public class InnerHitsIT extends ESIntegTestCase {
requests.add(client().prepareIndex("articles", "comment", "6").setParent("2").setSource("message", "elephant scared by mice x y"));
indexRandom(true, requests);

InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
innerHitsBuilder.addInnerHit("comment", new InnerHitBuilder().setParentChildType("comment")
.setQuery(matchQuery("message", "fox")));
SearchRequest[] searchRequests = new SearchRequest[]{
client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None))
.innerHits(innerHitsBuilder)
.request(),
client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder().setName("comment")))
.request()
};
for (SearchRequest searchRequest : searchRequests) {
SearchResponse response = client().search(searchRequest).actionGet();
assertNoFailures(response);
assertHitCount(response, 1);
assertSearchHit(response, 1, hasId("1"));
assertThat(response.getHits().getAt(0).getShard(), notNullValue());
SearchResponse response = client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder()))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
assertSearchHit(response, 1, hasId("1"));
assertThat(response.getHits().getAt(0).getShard(), notNullValue());

assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.totalHits(), equalTo(2L));
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.totalHits(), equalTo(2L));

assertThat(innerHits.getAt(0).getId(), equalTo("1"));
assertThat(innerHits.getAt(0).type(), equalTo("comment"));
assertThat(innerHits.getAt(1).getId(), equalTo("2"));
assertThat(innerHits.getAt(1).type(), equalTo("comment"));
}
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
assertThat(innerHits.getAt(0).type(), equalTo("comment"));
assertThat(innerHits.getAt(1).getId(), equalTo("2"));
assertThat(innerHits.getAt(1).type(), equalTo("comment"));

innerHitsBuilder = new InnerHitsBuilder();
innerHitsBuilder.addInnerHit("comment", new InnerHitBuilder().setParentChildType("comment")
.setQuery(matchQuery("message", "elephant")));
searchRequests = new SearchRequest[] {
client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None))
.innerHits(innerHitsBuilder)
.request(),
client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None).innerHit(new InnerHitBuilder()))
.request()
};
for (SearchRequest searchRequest : searchRequests) {
SearchResponse response = client().search(searchRequest).actionGet();
assertNoFailures(response);
assertHitCount(response, 1);
assertSearchHit(response, 1, hasId("2"));
response = client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None).innerHit(new InnerHitBuilder()))
.get();
assertNoFailures(response);
assertHitCount(response, 1);
assertSearchHit(response, 1, hasId("2"));

assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.totalHits(), equalTo(3L));
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.totalHits(), equalTo(3L));

assertThat(innerHits.getAt(0).getId(), equalTo("4"));
assertThat(innerHits.getAt(0).type(), equalTo("comment"));
assertThat(innerHits.getAt(1).getId(), equalTo("5"));
assertThat(innerHits.getAt(1).type(), equalTo("comment"));
assertThat(innerHits.getAt(2).getId(), equalTo("6"));
assertThat(innerHits.getAt(2).type(), equalTo("comment"));
}
InnerHitBuilder innerHit = new InnerHitBuilder();
innerHit.setQuery(matchQuery("message", "fox"));
innerHit.setParentChildType("comment");
innerHit.setHighlightBuilder(new HighlightBuilder().field("message"));
innerHit.setExplain(true);
innerHit.addFieldDataField("message");
innerHit.addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap()));
innerHit.setSize(1);
innerHitsBuilder = new InnerHitsBuilder();
innerHitsBuilder.addInnerHit("comment", innerHit);
searchRequests = new SearchRequest[] {
client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None))
.innerHits(innerHitsBuilder)
.request(),
assertThat(innerHits.getAt(0).getId(), equalTo("4"));
assertThat(innerHits.getAt(0).type(), equalTo("comment"));
assertThat(innerHits.getAt(1).getId(), equalTo("5"));
assertThat(innerHits.getAt(1).type(), equalTo("comment"));
assertThat(innerHits.getAt(2).getId(), equalTo("6"));
assertThat(innerHits.getAt(2).type(), equalTo("comment"));

client().prepareSearch("articles")
.setQuery(
hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
new InnerHitBuilder()
.addFieldDataField("message")
.setHighlightBuilder(new HighlightBuilder().field("message"))
.setExplain(true).setSize(1)
.addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE,
MockScriptEngine.NAME, Collections.emptyMap()))
)
).request() };

for (SearchRequest searchRequest : searchRequests) {
SearchResponse response = client().search(searchRequest).actionGet();
assertNoFailures(response);
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.getHits().length, equalTo(1));
assertThat(innerHits.getAt(0).getHighlightFields().get("message").getFragments()[0].string(), equalTo("<em>fox</em> eat quick"));
assertThat(innerHits.getAt(0).explanation().toString(), containsString("weight(message:fox"));
assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("eat"));
|
||||
assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
|
||||
}
|
||||
response = client().prepareSearch("articles")
|
||||
.setQuery(
|
||||
hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
|
||||
new InnerHitBuilder()
|
||||
.addFieldDataField("message")
|
||||
.setHighlightBuilder(new HighlightBuilder().field("message"))
|
||||
.setExplain(true).setSize(1)
|
||||
.addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE,
|
||||
MockScriptEngine.NAME, Collections.emptyMap()))
|
||||
)
|
||||
).get();
|
||||
assertNoFailures(response);
|
||||
innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
|
||||
assertThat(innerHits.getHits().length, equalTo(1));
|
||||
assertThat(innerHits.getAt(0).getHighlightFields().get("message").getFragments()[0].string(), equalTo("<em>fox</em> eat quick"));
|
||||
assertThat(innerHits.getAt(0).explanation().toString(), containsString("weight(message:fox"));
|
||||
assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("eat"));
|
||||
assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
|
||||
}
|
||||
|
||||
public void testRandomParentChild() throws Exception {
|
||||
@ -442,33 +329,17 @@ public class InnerHitsIT extends ESIntegTestCase {
|
||||
indexRandom(true, requestBuilders);
|
||||
|
||||
int size = randomIntBetween(0, numDocs);
|
||||
InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
|
||||
innerHitsBuilder.addInnerHit("a", new InnerHitBuilder().setParentChildType("child1").addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size));
|
||||
innerHitsBuilder.addInnerHit("b", new InnerHitBuilder().setParentChildType("child2").addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size));
|
||||
SearchResponse searchResponse;
|
||||
if (randomBoolean()) {
|
||||
searchResponse = client().prepareSearch("idx")
|
||||
.setSize(numDocs)
|
||||
.setTypes("parent")
|
||||
.addSort("_uid", SortOrder.ASC)
|
||||
.innerHits(innerHitsBuilder)
|
||||
.get();
|
||||
} else {
|
||||
BoolQueryBuilder boolQuery = new BoolQueryBuilder();
|
||||
if (randomBoolean()) {
|
||||
boolQuery.should(hasChildQuery("child1", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder().setName("a").addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size)));
|
||||
boolQuery.should(hasChildQuery("child2", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder().setName("b").addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size)));
|
||||
} else {
|
||||
boolQuery.should(constantScoreQuery(hasChildQuery("child1", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder().setName("a").addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size))));
|
||||
boolQuery.should(constantScoreQuery(hasChildQuery("child2", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder().setName("b").addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size))));
|
||||
}
|
||||
searchResponse = client().prepareSearch("idx")
|
||||
.setSize(numDocs)
|
||||
.setTypes("parent")
|
||||
.addSort("_uid", SortOrder.ASC)
|
||||
.setQuery(boolQuery)
|
||||
.get();
|
||||
}
|
||||
BoolQueryBuilder boolQuery = new BoolQueryBuilder();
|
||||
boolQuery.should(constantScoreQuery(hasChildQuery("child1", matchAllQuery(), ScoreMode.None)
|
||||
.innerHit(new InnerHitBuilder().setName("a").addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size))));
|
||||
boolQuery.should(constantScoreQuery(hasChildQuery("child2", matchAllQuery(), ScoreMode.None)
|
||||
.innerHit(new InnerHitBuilder().setName("b").addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size))));
|
||||
SearchResponse searchResponse = client().prepareSearch("idx")
|
||||
.setSize(numDocs)
|
||||
.setTypes("parent")
|
||||
.addSort("_uid", SortOrder.ASC)
|
||||
.setQuery(boolQuery)
|
||||
.get();
|
||||
|
||||
assertNoFailures(searchResponse);
|
||||
assertHitCount(searchResponse, numDocs);
|
||||
@ -560,19 +431,10 @@ public class InnerHitsIT extends ESIntegTestCase {
|
||||
requests.add(client().prepareIndex("articles", "remark", "2").setParent("2").setRouting("2").setSource("message", "bad"));
|
||||
indexRandom(true, requests);
|
||||
|
||||
InnerHitsBuilder innerInnerHitsBuilder = new InnerHitsBuilder();
|
||||
innerInnerHitsBuilder.addInnerHit("remark", new InnerHitBuilder()
|
||||
.setParentChildType("remark")
|
||||
.setQuery(matchQuery("message", "good"))
|
||||
);
|
||||
InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
|
||||
innerHitsBuilder.addInnerHit("comment", new InnerHitBuilder()
|
||||
.setParentChildType("comment")
|
||||
.setQuery(hasChildQuery("remark", matchQuery("message", "good"), ScoreMode.None))
|
||||
.setInnerHitsBuilder(innerInnerHitsBuilder));
|
||||
SearchResponse response = client().prepareSearch("articles")
|
||||
.setQuery(hasChildQuery("comment", hasChildQuery("remark", matchQuery("message", "good"), ScoreMode.None), ScoreMode.None))
|
||||
.innerHits(innerHitsBuilder)
|
||||
.setQuery(hasChildQuery("comment",
|
||||
hasChildQuery("remark", matchQuery("message", "good"), ScoreMode.None).innerHit(new InnerHitBuilder()),
|
||||
ScoreMode.None).innerHit(new InnerHitBuilder()))
|
||||
.get();
|
||||
|
||||
assertNoFailures(response);
|
||||
@ -590,18 +452,10 @@ public class InnerHitsIT extends ESIntegTestCase {
|
||||
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
|
||||
assertThat(innerHits.getAt(0).type(), equalTo("remark"));
|
||||
|
||||
innerInnerHitsBuilder = new InnerHitsBuilder();
|
||||
innerInnerHitsBuilder.addInnerHit("remark", new InnerHitBuilder()
|
||||
.setParentChildType("remark")
|
||||
.setQuery(matchQuery("message", "bad")));
|
||||
innerHitsBuilder = new InnerHitsBuilder();
|
||||
innerHitsBuilder.addInnerHit("comment", new InnerHitBuilder()
|
||||
.setParentChildType("comment")
|
||||
.setQuery(hasChildQuery("remark", matchQuery("message", "bad"), ScoreMode.None))
|
||||
.setInnerHitsBuilder(innerInnerHitsBuilder));
|
||||
response = client().prepareSearch("articles")
|
||||
.setQuery(hasChildQuery("comment", hasChildQuery("remark", matchQuery("message", "bad"), ScoreMode.None), ScoreMode.None))
|
||||
.innerHits(innerHitsBuilder)
|
||||
.setQuery(hasChildQuery("comment",
|
||||
hasChildQuery("remark", matchQuery("message", "bad"), ScoreMode.None).innerHit(new InnerHitBuilder()),
|
||||
ScoreMode.None).innerHit(new InnerHitBuilder()))
|
||||
.get();
|
||||
|
||||
assertNoFailures(response);
|
||||
@ -662,24 +516,18 @@ public class InnerHitsIT extends ESIntegTestCase {
|
||||
.endObject()));
|
||||
indexRandom(true, requests);
|
||||
|
||||
InnerHitsBuilder innerInnerHitsBuilder = new InnerHitsBuilder();
|
||||
innerInnerHitsBuilder.addInnerHit("remark", new InnerHitBuilder()
|
||||
.setNestedPath("comments.remarks")
|
||||
.setQuery(matchQuery("comments.remarks.message", "good")));
|
||||
InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
|
||||
innerHitsBuilder.addInnerHit("comment", new InnerHitBuilder()
|
||||
.setNestedPath("comments")
|
||||
.setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg))
|
||||
.setInnerHitsBuilder(innerInnerHitsBuilder)
|
||||
);
|
||||
SearchResponse response = client().prepareSearch("articles")
|
||||
.setQuery(nestedQuery("comments", nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg), ScoreMode.Avg))
|
||||
.innerHits(innerHitsBuilder).get();
|
||||
.setQuery(
|
||||
nestedQuery("comments",
|
||||
nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg)
|
||||
.innerHit(new InnerHitBuilder().setName("remark")),
|
||||
ScoreMode.Avg).innerHit(new InnerHitBuilder())
|
||||
).get();
|
||||
assertNoFailures(response);
|
||||
assertHitCount(response, 1);
|
||||
assertSearchHit(response, 1, hasId("1"));
|
||||
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
|
||||
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
|
||||
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
|
||||
assertThat(innerHits.totalHits(), equalTo(1L));
|
||||
assertThat(innerHits.getHits().length, equalTo(1));
|
||||
assertThat(innerHits.getAt(0).getId(), equalTo("1"));
|
||||
@ -711,24 +559,18 @@ public class InnerHitsIT extends ESIntegTestCase {
|
||||
assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks"));
|
||||
assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0));
|
||||
|
||||
innerInnerHitsBuilder = new InnerHitsBuilder();
|
||||
innerInnerHitsBuilder.addInnerHit("remark", new InnerHitBuilder()
|
||||
.setNestedPath("comments.remarks")
|
||||
.setQuery(matchQuery("comments.remarks.message", "bad")));
|
||||
innerHitsBuilder = new InnerHitsBuilder();
|
||||
innerHitsBuilder.addInnerHit("comment", new InnerHitBuilder()
|
||||
.setNestedPath("comments")
|
||||
.setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg))
|
||||
.setInnerHitsBuilder(innerInnerHitsBuilder));
|
||||
response = client().prepareSearch("articles")
|
||||
.setQuery(nestedQuery("comments", nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg), ScoreMode.Avg))
|
||||
.innerHits(innerHitsBuilder)
|
||||
.get();
|
||||
.setQuery(
|
||||
nestedQuery("comments",
|
||||
nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg)
|
||||
.innerHit(new InnerHitBuilder().setName("remark")),
|
||||
ScoreMode.Avg).innerHit(new InnerHitBuilder())
|
||||
).get();
|
||||
assertNoFailures(response);
|
||||
assertHitCount(response, 1);
|
||||
assertSearchHit(response, 1, hasId("2"));
|
||||
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
|
||||
innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
|
||||
innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
|
||||
assertThat(innerHits.totalHits(), equalTo(1L));
|
||||
assertThat(innerHits.getHits().length, equalTo(1));
|
||||
assertThat(innerHits.getAt(0).getId(), equalTo("2"));
|
||||
@ -863,22 +705,21 @@ public class InnerHitsIT extends ESIntegTestCase {
|
||||
requests.add(client().prepareIndex("royals", "baron", "baron4").setParent("earl4").setRouting("king").setSource("{}"));
|
||||
indexRandom(true, requests);
|
||||
|
||||
InnerHitsBuilder innerInnerHitsBuilder = new InnerHitsBuilder();
|
||||
innerInnerHitsBuilder.addInnerHit("barons", new InnerHitBuilder().setParentChildType("baron"));
|
||||
InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
|
||||
innerHitsBuilder.addInnerHit("earls", new InnerHitBuilder()
|
||||
.setParentChildType("earl")
|
||||
.addSort(SortBuilders.fieldSort("_uid").order(SortOrder.ASC))
|
||||
.setSize(4)
|
||||
.setInnerHitsBuilder(innerInnerHitsBuilder)
|
||||
);
|
||||
innerInnerHitsBuilder = new InnerHitsBuilder();
|
||||
innerInnerHitsBuilder.addInnerHit("kings", new InnerHitBuilder().setParentChildType("king"));
|
||||
innerHitsBuilder.addInnerHit("princes", new InnerHitBuilder().setParentChildType("prince")
|
||||
.setInnerHitsBuilder(innerInnerHitsBuilder));
|
||||
SearchResponse response = client().prepareSearch("royals")
|
||||
.setTypes("duke")
|
||||
.innerHits(innerHitsBuilder)
|
||||
.setQuery(boolQuery()
|
||||
.filter(hasParentQuery("prince",
|
||||
hasParentQuery("king", matchAllQuery(), false).innerHit(new InnerHitBuilder().setName("kings")),
|
||||
false).innerHit(new InnerHitBuilder().setName("princes"))
|
||||
)
|
||||
.filter(hasChildQuery("earl",
|
||||
hasChildQuery("baron", matchAllQuery(), ScoreMode.None).innerHit(new InnerHitBuilder().setName("barons")),
|
||||
ScoreMode.None).innerHit(new InnerHitBuilder()
|
||||
.addSort(SortBuilders.fieldSort("_uid").order(SortOrder.ASC))
|
||||
.setName("earls")
|
||||
.setSize(4))
|
||||
)
|
||||
)
|
||||
.get();
|
||||
assertHitCount(response, 1);
|
||||
assertThat(response.getHits().getAt(0).getId(), equalTo("duke"));
|
||||
@ -1086,25 +927,4 @@ public class InnerHitsIT extends ESIntegTestCase {
|
||||
assertHitCount(response, 1);
|
||||
}
|
||||
|
||||
public void testTopLevelInnerHitsWithQueryInnerHits() throws Exception {
|
||||
// top level inner hits shouldn't overwrite query inner hits definitions
|
||||
|
||||
assertAcked(prepareCreate("index1").addMapping("child", "_parent", "type=parent"));
|
||||
List<IndexRequestBuilder> requests = new ArrayList<>();
|
||||
requests.add(client().prepareIndex("index1", "parent", "1").setSource("{}"));
|
||||
requests.add(client().prepareIndex("index1", "child", "2").setParent("1").setSource("{}"));
|
||||
indexRandom(true, requests);
|
||||
|
||||
InnerHitsBuilder innerHitsBuilder = new InnerHitsBuilder();
|
||||
innerHitsBuilder.addInnerHit("my-inner-hit", new InnerHitBuilder().setParentChildType("child"));
|
||||
SearchResponse response = client().prepareSearch("index1")
|
||||
.setQuery(hasChildQuery("child", new MatchAllQueryBuilder(), ScoreMode.None).innerHit(new InnerHitBuilder()))
|
||||
.innerHits(innerHitsBuilder)
|
||||
.get();
|
||||
assertHitCount(response, 1);
|
||||
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(2));
|
||||
assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getId(), equalTo("2"));
|
||||
assertThat(response.getHits().getAt(0).getInnerHits().get("my-inner-hit").getAt(0).getId(), equalTo("2"));
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -32,6 +32,7 @@ public class SnapshotUtilsTests extends ESTestCase {
|
||||
public void testIndexNameFiltering() {
|
||||
assertIndexNameFiltering(new String[]{"foo", "bar", "baz"}, new String[]{}, new String[]{"foo", "bar", "baz"});
|
||||
assertIndexNameFiltering(new String[]{"foo", "bar", "baz"}, new String[]{"*"}, new String[]{"foo", "bar", "baz"});
|
||||
assertIndexNameFiltering(new String[]{"foo", "bar", "baz"}, new String[]{"_all"}, new String[]{"foo", "bar", "baz"});
|
||||
assertIndexNameFiltering(new String[]{"foo", "bar", "baz"}, new String[]{"foo", "bar", "baz"}, new String[]{"foo", "bar", "baz"});
|
||||
assertIndexNameFiltering(new String[]{"foo", "bar", "baz"}, new String[]{"foo"}, new String[]{"foo"});
|
||||
assertIndexNameFiltering(new String[]{"foo", "bar", "baz"}, new String[]{"baz", "not_available"}, new String[]{"baz"});
|
||||
|
@ -32,7 +32,8 @@ public abstract class ESThreadPoolTestCase extends ESTestCase {
|
||||
return info;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException(name);
|
||||
assert "same".equals(name);
|
||||
return null;
|
||||
}
|
||||
|
||||
protected final ThreadPoolStats.Stats stats(final ThreadPool threadPool, final String name) {
|
||||
|
@ -193,7 +193,6 @@ public class ScalingThreadPoolTests extends ESThreadPoolTestCase {
|
||||
} catch (InterruptedException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
assertThat(stats(threadPool, threadPoolName).getCompleted(), equalTo(128L));
|
||||
}));
|
||||
}
|
||||
|
||||
|
@ -111,9 +111,9 @@ def wait_for_node_startup(es_dir, timeout=60, header={}):
|
||||
conn = None
|
||||
try:
|
||||
time.sleep(1)
|
||||
host, port = get_host_from_ports_file(es_dir)
|
||||
conn = HTTPConnection(host=host, port=port, timeout=timeout)
|
||||
conn.request('GET', '', headers=header)
|
||||
host = get_host_from_ports_file(es_dir)
|
||||
conn = HTTPConnection(host, timeout=1)
|
||||
conn.request('GET', '/', headers=header)
|
||||
res = conn.getresponse()
|
||||
if res.status == 200:
|
||||
return True
|
||||
@ -160,7 +160,7 @@ def download_and_verify(version, hash, files, base_url, plugins=DEFAULT_PLUGINS)
|
||||
# way we keep the executing host unmodified since we don't have to import the key into the default keystore
|
||||
gpg_home_dir = os.path.join(current_artifact_dir, "gpg_home_dir")
|
||||
os.makedirs(gpg_home_dir, 0o700)
|
||||
run('gpg --homedir %s --keyserver pgp.mit.edu --recv-key D88E42B4' % gpg_home_dir)
|
||||
run('gpg --homedir %s --keyserver pool.sks-keyservers.net --recv-key D88E42B4' % gpg_home_dir)
|
||||
run('cd %s && gpg --homedir %s --verify %s' % (current_artifact_dir, gpg_home_dir, os.path.basename(gpg_file)))
|
||||
print(' ' + '*' * 80)
|
||||
print()
|
||||
@ -170,9 +170,7 @@ def download_and_verify(version, hash, files, base_url, plugins=DEFAULT_PLUGINS)
|
||||
shutil.rmtree(tmp_dir)
|
||||
|
||||
def get_host_from_ports_file(es_dir):
|
||||
first_host_with_port = read_fully(os.path.join(es_dir, 'logs/http.ports')).splitlines()[0]
|
||||
host = urlparse('http://%s' % first_host_with_port)
|
||||
return host.hostname, host.port
|
||||
return read_fully(os.path.join(es_dir, 'logs/http.ports')).splitlines()[0]
|
||||
|
||||
def smoke_test_release(release, files, expected_hash, plugins):
|
||||
for release_file in files:
|
||||
@ -199,7 +197,7 @@ def smoke_test_release(release, files, expected_hash, plugins):
|
||||
headers = { 'Authorization' : 'Basic %s' % base64.b64encode(b"es_admin:foobar").decode("UTF-8") }
|
||||
es_shield_path = os.path.join(es_dir, 'bin/x-pack/users')
|
||||
print(" Install dummy shield user")
|
||||
run('%s; %s useradd es_admin -r admin -p foobar' % (java_exe(), es_shield_path))
|
||||
run('%s; %s useradd es_admin -r superuser -p foobar' % (java_exe(), es_shield_path))
|
||||
else:
|
||||
headers = {}
|
||||
print(' Starting elasticsearch daemon from [%s]' % es_dir)
|
||||
@ -214,9 +212,9 @@ def smoke_test_release(release, files, expected_hash, plugins):
|
||||
print('*' * 80)
|
||||
raise RuntimeError('server didn\'t start up')
|
||||
try: # we now get / and /_nodes to fetch basic infos like hashes etc and the installed plugins
|
||||
host,port = get_host_from_ports_file(es_dir)
|
||||
conn = HTTPConnection(host=host, port=port, timeout=20)
|
||||
conn.request('GET', '', headers=headers)
|
||||
host = get_host_from_ports_file(es_dir)
|
||||
conn = HTTPConnection(host, timeout=20)
|
||||
conn.request('GET', '/', headers=headers)
|
||||
res = conn.getresponse()
|
||||
if res.status == 200:
|
||||
version = json.loads(res.read().decode("utf-8"))['version']
|
||||
|
@ -361,12 +361,14 @@ are:
|
||||
`m`:: Minute
|
||||
`s`:: Second
|
||||
`ms`:: Milli-second
|
||||
`micros`:: Micro-second
|
||||
`nanos`:: Nano-second
|
||||
|
||||
[[size-units]]
|
||||
[[byte-units]]
|
||||
[float]
|
||||
=== Data size units
|
||||
=== Byte size units
|
||||
|
||||
Whenever the size of data needs to be specified, eg when setting a buffer size
|
||||
Whenever the byte size of data needs to be specified, eg when setting a buffer size
|
||||
parameter, the value must specify the unit, like `10kb` for 10 kilobytes. The
|
||||
supported units are:
|
||||
|
||||
@ -378,6 +380,23 @@ supported units are:
|
||||
`tb`:: Terabytes
|
||||
`pb`:: Petabytes
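For instance, one of the byte units above can be used anywhere a buffer or threshold size is configured; the request below is only an illustration (the index name and the setting shown are assumptions, not part of the original text):

[source,js]
--------------------------------------------------
PUT /twitter/_settings
{
  "index.translog.flush_threshold_size" : "512mb"
}
--------------------------------------------------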
|
||||
|
||||
[[size-units]]
|
||||
[float]
|
||||
=== Unit-less quantities
|
||||
|
||||
Unit-less quantities are quantities that don't have a "unit" like "bytes" or "Hertz" or "meter" or "long tonne".
|
||||
|
||||
If one of these quantities is large we'll print it out like 10m for 10,000,000 or 7k for 7,000. We'll still print 87
|
||||
when we mean 87 though. These are the supported multipliers:
|
||||
|
||||
[horizontal]
|
||||
``:: Single
|
||||
`k`:: Kilo
|
||||
`m`:: Mega
|
||||
`g`:: Giga
|
||||
`t`:: Tera
|
||||
`p`:: Peta
|
||||
|
||||
[[distance-units]]
|
||||
[float]
|
||||
=== Distance Units
|
||||
|
@ -74,8 +74,8 @@ with `bulk.`.
|
||||
[[numeric-formats]]
|
||||
=== Numeric formats
|
||||
|
||||
Many commands provide a few types of numeric output, either a byte
|
||||
value or a time value. By default, these types are human-formatted,
|
||||
Many commands provide a few types of numeric output, either a byte, size
|
||||
or a time value. By default, these types are human-formatted,
|
||||
for example, `3.5mb` instead of `3763212`. The human values are not
|
||||
sortable numerically, so in order to operate on these values where
|
||||
order is important, you can change it.
|
||||
@ -95,6 +95,12 @@ green wiki1 3 0 10000 413 103776272 103776272
|
||||
green foo 1 0 227 0 2065131 2065131
|
||||
--------------------------------------------------
|
||||
|
||||
If you want to change the <<time-units,time units>>, use the `time` parameter.
|
||||
|
||||
If you want to change the <<size-units,size units>>, use the `size` parameter.
|
||||
|
||||
If you want to change the <<byte-units,byte units>>, use the `bytes` parameter.
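For example, a request of this kind could look as follows (a sketch only, not part of the original page); asking for raw byte values makes the output sortable numerically:

[source,js]
--------------------------------------------------
GET /_cat/indices?v&bytes=b
--------------------------------------------------

The `time` and `size` parameters are accepted in the same way when the time and size columns should be printed as exact numbers instead of the human-readable defaults.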
|
||||
|
||||
[float]
|
||||
=== Response as text, json, smile, yaml or cbor
|
||||
|
||||
|
@ -14,13 +14,14 @@ associated with it.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
$ curl -XPUT 'http://localhost:9200/twitter/'
|
||||
|
||||
$ curl -XPUT 'http://localhost:9200/twitter/' -d '
|
||||
index :
|
||||
number_of_shards : 3 <1>
|
||||
number_of_replicas : 2 <2>
|
||||
'
|
||||
$ curl -XPUT 'http://localhost:9200/twitter/' -d '{
|
||||
"settings" : {
|
||||
"index" : {
|
||||
"number_of_shards" : 3 <1>
|
||||
"number_of_replicas" : 2 <2>
|
||||
}
|
||||
}
|
||||
}'
|
||||
--------------------------------------------------
|
||||
<1> Default for `number_of_shards` is 5
|
||||
<2> Default for `number_of_replicas` is 1 (ie one replica for each primary shard)
|
||||
|
@ -739,6 +739,67 @@ Here is an example that adds the parsed date to the `timestamp` field based on t
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
[[date-index-name-processor]]
|
||||
=== Date Index Name Processor
|
||||
|
||||
The purpose of this processor is to point documents to the right time-based index, based
|
||||
on a date or timestamp field in a document by using the <<date-math-index-names, date math index name support>>.
|
||||
|
||||
The processor sets the `_index` meta field with a date math index name expression based on the provided index name
|
||||
prefix, a date or timestamp field in the documents being processed and the provided date rounding.
|
||||
|
||||
First this processor fetches the date or timestamp from a field in the document being processed. Optionally
|
||||
date formatting can be configured on how the field's value should be parsed into a date. Then this date,
|
||||
the provided index name prefix and the provided date rounding get formatted into a date math index name expression.
|
||||
Optionally, a date format can also be specified to control how the date is formatted into the date math index name
|
||||
expression.
|
||||
|
||||
An example pipeline that points documents to a monthly index that starts with a `myindex-` prefix based on a
|
||||
date in the `date1` field:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _ingest/pipeline/1
|
||||
{
|
||||
"processors" : [
|
||||
{
|
||||
"date_index_name" : {
|
||||
"field" : "date1",
|
||||
"index_name_prefix" : "myindex-",
|
||||
"date_rounding" : "m"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
Using that pipeline for an index request:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT /myindex/type/1?pipeline=1
|
||||
{
|
||||
"date1" : "2016-04-25T12:02:01.789Z"
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
The above request will not index this document into the `myindex` index, but into the `myindex-2016-04-01` index.
|
||||
This is because the date is being rounded by month.
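If you want to see which index a document would be routed to without actually indexing it, the simulate API can be used with the same pipeline; the request below is only a sketch that reuses the document from the example above:

[source,js]
--------------------------------------------------
POST /_ingest/pipeline/1/_simulate
{
  "docs" : [
    {
      "_source" : {
        "date1" : "2016-04-25T12:02:01.789Z"
      }
    }
  ]
}
--------------------------------------------------

The simulated document in the response should then show the date math expression that the processor wrote into the `_index` meta field.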
|
||||
|
||||
[[date-index-name-options]]
|
||||
.Date index name options
|
||||
[options="header"]
|
||||
|======
|
||||
| Name | Required | Default | Description
|
||||
| `field` | yes | - | The field to get the date or timestamp from.
|
||||
| `index_name_prefix` | no | - | A prefix of the index name to be prepended before the printed date.
|
||||
| `date_rounding` | yes | - | How to round the date when formatting the date into the index name. Valid values are: `y` (year), `m` (month), `w` (week), `d` (day), `h` (hour), `m` (minute) and `s` (second).
|
||||
| `date_formats` | no | yyyy-MM-dd'T'HH:mm:ss.SSSZ | An array of the expected date formats for parsing dates / timestamps in the document being preprocessed. Can be a Joda pattern or one of the following formats: ISO8601, UNIX, UNIX_MS, or TAI64N.
|
||||
| `timezone` | no | UTC | The timezone to use when parsing the date and when the date math index name expression is resolved to a concrete index name.
|
||||
| `locale` | no | ENGLISH | The locale to use when parsing the date from the document being preprocessed, relevant when parsing month names or week days.
|
||||
| `index_name_format` | no | yyyy-MM-dd | The format to be used when printing the parsed date into the index name. A valid Joda pattern is expected here.
|
||||
|======
|
||||
|
||||
[[fail-processor]]
|
||||
=== Fail Processor
|
||||
Raises an exception. This is useful for when
|
||||
@ -1179,6 +1240,7 @@ its value will be replaced with the provided one.
|
||||
| Name | Required | Default | Description
|
||||
| `field` | yes | - | The field to insert, upsert, or update
|
||||
| `value` | yes | - | The value to be set for the field
|
||||
| `override` | no | true | Whether the processor should update a field that already holds a non-null value. When set to `false`, such fields will not be touched.
|
||||
|======
|
||||
|
||||
[source,js]
|
||||
|
@ -6,9 +6,10 @@ Each document indexed is associated with a <<mapping-type-field,`_type`>> (see
|
||||
indexed as its value can be derived automatically from the
|
||||
<<mapping-uid-field,`_uid`>> field.
|
||||
|
||||
The value of the `_id` field is accessible in queries and scripts, but _not_
|
||||
in aggregations or when sorting, where the <<mapping-uid-field,`_uid`>> field
|
||||
should be used instead:
|
||||
The value of the `_id` field is accessible in certain queries (`term`,
|
||||
`terms`, `match`, `query_string`, `simple_query_string`) and scripts, but
|
||||
_not_ in aggregations or when sorting, where the <<mapping-uid-field,`_uid`>>
|
||||
field should be used instead:
|
||||
|
||||
[source,js]
|
||||
--------------------------
|
||||
|
@ -30,7 +30,7 @@ PUT my_index
|
||||
--------------------------------
|
||||
// AUTOSENSE
|
||||
|
||||
<1> The `title` and `content` fields with be included in the `_all` field.
|
||||
<1> The `title` and `content` fields will be included in the `_all` field.
|
||||
<2> The `date` field will not be included in the `_all` field.
|
||||
|
||||
TIP: The `include_in_all` setting is allowed to have different settings for
|
||||
|
@ -29,7 +29,7 @@ string:: <<text,`text`>> and <<keyword,`keyword`>>
|
||||
[float]
|
||||
=== Specialised datatypes
|
||||
|
||||
<<ip>>:: `ip` for IPv4 addresses
|
||||
<<ip>>:: `ip` for IPv4 and IPv6 addresses
|
||||
<<search-suggesters-completion,Completion datatype>>::
|
||||
`completion` to provide auto-complete suggestions
|
||||
<<token-count>>:: `token_count` to count the number of tokens in a string
|
||||
|
@ -16,6 +16,8 @@ to 5.x the `.scripts` index will remain to exist, so it can be used by a script
|
||||
the stored scripts from the `.scripts` index into the cluster state. The format of the scripts
|
||||
hasn't changed.
|
||||
|
||||
===== Python migration script
|
||||
|
||||
The following Python script can be used to import your indexed scripts into the cluster state
|
||||
as stored scripts:
|
||||
|
||||
@ -32,9 +34,37 @@ for doc in helpers.scan(es, index=".scripts", preserve_order=True):
|
||||
-----------------------------------
|
||||
|
||||
This script makes use of the official Elasticsearch Python client and
|
||||
therefor you need to make sure that your have installed the client in your
|
||||
environment. For more information on this please visit the
|
||||
https://www.elastic.co/guide/en/elasticsearch/client/python-api/current/index.html[elasticsearch-py page].
|
||||
therefore you need to make sure that you have installed the client in your
|
||||
environment. For more information on this please see
|
||||
https://www.elastic.co/guide/en/elasticsearch/client/python-api/current/index.html[`elasticsearch-py`].
|
||||
|
||||
===== Perl migration script
|
||||
|
||||
The following Perl script can be used to import your indexed scripts into the cluster state
|
||||
as stored scripts:
|
||||
|
||||
[source,perl]
|
||||
-----------------------------------
|
||||
use Search::Elasticsearch;
|
||||
|
||||
my $es = Search::Elasticsearch->new( nodes => 'localhost:9200');
|
||||
my $scroll = $es->scroll_helper( index => '.scripts', sort => '_doc');
|
||||
|
||||
while (my $doc = $scroll->next) {
|
||||
$es->put_script(
|
||||
lang => $doc->{_type},
|
||||
id => $doc->{_id},
|
||||
body => $doc->{_source}
|
||||
);
|
||||
}
|
||||
-----------------------------------
|
||||
|
||||
This script makes use of the official Elasticsearch Perl client and
|
||||
therefore you need to make sure that you have installed the client in your
|
||||
environment. For more information on this please see
|
||||
https://metacpan.org/pod/Search::Elasticsearch[`Search::Elasticsearch`].
|
||||
|
||||
===== Verifying script migration
|
||||
|
||||
After you have moved the scripts via the provided script or otherwise, you can verify with the following
|
||||
request that the migration has happened successfully:
|
||||
|
@ -151,5 +151,6 @@ specifying the sort order with the `order` option.
|
||||
|
||||
==== Inner hits
|
||||
|
||||
* The format of top level inner hits has been changed to be more readable. All options are now set on the same level.
|
||||
So the `path` and `type` options are specified on the same level where `query` and other options are specified.
|
||||
* Top level inner hits syntax has been removed. Inner hits can now only be specified as part of the `nested`,
|
||||
`has_child` and `has_parent` queries. Use cases previously only possible with top level inner hits can now be done
|
||||
with inner hits defined inside the query dsl.
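For example, an inner hits definition now sits directly on the query it belongs to; the snippet below is a minimal sketch (field and index names are made up) rather than text from the original notes:

[source,js]
--------------------------------------------------
{
  "query" : {
    "nested" : {
      "path" : "comments",
      "query" : { "match" : { "comments.message" : "fox" } },
      "inner_hits" : {}
    }
  }
}
--------------------------------------------------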
|
||||
|
@ -226,78 +226,4 @@ An example of a response snippet that could be generated from the above search r
|
||||
}
|
||||
},
|
||||
...
|
||||
--------------------------------------------------
|
||||
|
||||
[[top-level-inner-hits]]
|
||||
==== top level inner hits
|
||||
|
||||
Besides defining inner hits on query and filters, inner hits can also be defined as a top level construct alongside the
|
||||
`query` and `aggregations` definition. The main reason for using the top level inner hits definition is to let the
|
||||
inner hits return documents that don't match with the main query. Also inner hits definitions can be nested via the
|
||||
top level notation. Other than that, the inner hit definition inside the query should be used because that is the most
|
||||
compact way for defining inner hits.
|
||||
|
||||
The following snippet explains the basic structure of inner hits defined at the top level of the search request body:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
"inner_hits" : {
|
||||
"<inner_hits_name>" : {
|
||||
"<path|type>" : {
|
||||
"<path-to-nested-object-field|child-or-parent-type>" : {
|
||||
<inner_hits_body>
|
||||
[,"inner_hits" : { [<sub_inner_hits>]+ } ]?
|
||||
}
|
||||
}
|
||||
}
|
||||
[,"<inner_hits_name_2>" : { ... } ]*
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
Inside the `inner_hits` definition, first the name of the inner hit is defined then whether the inner_hit
|
||||
is a nested by defining `path` or a parent/child based definition by defining `type`. The next object layer contains
|
||||
the name of the nested object field if the inner_hits is nested or the parent or child type if the inner_hit definition
|
||||
is parent/child based.
|
||||
|
||||
Multiple inner hit definitions can be defined in a single request. In the `<inner_hits_body>` any option for features
|
||||
that `inner_hits` support can be defined. Optionally another `inner_hits` definition can be defined in the `<inner_hits_body>`.
|
||||
|
||||
An example that shows the use of nested inner hits via the top level notation:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"query" : {
|
||||
"nested" : {
|
||||
"path" : "comments",
|
||||
"query" : {
|
||||
"match" : {"comments.message" : "[actual query]"}
|
||||
}
|
||||
}
|
||||
},
|
||||
"inner_hits" : {
|
||||
"comment" : { <1>
|
||||
"path" : "comments", <2>
|
||||
"query" : {
|
||||
"match" : {"comments.message" : "[different query]"} <3>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
<1> The inner hit definition with the name `comment`.
|
||||
<2> The path option refers to the nested object field `comments`
|
||||
<3> A query that runs to collect the nested inner documents for each search hit returned. If no query is defined all nested
|
||||
inner documents will be included belonging to a search hit. This shows that it only make sense to the top level
|
||||
inner hit definition if no query or a different query is specified.
|
||||
|
||||
Additional options that are only available when using the top level inner hits notation:
|
||||
|
||||
[horizontal]
|
||||
`path`:: Defines the nested scope where hits will be collected from.
|
||||
`type`:: Defines the parent or child type score where hits will be collected from.
|
||||
`query`:: Defines the query that will run in the defined nested, parent or child scope to collect and score hits. By default all document in the scope will be matched.
|
||||
|
||||
Either `path` or `type` must be defined. The `path` or `type` defines the scope from where hits are fetched and
|
||||
used as inner hits.
|
||||
--------------------------------------------------
|
@ -12,15 +12,16 @@
|
||||
- match: { nodes.$master.ingest.processors.0.type: append }
|
||||
- match: { nodes.$master.ingest.processors.1.type: convert }
|
||||
- match: { nodes.$master.ingest.processors.2.type: date }
|
||||
- match: { nodes.$master.ingest.processors.3.type: fail }
|
||||
- match: { nodes.$master.ingest.processors.4.type: foreach }
|
||||
- match: { nodes.$master.ingest.processors.5.type: grok }
|
||||
- match: { nodes.$master.ingest.processors.6.type: gsub }
|
||||
- match: { nodes.$master.ingest.processors.7.type: join }
|
||||
- match: { nodes.$master.ingest.processors.8.type: lowercase }
|
||||
- match: { nodes.$master.ingest.processors.9.type: remove }
|
||||
- match: { nodes.$master.ingest.processors.10.type: rename }
|
||||
- match: { nodes.$master.ingest.processors.11.type: set }
|
||||
- match: { nodes.$master.ingest.processors.12.type: split }
|
||||
- match: { nodes.$master.ingest.processors.13.type: trim }
|
||||
- match: { nodes.$master.ingest.processors.14.type: uppercase }
|
||||
- match: { nodes.$master.ingest.processors.3.type: date_index_name }
|
||||
- match: { nodes.$master.ingest.processors.4.type: fail }
|
||||
- match: { nodes.$master.ingest.processors.5.type: foreach }
|
||||
- match: { nodes.$master.ingest.processors.6.type: grok }
|
||||
- match: { nodes.$master.ingest.processors.7.type: gsub }
|
||||
- match: { nodes.$master.ingest.processors.8.type: join }
|
||||
- match: { nodes.$master.ingest.processors.9.type: lowercase }
|
||||
- match: { nodes.$master.ingest.processors.10.type: remove }
|
||||
- match: { nodes.$master.ingest.processors.11.type: rename }
|
||||
- match: { nodes.$master.ingest.processors.12.type: set }
|
||||
- match: { nodes.$master.ingest.processors.13.type: split }
|
||||
- match: { nodes.$master.ingest.processors.14.type: trim }
|
||||
- match: { nodes.$master.ingest.processors.15.type: uppercase }
|
||||
|
@ -1,3 +1,5 @@
|
||||
import org.elasticsearch.gradle.LoggedExec
|
||||
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
@ -49,6 +51,35 @@ dependencies {
|
||||
compile 'org.codehaus.jackson:jackson-xc:1.9.2'
|
||||
}
|
||||
|
||||
// needed to be consistent with ssl host checking
|
||||
String host = InetAddress.getLoopbackAddress().getHostAddress();
|
||||
|
||||
// location of keystore and files to generate it
|
||||
File keystore = new File(project.buildDir, 'keystore/test-node.jks')
|
||||
|
||||
// generate the keystore
|
||||
task createKey(type: LoggedExec) {
|
||||
doFirst {
|
||||
project.delete(keystore.parentFile)
|
||||
keystore.parentFile.mkdirs()
|
||||
}
|
||||
executable = 'keytool'
|
||||
standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
|
||||
args '-genkey',
|
||||
'-alias', 'test-node',
|
||||
'-keystore', keystore,
|
||||
'-keyalg', 'RSA',
|
||||
'-keysize', '2048',
|
||||
'-validity', '712',
|
||||
'-dname', 'CN=' + host,
|
||||
'-keypass', 'keypass',
|
||||
'-storepass', 'keypass'
|
||||
}
|
||||
|
||||
// add keystore to test classpath: it expects it there
|
||||
sourceSets.test.resources.srcDir(keystore.parentFile)
|
||||
processTestResources.dependsOn(createKey)
|
||||
|
||||
dependencyLicenses {
|
||||
mapping from: /azure-.*/, to: 'azure'
|
||||
mapping from: /jackson-.*/, to: 'jackson'
|
||||
|
@ -25,6 +25,11 @@ import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider;
|
||||
import org.elasticsearch.discovery.azure.AzureUnicastHostsProvider.Deployment;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.util.function.Function;
|
||||
|
||||
public interface AzureComputeService {
|
||||
|
||||
@ -43,19 +48,30 @@ public interface AzureComputeService {
|
||||
public static final Setting<KeyStoreType> KEYSTORE_TYPE_SETTING =
|
||||
new Setting<>("cloud.azure.management.keystore.type", KeyStoreType.pkcs12.name(), KeyStoreType::fromString,
|
||||
Property.NodeScope, Property.Filtered);
|
||||
|
||||
// so that it can overridden for tests
|
||||
public static final Setting<URI> ENDPOINT_SETTING = new Setting<URI>("cloud.azure.management.endpoint",
|
||||
"https://management.core.windows.net/", s -> {
|
||||
try {
|
||||
return new URI(s);
|
||||
} catch (URISyntaxException e) {
|
||||
throw new IllegalArgumentException(e);
|
||||
}
|
||||
}, Property.NodeScope);
|
||||
}
|
||||
|
||||
final class Discovery {
|
||||
public static final Setting<TimeValue> REFRESH_SETTING =
|
||||
Setting.positiveTimeSetting("discovery.azure.refresh_interval", TimeValue.timeValueSeconds(0), Property.NodeScope);
|
||||
|
||||
public static final Setting<AzureUnicastHostsProvider.HostType> HOST_TYPE_SETTING =
|
||||
new Setting<>("discovery.azure.host.type", AzureUnicastHostsProvider.HostType.PRIVATE_IP.name(),
|
||||
AzureUnicastHostsProvider.HostType::fromString, Property.NodeScope);
|
||||
|
||||
public static final String ENDPOINT_NAME = "discovery.azure.endpoint.name";
|
||||
public static final String DEPLOYMENT_NAME = "discovery.azure.deployment.name";
|
||||
public static final String DEPLOYMENT_SLOT = "discovery.azure.deployment.slot";
|
||||
public static final Setting<String> ENDPOINT_NAME_SETTING = new Setting<>("discovery.azure.endpoint.name", "elasticsearch",
|
||||
Function.identity(), Property.NodeScope);
|
||||
public static final Setting<String> DEPLOYMENT_NAME_SETTING = Setting.simpleString("discovery.azure.deployment.name",
|
||||
Property.NodeScope);
|
||||
public static final Setting<Deployment> DEPLOYMENT_SLOT_SETTING = new Setting<>("discovery.azure.deployment.slot",
|
||||
Deployment.PRODUCTION.name(), Deployment::fromString, Property.NodeScope);
|
||||
}
|
||||
|
||||
HostedServiceGetDetailedResponse getServiceDetails();
|
||||
|
@ -33,8 +33,6 @@ import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
|
||||
/**
|
||||
*
|
||||
@ -42,10 +40,6 @@ import java.net.URISyntaxException;
|
||||
public class AzureComputeServiceImpl extends AbstractLifecycleComponent<AzureComputeServiceImpl>
|
||||
implements AzureComputeService {
|
||||
|
||||
static final class Azure {
|
||||
private static final String ENDPOINT = "https://management.core.windows.net/";
|
||||
}
|
||||
|
||||
private final ComputeManagementClient computeManagementClient;
|
||||
private final String serviceName;
|
||||
|
||||
@ -59,18 +53,18 @@ public class AzureComputeServiceImpl extends AbstractLifecycleComponent<AzureCom
|
||||
String keystorePassword = Management.KEYSTORE_PASSWORD_SETTING.get(settings);
|
||||
KeyStoreType keystoreType = Management.KEYSTORE_TYPE_SETTING.get(settings);
|
||||
|
||||
// Check that we have all needed properties
|
||||
Configuration configuration;
|
||||
try {
|
||||
configuration = ManagementConfiguration.configure(new URI(Azure.ENDPOINT),
|
||||
subscriptionId, keystorePath, keystorePassword, keystoreType);
|
||||
} catch (IOException|URISyntaxException e) {
|
||||
logger.error("can not start azure client: {}", e.getMessage());
|
||||
computeManagementClient = null;
|
||||
return;
|
||||
}
|
||||
logger.trace("creating new Azure client for [{}], [{}]", subscriptionId, serviceName);
|
||||
computeManagementClient = ComputeManagementService.create(configuration);
|
||||
ComputeManagementClient result;
|
||||
try {
|
||||
// Check that we have all needed properties
|
||||
Configuration configuration = ManagementConfiguration.configure(Management.ENDPOINT_SETTING.get(settings),
|
||||
subscriptionId, keystorePath, keystorePassword, keystoreType);
|
||||
result = ComputeManagementService.create(configuration);
|
||||
} catch (IOException e) {
|
||||
logger.error("can not start azure client: {}", e.getMessage());
|
||||
result = null;
|
||||
}
|
||||
this.computeManagementClient = result;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -30,8 +30,10 @@ import org.elasticsearch.cloud.azure.AzureServiceRemoteException;
|
||||
import org.elasticsearch.cloud.azure.management.AzureComputeService;
|
||||
import org.elasticsearch.cloud.azure.management.AzureComputeService.Discovery;
|
||||
import org.elasticsearch.cluster.node.DiscoveryNode;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.network.InetAddresses;
|
||||
import org.elasticsearch.common.network.NetworkAddress;
|
||||
import org.elasticsearch.common.network.NetworkService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
@ -92,7 +94,7 @@ public class AzureUnicastHostsProvider extends AbstractComponent implements Unic
|
||||
return deployment;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
throw new IllegalArgumentException("invalid value for deployment type [" + string + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@ -123,21 +125,14 @@ public class AzureUnicastHostsProvider extends AbstractComponent implements Unic
|
||||
this.refreshInterval = Discovery.REFRESH_SETTING.get(settings);
|
||||
|
||||
this.hostType = Discovery.HOST_TYPE_SETTING.get(settings);
|
||||
this.publicEndpointName = settings.get(Discovery.ENDPOINT_NAME, "elasticsearch");
|
||||
this.publicEndpointName = Discovery.ENDPOINT_NAME_SETTING.get(settings);
|
||||
|
||||
// Deployment name could be set with discovery.azure.deployment.name
|
||||
// Default to cloud.azure.management.cloud.service.name
|
||||
this.deploymentName = settings.get(Discovery.DEPLOYMENT_NAME);
|
||||
this.deploymentName = Discovery.DEPLOYMENT_NAME_SETTING.get(settings);
|
||||
|
||||
// Reading deployment_slot
|
||||
String strDeployment = settings.get(Discovery.DEPLOYMENT_SLOT, Deployment.PRODUCTION.deployment);
|
||||
Deployment tmpDeployment = Deployment.fromString(strDeployment);
|
||||
if (tmpDeployment == null) {
|
||||
logger.warn("wrong value for [{}]: [{}]. falling back to [{}]...", Discovery.DEPLOYMENT_SLOT, strDeployment,
|
||||
Deployment.PRODUCTION.deployment);
|
||||
tmpDeployment = Deployment.PRODUCTION;
|
||||
}
|
||||
this.deploymentSlot = tmpDeployment.slot;
|
||||
this.deploymentSlot = Discovery.DEPLOYMENT_SLOT_SETTING.get(settings).slot;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -191,7 +186,7 @@ public class AzureUnicastHostsProvider extends AbstractComponent implements Unic
|
||||
}
|
||||
|
||||
// If provided, we check the deployment name
|
||||
if (deploymentName != null && !deploymentName.equals(deployment.getName())) {
|
||||
if (Strings.hasLength(deploymentName) && !deploymentName.equals(deployment.getName())) {
|
||||
logger.debug("current deployment name [{}] different from [{}]. skipping...",
|
||||
deployment.getName(), deploymentName);
|
||||
continue;
|
||||
@ -219,7 +214,7 @@ public class AzureUnicastHostsProvider extends AbstractComponent implements Unic
|
||||
if (privateIp.equals(ipAddress)) {
|
||||
logger.trace("adding ourselves {}", NetworkAddress.format(ipAddress));
|
||||
}
|
||||
networkAddress = NetworkAddress.format(privateIp);
|
||||
networkAddress = InetAddresses.toUriString(privateIp);
|
||||
} else {
|
||||
logger.trace("no private ip provided. ignoring [{}]...", instance.getInstanceName());
|
||||
}
|
||||
|
@ -75,5 +75,8 @@ public class AzureDiscoveryPlugin extends Plugin {
|
||||
settingsModule.registerSetting(AzureComputeService.Management.SUBSCRIPTION_ID_SETTING);
|
||||
settingsModule.registerSetting(AzureComputeService.Management.SERVICE_NAME_SETTING);
|
||||
settingsModule.registerSetting(AzureComputeService.Discovery.HOST_TYPE_SETTING);
|
||||
settingsModule.registerSetting(AzureComputeService.Discovery.DEPLOYMENT_NAME_SETTING);
|
||||
settingsModule.registerSetting(AzureComputeService.Discovery.DEPLOYMENT_SLOT_SETTING);
|
||||
settingsModule.registerSetting(AzureComputeService.Discovery.ENDPOINT_NAME_SETTING);
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,285 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.discovery.azure;
|
||||
|
||||
import com.microsoft.windowsazure.management.compute.models.DeploymentSlot;
|
||||
import com.microsoft.windowsazure.management.compute.models.DeploymentStatus;
|
||||
import com.sun.net.httpserver.Headers;
|
||||
import com.sun.net.httpserver.HttpsConfigurator;
|
||||
import com.sun.net.httpserver.HttpsServer;
|
||||
import org.elasticsearch.cloud.azure.management.AzureComputeService;
|
||||
import org.elasticsearch.common.SuppressForbidden;
|
||||
import org.elasticsearch.common.io.FileSystemUtils;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsModule;
|
||||
import org.elasticsearch.discovery.DiscoveryModule;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.transport.TransportSettings;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
import javax.net.ssl.KeyManagerFactory;
|
||||
import javax.net.ssl.SSLContext;
|
||||
import javax.net.ssl.TrustManagerFactory;
|
||||
import javax.xml.XMLConstants;
|
||||
import javax.xml.stream.XMLOutputFactory;
|
||||
import javax.xml.stream.XMLStreamException;
|
||||
import javax.xml.stream.XMLStreamWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.io.StringWriter;
|
||||
import java.net.InetAddress;
|
||||
import java.net.InetSocketAddress;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.security.KeyStore;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
|
||||
|
||||
@ESIntegTestCase.SuppressLocalMode
|
||||
@ESIntegTestCase.ClusterScope(numDataNodes = 2, numClientNodes = 0)
|
||||
@SuppressForbidden(reason = "use http server")
|
||||
// TODO this should be a IT but currently all ITs in this project run against a real cluster
|
||||
public class AzureDiscoveryClusterFormationTests extends ESIntegTestCase {
|
||||
|
||||
public static class TestPlugin extends Plugin {
|
||||
|
||||
@Override
|
||||
public String name() {
|
||||
return AzureDiscoveryClusterFormationTests.class.getName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String description() {
|
||||
return AzureDiscoveryClusterFormationTests.class.getName();
|
||||
}
|
||||
|
||||
public void onModule(SettingsModule settingsModule) {
|
||||
settingsModule.registerSetting(AzureComputeService.Management.ENDPOINT_SETTING);
|
||||
}
|
||||
}
|
||||
|
||||
private static HttpsServer httpsServer;
|
||||
private static Path logDir;
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
return pluginList(AzureDiscoveryPlugin.class, TestPlugin.class);
|
||||
}
|
||||
|
||||
private static Path keyStoreFile;
|
||||
|
||||
@BeforeClass
|
||||
public static void setupKeyStore() throws IOException {
|
||||
Path tempDir = createTempDir();
|
||||
keyStoreFile = tempDir.resolve("test-node.jks");
|
||||
try (InputStream stream = AzureDiscoveryClusterFormationTests.class.getResourceAsStream("/test-node.jks")) {
|
||||
assertNotNull("can't find keystore file", stream);
|
||||
Files.copy(stream, keyStoreFile);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Settings nodeSettings(int nodeOrdinal) {
|
||||
Path resolve = logDir.resolve(Integer.toString(nodeOrdinal));
|
||||
try {
|
||||
Files.createDirectory(resolve);
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
return Settings.builder().put(super.nodeSettings(nodeOrdinal))
|
||||
.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), AzureDiscoveryPlugin.AZURE)
|
||||
.put(Environment.PATH_LOGS_SETTING.getKey(), resolve)
|
||||
.put(TransportSettings.PORT.getKey(), 0)
|
||||
.put(Node.WRITE_PORTS_FIELD_SETTING.getKey(), "true")
|
||||
.put(AzureComputeService.Management.ENDPOINT_SETTING.getKey(), "https://" + InetAddress.getLoopbackAddress().getHostAddress() +
|
||||
":" + httpsServer.getAddress().getPort())
|
||||
.put(Environment.PATH_CONF_SETTING.getKey(), keyStoreFile.getParent().toAbsolutePath())
|
||||
.put(AzureComputeService.Management.KEYSTORE_PATH_SETTING.getKey(), keyStoreFile.toAbsolutePath())
|
||||
.put(AzureComputeService.Discovery.HOST_TYPE_SETTING.getKey(), AzureUnicastHostsProvider.HostType.PUBLIC_IP.name())
|
||||
.put(AzureComputeService.Management.KEYSTORE_PASSWORD_SETTING.getKey(), "keypass")
|
||||
.put(AzureComputeService.Management.KEYSTORE_TYPE_SETTING.getKey(), "jks")
|
||||
.put(AzureComputeService.Management.SERVICE_NAME_SETTING.getKey(), "myservice")
|
||||
.put(AzureComputeService.Management.SUBSCRIPTION_ID_SETTING.getKey(), "subscription")
|
||||
.put(AzureComputeService.Discovery.DEPLOYMENT_NAME_SETTING.getKey(), "mydeployment")
|
||||
.put(AzureComputeService.Discovery.ENDPOINT_NAME_SETTING.getKey(), "myendpoint")
|
||||
.put(AzureComputeService.Discovery.DEPLOYMENT_SLOT_SETTING.getKey(), AzureUnicastHostsProvider.Deployment.PRODUCTION.name())
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a mock Azure endpoint providing the list of started nodes to the hosted services API call
|
||||
*/
    @BeforeClass
    public static void startHttpd() throws Exception {
        logDir = createTempDir();
        SSLContext sslContext = getSSLContext();
        httpsServer = HttpsServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0), 0);
        httpsServer.setHttpsConfigurator(new HttpsConfigurator(sslContext));
        httpsServer.createContext("/subscription/services/hostedservices/myservice", (s) -> {
            Headers headers = s.getResponseHeaders();
            headers.add("Content-Type", "text/xml; charset=UTF-8");
            XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory();
            xmlOutputFactory.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, true);
            StringWriter out = new StringWriter();
            XMLStreamWriter sw;
            try {
                sw = xmlOutputFactory.createXMLStreamWriter(out);
                sw.writeStartDocument();

                String namespace = "http://schemas.microsoft.com/windowsazure";
                sw.setDefaultNamespace(namespace);
                sw.writeStartElement(XMLConstants.DEFAULT_NS_PREFIX, "HostedService", namespace);
                {
                    sw.writeStartElement("Deployments");
                    {
                        Path[] files = FileSystemUtils.files(logDir);
                        for (int i = 0; i < files.length; i++) {
                            Path resolve = files[i].resolve("transport.ports");
                            if (Files.exists(resolve)) {
                                List<String> addresses = Files.readAllLines(resolve);
                                Collections.shuffle(addresses, random());
                                String address = addresses.get(0);
                                int indexOfLastColon = address.lastIndexOf(':');
                                String host = address.substring(0, indexOfLastColon);
                                String port = address.substring(indexOfLastColon + 1);

                                sw.writeStartElement("Deployment");
                                {
                                    sw.writeStartElement("Name");
                                    sw.writeCharacters("mydeployment");
                                    sw.writeEndElement();

                                    sw.writeStartElement("DeploymentSlot");
                                    sw.writeCharacters(DeploymentSlot.Production.name());
                                    sw.writeEndElement();

                                    sw.writeStartElement("Status");
                                    sw.writeCharacters(DeploymentStatus.Running.name());
                                    sw.writeEndElement();

                                    sw.writeStartElement("RoleInstanceList");
                                    {
                                        sw.writeStartElement("RoleInstance");
                                        {
                                            sw.writeStartElement("RoleName");
                                            sw.writeCharacters(UUID.randomUUID().toString());
                                            sw.writeEndElement();

                                            sw.writeStartElement("IpAddress");
                                            sw.writeCharacters(host);
                                            sw.writeEndElement();

                                            sw.writeStartElement("InstanceEndpoints");
                                            {
                                                sw.writeStartElement("InstanceEndpoint");
                                                {
                                                    sw.writeStartElement("Name");
                                                    sw.writeCharacters("myendpoint");
                                                    sw.writeEndElement();

                                                    sw.writeStartElement("Vip");
                                                    sw.writeCharacters(host);
                                                    sw.writeEndElement();

                                                    sw.writeStartElement("PublicPort");
                                                    sw.writeCharacters(port);
                                                    sw.writeEndElement();
                                                }
                                                sw.writeEndElement();
                                            }
                                            sw.writeEndElement();
                                        }
                                        sw.writeEndElement();
                                    }
                                    sw.writeEndElement();
                                }
                                sw.writeEndElement();
                            }
                        }
                    }
                    sw.writeEndElement();
                }
                sw.writeEndElement();

                sw.writeEndDocument();
                sw.flush();

                final byte[] responseAsBytes = out.toString().getBytes(StandardCharsets.UTF_8);
                s.sendResponseHeaders(200, responseAsBytes.length);
                OutputStream responseBody = s.getResponseBody();
                responseBody.write(responseAsBytes);
                responseBody.close();
            } catch (XMLStreamException e) {
                Loggers.getLogger(AzureDiscoveryClusterFormationTests.class).error("Failed serializing XML", e);
                throw new RuntimeException(e);
            }
        });

        httpsServer.start();
    }

    private static SSLContext getSSLContext() throws Exception {
        char[] passphrase = "keypass".toCharArray();
        KeyStore ks = KeyStore.getInstance("JKS");
        try (InputStream stream = AzureDiscoveryClusterFormationTests.class.getResourceAsStream("/test-node.jks")) {
            assertNotNull("can't find keystore file", stream);
            ks.load(stream, passphrase);
        }
        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
        kmf.init(ks, passphrase);
        TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
        tmf.init(ks);
        SSLContext ssl = SSLContext.getInstance("TLS");
        ssl.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
        return ssl;
    }

    @AfterClass
    public static void stopHttpd() throws IOException {
        for (int i = 0; i < internalCluster().size(); i++) {
            // shut them all down otherwise we get spammed with connection refused exceptions
            internalCluster().stopRandomDataNode();
        }
        httpsServer.stop(0);
        httpsServer = null;
        logDir = null;
    }

    public void testJoin() throws ExecutionException, InterruptedException {
        // only wait for the cluster to form
        assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(2)).get());
        // add one more node and wait for it to join
        internalCluster().startDataOnlyNodeAsync().get();
        assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(3)).get());
    }
}
@ -248,7 +248,7 @@ public class Ec2DiscoveryClusterFormationTests extends ESIntegTestCase {
        logDir = null;
    }

    public void testJoin() throws ExecutionException, InterruptedException, XMLStreamException {
    public void testJoin() throws ExecutionException, InterruptedException {
        // only wait for the cluster to form
        assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(2)).get());
        // add one more node and wait for it to join
@ -12,15 +12,16 @@
- match: { nodes.$master.ingest.processors.1.type: attachment }
- match: { nodes.$master.ingest.processors.2.type: convert }
- match: { nodes.$master.ingest.processors.3.type: date }
- match: { nodes.$master.ingest.processors.4.type: fail }
- match: { nodes.$master.ingest.processors.5.type: foreach }
- match: { nodes.$master.ingest.processors.6.type: gsub }
- match: { nodes.$master.ingest.processors.7.type: join }
- match: { nodes.$master.ingest.processors.8.type: lowercase }
- match: { nodes.$master.ingest.processors.9.type: remove }
- match: { nodes.$master.ingest.processors.10.type: rename }
- match: { nodes.$master.ingest.processors.11.type: set }
- match: { nodes.$master.ingest.processors.12.type: split }
- match: { nodes.$master.ingest.processors.13.type: trim }
- match: { nodes.$master.ingest.processors.14.type: uppercase }
- match: { nodes.$master.ingest.processors.4.type: date_index_name }
- match: { nodes.$master.ingest.processors.5.type: fail }
- match: { nodes.$master.ingest.processors.6.type: foreach }
- match: { nodes.$master.ingest.processors.7.type: gsub }
- match: { nodes.$master.ingest.processors.8.type: join }
- match: { nodes.$master.ingest.processors.9.type: lowercase }
- match: { nodes.$master.ingest.processors.10.type: remove }
- match: { nodes.$master.ingest.processors.11.type: rename }
- match: { nodes.$master.ingest.processors.12.type: set }
- match: { nodes.$master.ingest.processors.13.type: split }
- match: { nodes.$master.ingest.processors.14.type: trim }
- match: { nodes.$master.ingest.processors.15.type: uppercase }
@ -58,6 +58,8 @@ import static org.elasticsearch.ingest.core.ConfigurationUtils.readStringPropert
public final class GeoIpProcessor extends AbstractProcessor {

    public static final String TYPE = "geoip";
    private static final String CITY_DB_TYPE = "GeoLite2-City";
    private static final String COUNTRY_DB_TYPE = "GeoLite2-Country";

    private final String field;
    private final String targetField;
@ -79,14 +81,14 @@ public final class GeoIpProcessor extends AbstractProcessor {

        Map<String, Object> geoData;
        switch (dbReader.getMetadata().getDatabaseType()) {
            case "GeoLite2-City":
            case CITY_DB_TYPE:
                try {
                    geoData = retrieveCityGeoData(ipAddress);
                } catch (AddressNotFoundRuntimeException e) {
                    geoData = Collections.emptyMap();
                }
                break;
            case "GeoLite2-Country":
            case COUNTRY_DB_TYPE:
                try {
                    geoData = retrieveCountryGeoData(ipAddress);
                } catch (AddressNotFoundRuntimeException e) {
@ -215,10 +217,11 @@ public final class GeoIpProcessor extends AbstractProcessor {
    }

    public static final class Factory extends AbstractProcessorFactory<GeoIpProcessor> implements Closeable {

        static final Set<Property> DEFAULT_PROPERTIES = EnumSet.of(
            Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION
        static final Set<Property> DEFAULT_CITY_PROPERTIES = EnumSet.of(
            Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME,
            Property.CITY_NAME, Property.LOCATION
        );
        static final Set<Property> DEFAULT_COUNTRY_PROPERTIES = EnumSet.of(Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE);

        private final Map<String, DatabaseReader> databaseReaders;

@ -233,24 +236,33 @@ public final class GeoIpProcessor extends AbstractProcessor {
            String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb");
            List<String> propertyNames = readOptionalList(TYPE, processorTag, config, "properties");

            DatabaseReader databaseReader = databaseReaders.get(databaseFile);
            if (databaseReader == null) {
                throw newConfigurationException(TYPE, processorTag, "database_file", "database file [" + databaseFile + "] doesn't exist");
            }

            String databaseType = databaseReader.getMetadata().getDatabaseType();

            final Set<Property> properties;
            if (propertyNames != null) {
                properties = EnumSet.noneOf(Property.class);
                for (String fieldName : propertyNames) {
                    try {
                        properties.add(Property.parse(fieldName));
                    } catch (Exception e) {
                        throw newConfigurationException(TYPE, processorTag, "properties", "illegal field option [" + fieldName + "]. valid values are [" + Arrays.toString(Property.values()) + "]");
                        properties.add(Property.parseProperty(databaseType, fieldName));
                    } catch (IllegalArgumentException e) {
                        throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage());
                    }
                }
            } else {
                properties = DEFAULT_PROPERTIES;
                if (CITY_DB_TYPE.equals(databaseType)) {
                    properties = DEFAULT_CITY_PROPERTIES;
                } else if (COUNTRY_DB_TYPE.equals(databaseType)) {
                    properties = DEFAULT_COUNTRY_PROPERTIES;
                } else {
                    throw newConfigurationException(TYPE, processorTag, "database_file", "Unsupported database type [" + databaseType + "]");
                }
            }

            DatabaseReader databaseReader = databaseReaders.get(databaseFile);
            if (databaseReader == null) {
                throw newConfigurationException(TYPE, processorTag, "database_file", "database file [" + databaseFile + "] doesn't exist");
            }
            return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, properties);
        }

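For readers skimming the hunk above: when no explicit "properties" list is configured, the factory now chooses its defaults from the database type rather than a single shared set, and rejects databases it does not recognize. Below is a minimal, self-contained sketch of that selection pattern; the class and method names here are illustrative only, not the plugin's actual API.

import java.util.EnumSet;
import java.util.Set;

// Illustrative sketch only: mirrors the per-database default selection added in Factory.create.
class GeoIpDefaultsSketch {
    enum Property { IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION }

    static final String CITY_DB_TYPE = "GeoLite2-City";
    static final String COUNTRY_DB_TYPE = "GeoLite2-Country";

    static final Set<Property> DEFAULT_CITY_PROPERTIES = EnumSet.of(
        Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME,
        Property.CITY_NAME, Property.LOCATION);
    static final Set<Property> DEFAULT_COUNTRY_PROPERTIES =
        EnumSet.of(Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE);

    // Pick defaults from the database type; unknown types fail fast, as in the diff.
    static Set<Property> defaultsFor(String databaseType) {
        if (CITY_DB_TYPE.equals(databaseType)) {
            return DEFAULT_CITY_PROPERTIES;
        } else if (COUNTRY_DB_TYPE.equals(databaseType)) {
            return DEFAULT_COUNTRY_PROPERTIES;
        }
        throw new IllegalArgumentException("Unsupported database type [" + databaseType + "]");
    }
}

In this sketch, defaultsFor("GeoLite2-Country") returns only the continent and country ISO code properties, which corresponds to the new DEFAULT_COUNTRY_PROPERTIES constant.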
@ -279,13 +291,29 @@ public final class GeoIpProcessor extends AbstractProcessor {
        REGION_NAME,
        CITY_NAME,
        TIMEZONE,
        LATITUDE,
        LONGITUDE,
        LOCATION;

        public static Property parse(String value) {
            return valueOf(value.toUpperCase(Locale.ROOT));
        static final EnumSet<Property> ALL_CITY_PROPERTIES = EnumSet.allOf(Property.class);
        static final EnumSet<Property> ALL_COUNTRY_PROPERTIES = EnumSet.of(Property.IP, Property.CONTINENT_NAME,
            Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE);

        public static Property parseProperty(String databaseType, String value) {
            Set<Property> validProperties = EnumSet.noneOf(Property.class);
            if (CITY_DB_TYPE.equals(databaseType)) {
                validProperties = ALL_CITY_PROPERTIES;
            } else if (COUNTRY_DB_TYPE.equals(databaseType)) {
                validProperties = ALL_COUNTRY_PROPERTIES;
            }

            try {
                Property property = valueOf(value.toUpperCase(Locale.ROOT));
                if (validProperties.contains(property) == false) {
                    throw new IllegalArgumentException("invalid");
                }
                return property;
            } catch (IllegalArgumentException e) {
                throw new IllegalArgumentException("illegal property value [" + value + "]. valid values are " + Arrays.toString(validProperties.toArray()));
            }
        }
    }

}
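The companion change to the Property enum replaces the old parse(String) with a database-aware parseProperty(String, String). A rough, self-contained usage sketch of that validation idea follows; the names are again illustrative rather than the plugin's real classes, and the allowed set is passed in directly to keep the example small.

import java.util.EnumSet;
import java.util.Locale;
import java.util.Set;

// Illustrative sketch only: validate a user-supplied property name against the
// set of properties the configured database type can actually provide.
class PropertyValidationSketch {
    enum Property { IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION }

    static Property parseProperty(Set<Property> validForType, String value) {
        try {
            Property property = Property.valueOf(value.toUpperCase(Locale.ROOT));
            if (validForType.contains(property) == false) {
                throw new IllegalArgumentException("invalid");
            }
            return property;
        } catch (IllegalArgumentException e) {
            throw new IllegalArgumentException(
                "illegal property value [" + value + "]. valid values are " + validForType);
        }
    }

    public static void main(String[] args) {
        Set<Property> countryProperties =
            EnumSet.of(Property.IP, Property.CONTINENT_NAME, Property.COUNTRY_NAME, Property.COUNTRY_ISO_CODE);
        System.out.println(parseProperty(countryProperties, "country_iso_code")); // prints COUNTRY_ISO_CODE
        parseProperty(countryProperties, "city_name"); // throws: city_name is only valid for a city database
    }
}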
@ -19,8 +19,10 @@

package org.elasticsearch.ingest.geoip;

import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.maxmind.geoip2.DatabaseReader;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.ingest.core.AbstractProcessorFactory;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.StreamsUtils;
@ -79,7 +81,25 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
        assertThat(processor.getField(), equalTo("_field"));
        assertThat(processor.getTargetField(), equalTo("geoip"));
        assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City"));
        assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_PROPERTIES));
        assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_CITY_PROPERTIES));
    }

    public void testCountryBuildDefaults() throws Exception {
        GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);

        Map<String, Object> config = new HashMap<>();
        config.put("field", "_field");
        config.put("database_file", "GeoLite2-Country.mmdb");

        String processorTag = randomAsciiOfLength(10);
        config.put(AbstractProcessorFactory.TAG_KEY, processorTag);

        GeoIpProcessor processor = factory.create(config);
        assertThat(processor.getTag(), equalTo(processorTag));
        assertThat(processor.getField(), equalTo("_field"));
        assertThat(processor.getTargetField(), equalTo("geoip"));
        assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country"));
        assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES));
    }

    public void testBuildTargetField() throws Exception {
@ -101,6 +121,23 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
        assertThat(processor.getField(), equalTo("_field"));
        assertThat(processor.getTargetField(), equalTo("geoip"));
        assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country"));
        assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_COUNTRY_PROPERTIES));
    }

    public void testBuildWithCountryDbAndCityFields() throws Exception {
        GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
        Map<String, Object> config = new HashMap<>();
        config.put("field", "_field");
        config.put("database_file", "GeoLite2-Country.mmdb");
        EnumSet<GeoIpProcessor.Property> cityOnlyProperties = EnumSet.complementOf(GeoIpProcessor.Property.ALL_COUNTRY_PROPERTIES);
        String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString();
        config.put("properties", Collections.singletonList(cityProperty));
        try {
            factory.create(config);
            fail("Exception expected");
        } catch (ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[properties] illegal property value [" + cityProperty + "]. valid values are [IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME]"));
        }
    }

    public void testBuildNonExistingDbFile() throws Exception {
@ -146,7 +183,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
            factory.create(config);
            fail("exception expected");
        } catch (ElasticsearchParseException e) {
            assertThat(e.getMessage(), equalTo("[properties] illegal field option [invalid]. valid values are [[IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LATITUDE, LONGITUDE, LOCATION]]"));
            assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LOCATION]"));
        }

        config = new HashMap<>();
@ -11,15 +11,16 @@
- match: { nodes.$master.ingest.processors.0.type: append }
- match: { nodes.$master.ingest.processors.1.type: convert }
- match: { nodes.$master.ingest.processors.2.type: date }
- match: { nodes.$master.ingest.processors.3.type: fail }
- match: { nodes.$master.ingest.processors.4.type: foreach }
- match: { nodes.$master.ingest.processors.5.type: geoip }
- match: { nodes.$master.ingest.processors.6.type: gsub }
- match: { nodes.$master.ingest.processors.7.type: join }
- match: { nodes.$master.ingest.processors.8.type: lowercase }
- match: { nodes.$master.ingest.processors.9.type: remove }
- match: { nodes.$master.ingest.processors.10.type: rename }
- match: { nodes.$master.ingest.processors.11.type: set }
- match: { nodes.$master.ingest.processors.12.type: split }
- match: { nodes.$master.ingest.processors.13.type: trim }
- match: { nodes.$master.ingest.processors.14.type: uppercase }
- match: { nodes.$master.ingest.processors.3.type: date_index_name }
- match: { nodes.$master.ingest.processors.4.type: fail }
- match: { nodes.$master.ingest.processors.5.type: foreach }
- match: { nodes.$master.ingest.processors.6.type: geoip }
- match: { nodes.$master.ingest.processors.7.type: gsub }
- match: { nodes.$master.ingest.processors.8.type: join }
- match: { nodes.$master.ingest.processors.9.type: lowercase }
- match: { nodes.$master.ingest.processors.10.type: remove }
- match: { nodes.$master.ingest.processors.11.type: rename }
- match: { nodes.$master.ingest.processors.12.type: set }
- match: { nodes.$master.ingest.processors.13.type: split }
- match: { nodes.$master.ingest.processors.14.type: trim }
- match: { nodes.$master.ingest.processors.15.type: uppercase }
@ -54,7 +54,7 @@
          {
            "geoip" : {
              "field" : "field1",
              "properties" : ["city_name", "country_iso_code", "ip", "latitude", "longitude", "location", "timezone", "country_name", "region_name", "continent_name"]
              "properties" : ["city_name", "country_iso_code", "ip", "location", "timezone", "country_name", "region_name", "continent_name"]
            }
          }
        ]
@ -143,4 +143,44 @@ public class MultifieldAttachmentMapperTests extends AttachmentUnitTestCase {
        // In mapping we set store:true for suggest subfield
        assertThat(doc.rootDoc().getField("file.name.suggest").fieldType().stored(), is(true));
    }

    public void testAllExternalValues() throws Exception {
        String originalText = "This is an elasticsearch mapper attachment test.";
        String forcedName = randomAsciiOfLength(20);
        String forcedLanguage = randomAsciiOfLength(20);
        String forcedContentType = randomAsciiOfLength(20);

        String bytes = Base64.encodeBytes(originalText.getBytes(StandardCharsets.ISO_8859_1));

        MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(),
            Settings.builder().put(AttachmentMapper.INDEX_ATTACHMENT_DETECT_LANGUAGE_SETTING.getKey(), true).build(),
            getIndicesModuleWithRegisteredAttachmentMapper());

        String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/attachment/test/unit/multifield/multifield-mapping.json");

        DocumentMapper documentMapper = mapperService.documentMapperParser().parse("person", new CompressedXContent(mapping));

        ParsedDocument doc = documentMapper.parse("person", "person", "1", XContentFactory.jsonBuilder()
            .startObject()
                .startObject("file")
                    .field("_content", bytes)
                    .field("_name", forcedName)
                    .field("_language", forcedLanguage)
                    .field("_content_type", forcedContentType)
                .endObject()
            .endObject()
            .bytes());

        // Note that we don't support forcing values for _title and _keywords

        assertThat(doc.rootDoc().getField("file.content"), notNullValue());
        assertThat(doc.rootDoc().getField("file.content").stringValue(), is(originalText + "\n"));

        assertThat(doc.rootDoc().getField("file.name"), notNullValue());
        assertThat(doc.rootDoc().getField("file.name").stringValue(), is(forcedName));
        assertThat(doc.rootDoc().getField("file.language"), notNullValue());
        assertThat(doc.rootDoc().getField("file.language").stringValue(), is(forcedLanguage));
        assertThat(doc.rootDoc().getField("file.content_type"), notNullValue());
        assertThat(doc.rootDoc().getField("file.content_type").stringValue(), is(forcedContentType));
    }
}
@ -19,7 +19,7 @@
      "bytes": {
        "type": "enum",
        "description" : "The unit in which to display byte values",
        "options": [ "b", "k", "m", "g" ]
        "options": [ "b", "k", "kb", "m", "mb", "g", "gb", "t", "tb", "p", "pb" ]
      },
      "local": {
        "type" : "boolean",
@ -19,7 +19,7 @@
      "bytes": {
        "type": "enum",
        "description" : "The unit in which to display byte values",
        "options": [ "b", "k", "m", "g" ]
        "options": [ "b", "k", "kb", "m", "mb", "g", "gb", "t", "tb", "p", "pb" ]
      },
      "local": {
        "type" : "boolean",