Merge branch 'master' into add_rewrite_infra

commit 25d6a143d4

@@ -272,7 +272,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
list = new ArrayList<>();
|
||||
requestsByShard.put(shardIt.shardId(), list);
|
||||
}
|
||||
list.add(new BulkItemRequest(i, new DeleteRequest(deleteRequest)));
|
||||
list.add(new BulkItemRequest(i, deleteRequest));
|
||||
}
|
||||
} else {
|
||||
ShardId shardId = clusterService.operationRouting().indexShards(clusterState, concreteIndex, deleteRequest.type(), deleteRequest.id(), deleteRequest.routing()).shardId();
@@ -19,7 +19,6 @@
|
||||
package org.elasticsearch.action.delete;
|
||||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.DocumentRequest;
|
||||
import org.elasticsearch.action.support.replication.ReplicationRequest;
@@ -80,28 +79,6 @@ public class DeleteRequest extends ReplicationRequest<DeleteRequest> implements
this.id = id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor that creates a new delete request that is a copy of the one provided as an argument.
|
||||
*/
|
||||
public DeleteRequest(DeleteRequest request) {
|
||||
this(request, request);
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor that creates a new delete request that is a copy of the one provided as an argument.
|
||||
* The new request will inherit though headers and context from the original request that caused it.
|
||||
*/
|
||||
public DeleteRequest(DeleteRequest request, ActionRequest originalRequest) {
|
||||
super(request);
|
||||
this.type = request.type();
|
||||
this.id = request.id();
|
||||
this.routing = request.routing();
|
||||
this.parent = request.parent();
|
||||
this.refresh = request.refresh();
|
||||
this.version = request.version();
|
||||
this.versionType = request.versionType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ActionRequestValidationException validate() {
|
||||
ActionRequestValidationException validationException = super.validate();
@@ -67,26 +67,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
type = "_all";
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor that creates a new get request that is a copy of the one provided as an argument.
|
||||
* The new request will inherit though headers and context from the original request that caused it.
|
||||
*/
|
||||
public GetRequest(GetRequest getRequest) {
|
||||
this.index = getRequest.index;
|
||||
this.type = getRequest.type;
|
||||
this.id = getRequest.id;
|
||||
this.routing = getRequest.routing;
|
||||
this.parent = getRequest.parent;
|
||||
this.preference = getRequest.preference;
|
||||
this.fields = getRequest.fields;
|
||||
this.fetchSourceContext = getRequest.fetchSourceContext;
|
||||
this.refresh = getRequest.refresh;
|
||||
this.realtime = getRequest.realtime;
|
||||
this.version = getRequest.version;
|
||||
this.versionType = getRequest.versionType;
|
||||
this.ignoreErrorsOnGeneratedFields = getRequest.ignoreErrorsOnGeneratedFields;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new get request against the specified index. The {@link #type(String)} and {@link #id(String)}
|
||||
* must be set.
|
||||
|
|
|
@@ -159,26 +159,6 @@ public class IndexRequest extends ReplicationRequest<IndexRequest> implements Do
public IndexRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor that creates a new index request that is a copy of the one provided as an argument.
|
||||
* The new request will inherit though headers and context from the original request that caused it.
|
||||
*/
|
||||
public IndexRequest(IndexRequest indexRequest) {
|
||||
super(indexRequest);
|
||||
this.type = indexRequest.type;
|
||||
this.id = indexRequest.id;
|
||||
this.routing = indexRequest.routing;
|
||||
this.parent = indexRequest.parent;
|
||||
this.timestamp = indexRequest.timestamp;
|
||||
this.ttl = indexRequest.ttl;
|
||||
this.source = indexRequest.source;
|
||||
this.opType = indexRequest.opType;
|
||||
this.refresh = indexRequest.refresh;
|
||||
this.version = indexRequest.version;
|
||||
this.versionType = indexRequest.versionType;
|
||||
this.contentType = indexRequest.contentType;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new index request against the specific index. The {@link #type(String)}
|
||||
* {@link #source(byte[])} must be set.
|
||||
|
|
|
@@ -103,7 +103,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio
void processBulkIndexRequest(Task task, BulkRequest original, String action, ActionFilterChain chain, ActionListener<BulkResponse> listener) {
|
||||
BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original);
|
||||
executionService.executeBulkRequest(() -> bulkRequestModifier, (indexRequest, throwable) -> {
|
||||
logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id(), throwable);
|
||||
logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", throwable, indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id());
|
||||
bulkRequestModifier.markCurrentItemAsFailed(throwable);
|
||||
}, (throwable) -> {
|
||||
if (throwable != null) {
|
||||
|
|
|
@@ -21,7 +21,6 @@ package org.elasticsearch.action.percolate;
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ShardOperationFailedException;
|
||||
import org.elasticsearch.action.get.GetRequest;
|
||||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.action.get.TransportGetAction;
|
||||
import org.elasticsearch.action.support.ActionFilters;
|
||||
|
@@ -74,9 +73,7 @@ public class TransportPercolateAction extends TransportBroadcastAction<Percolate
protected void doExecute(Task task, final PercolateRequest request, final ActionListener<PercolateResponse> listener) {
|
||||
request.startTime = System.currentTimeMillis();
|
||||
if (request.getRequest() != null) {
|
||||
//create a new get request to make sure it has the same headers and context as the original percolate request
|
||||
GetRequest getRequest = new GetRequest(request.getRequest());
|
||||
getAction.execute(getRequest, new ActionListener<GetResponse>() {
|
||||
getAction.execute(request.getRequest(), new ActionListener<GetResponse>() {
|
||||
@Override
|
||||
public void onResponse(GetResponse getResponse) {
|
||||
if (!getResponse.isExists()) {
|
||||
|
|
|
@@ -76,23 +76,6 @@ public class SearchRequest extends ActionRequest<SearchRequest> implements Indic
public SearchRequest() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor that creates a new search request that is a copy of the one provided as an argument.
|
||||
* The new request will inherit though headers and context from the original request that caused it.
|
||||
*/
|
||||
public SearchRequest(SearchRequest searchRequest) {
|
||||
this.searchType = searchRequest.searchType;
|
||||
this.indices = searchRequest.indices;
|
||||
this.routing = searchRequest.routing;
|
||||
this.preference = searchRequest.preference;
|
||||
this.template = searchRequest.template;
|
||||
this.source = searchRequest.source;
|
||||
this.requestCache = searchRequest.requestCache;
|
||||
this.scroll = searchRequest.scroll;
|
||||
this.types = searchRequest.types;
|
||||
this.indicesOptions = searchRequest.indicesOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new search request against the indices. No indices provided here means that search
|
||||
* will run against all indices.
|
||||
|
|
|
@@ -59,8 +59,7 @@ public class TransportMultiSearchAction extends HandledTransportAction<MultiSear
final AtomicInteger counter = new AtomicInteger(responses.length());
|
||||
for (int i = 0; i < responses.length(); i++) {
|
||||
final int index = i;
|
||||
SearchRequest searchRequest = new SearchRequest(request.requests().get(i));
|
||||
searchAction.execute(searchRequest, new ActionListener<SearchResponse>() {
|
||||
searchAction.execute(request.requests().get(i), new ActionListener<SearchResponse>() {
|
||||
@Override
|
||||
public void onResponse(SearchResponse searchResponse) {
|
||||
responses.set(index, new MultiSearchResponse.Item(searchResponse, null));
|
||||
|
|
|
@@ -19,7 +19,6 @@
|
||||
package org.elasticsearch.action.support.replication;
|
||||
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
||||
/**
|
||||
|
@@ -38,13 +37,4 @@ public class BasicReplicationRequest extends ReplicationRequest<BasicReplication
public BasicReplicationRequest(ShardId shardId) {
|
||||
super(shardId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor that creates a new request that is a copy of the one
|
||||
* provided as an argument.
|
||||
*/
|
||||
protected BasicReplicationRequest(BasicReplicationRequest request) {
|
||||
super(request);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -70,16 +70,6 @@ public abstract class ReplicationRequest<Request extends ReplicationRequest<Requ
this.shardId = shardId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy constructor that creates a new request that is a copy of the one provided as an argument.
|
||||
* The new request will inherit though headers and context from the original request that caused it.
|
||||
*/
|
||||
protected ReplicationRequest(Request request) {
|
||||
this.timeout = request.timeout();
|
||||
this.index = request.index();
|
||||
this.consistencyLevel = request.consistencyLevel();
|
||||
}
|
||||
|
||||
/**
|
||||
* A timeout to wait if the index operation can't be performed immediately. Defaults to <tt>1m</tt>.
|
||||
*/
|
||||
|
|
|
@@ -26,7 +26,6 @@ import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.delete.DeleteResponse;
|
||||
import org.elasticsearch.action.delete.TransportDeleteAction;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
|
@@ -169,7 +168,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
final UpdateHelper.Result result = updateHelper.prepare(request, indexShard);
|
||||
switch (result.operation()) {
|
||||
case UPSERT:
|
||||
IndexRequest upsertRequest = new IndexRequest((IndexRequest)result.action());
|
||||
IndexRequest upsertRequest = result.action();
|
||||
// we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
|
||||
final BytesReference upsertSourceBytes = upsertRequest.source();
|
||||
indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
|
||||
|
@@ -206,7 +205,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
});
|
||||
break;
|
||||
case INDEX:
|
||||
IndexRequest indexRequest = new IndexRequest((IndexRequest)result.action());
|
||||
IndexRequest indexRequest = result.action();
|
||||
// we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
|
||||
final BytesReference indexSourceBytes = indexRequest.source();
|
||||
indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
|
||||
|
@@ -236,8 +235,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
});
|
||||
break;
|
||||
case DELETE:
|
||||
DeleteRequest deleteRequest = new DeleteRequest(result.action(), request);
|
||||
deleteAction.execute(deleteRequest, new ActionListener<DeleteResponse>() {
|
||||
deleteAction.execute(result.action(), new ActionListener<DeleteResponse>() {
|
||||
@Override
|
||||
public void onResponse(DeleteResponse response) {
|
||||
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), false);
|
||||
|
|
|
@@ -23,6 +23,8 @@ import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.PostingsEnum;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.search.DocIdSetIterator;
|
||||
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
import org.apache.lucene.util.ArrayUtil;
|
||||
import org.apache.lucene.util.BitSet;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
|
@@ -415,6 +417,24 @@ public final class OrdinalsBuilder implements Closeable {
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A {@link TermsEnum} that iterates only highest resolution geo prefix coded terms.
|
||||
*
|
||||
* @see #buildFromTerms(TermsEnum)
|
||||
*/
|
||||
public static TermsEnum wrapGeoPointTerms(TermsEnum termsEnum) {
|
||||
return new FilteredTermsEnum(termsEnum, false) {
|
||||
@Override
|
||||
protected AcceptStatus accept(BytesRef term) throws IOException {
|
||||
// accept only the max resolution terms
|
||||
// todo is this necessary?
|
||||
return GeoEncodingUtils.getPrefixCodedShift(term) == GeoPointField.PRECISION_STEP * 4 ?
|
||||
AcceptStatus.YES : AcceptStatus.END;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the maximum document ID this builder can associate with an ordinal
|
||||
*/
|
||||
|
|
|
@@ -20,6 +20,7 @@
package org.elasticsearch.index.fielddata.plain;
|
||||
|
||||
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
|
||||
import org.apache.lucene.spatial.util.GeoEncodingUtils;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefIterator;
|
||||
import org.apache.lucene.util.CharsRefBuilder;
|
||||
|
@@ -47,8 +48,10 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<Ato
}
|
||||
|
||||
protected static class GeoPointTermsEnum extends BaseGeoPointTermsEnum {
|
||||
private final GeoPointField.TermEncoding termEncoding;
|
||||
protected GeoPointTermsEnum(BytesRefIterator termsEnum, GeoPointField.TermEncoding termEncoding) {
|
||||
super(termsEnum);
|
||||
this.termEncoding = termEncoding;
|
||||
}
|
||||
|
||||
public Long next() throws IOException {
|
||||
|
@@ -56,8 +59,14 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<Ato
if (term == null) {
|
||||
return null;
|
||||
}
|
||||
if (termEncoding == GeoPointField.TermEncoding.PREFIX) {
|
||||
return GeoEncodingUtils.prefixCodedToGeoCoded(term);
|
||||
} else if (termEncoding == GeoPointField.TermEncoding.NUMERIC) {
|
||||
return NumericUtils.prefixCodedToLong(term);
|
||||
}
|
||||
throw new IllegalArgumentException("GeoPoint.TermEncoding should be one of: " + GeoPointField.TermEncoding.PREFIX
|
||||
+ " or " + GeoPointField.TermEncoding.NUMERIC + " found: " + termEncoding);
|
||||
}
|
||||
}
|
||||
|
||||
protected static class GeoPointTermsEnumLegacy extends BaseGeoPointTermsEnum {
|
||||
|
|
|
@@ -23,10 +23,10 @@ import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.RandomAccessOrds;
|
||||
import org.apache.lucene.index.Terms;
|
||||
import org.apache.lucene.index.TermsEnum;
|
||||
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
|
||||
import org.apache.lucene.util.BitSet;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.breaker.CircuitBreaker;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
|
@@ -92,9 +92,18 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
|
||||
boolean success = false;
|
||||
try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) {
|
||||
final GeoPointField.TermEncoding termEncoding = indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_3_0) ?
|
||||
GeoPointField.TermEncoding.PREFIX : GeoPointField.TermEncoding.NUMERIC;
|
||||
final GeoPointTermsEnum iter = new GeoPointTermsEnum(builder.buildFromTerms(OrdinalsBuilder.wrapNumeric64Bit(terms.iterator())), termEncoding);
|
||||
final TermsEnum termsEnum;
|
||||
final GeoPointField.TermEncoding termEncoding;
|
||||
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_3_0)) {
|
||||
termEncoding = GeoPointField.TermEncoding.PREFIX;
|
||||
termsEnum = OrdinalsBuilder.wrapGeoPointTerms(terms.iterator());
|
||||
} else {
|
||||
termEncoding = GeoPointField.TermEncoding.NUMERIC;
|
||||
termsEnum = OrdinalsBuilder.wrapNumeric64Bit(terms.iterator());
|
||||
}
|
||||
|
||||
final GeoPointTermsEnum iter = new GeoPointTermsEnum(builder.buildFromTerms(termsEnum), termEncoding);
|
||||
|
||||
Long hashedPoint;
|
||||
long numTerms = 0;
|
||||
while ((hashedPoint = iter.next()) != null) {
|
||||
|
|
|
@@ -198,22 +198,22 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(NAME);
|
||||
if (query != null) {
|
||||
builder.field("query");
|
||||
builder.field(FunctionScoreQueryParser.QUERY_FIELD.getPreferredName());
|
||||
query.toXContent(builder, params);
|
||||
}
|
||||
builder.startArray("functions");
|
||||
builder.startArray(FunctionScoreQueryParser.FUNCTIONS_FIELD.getPreferredName());
|
||||
for (FilterFunctionBuilder filterFunctionBuilder : filterFunctionBuilders) {
|
||||
filterFunctionBuilder.toXContent(builder, params);
|
||||
}
|
||||
builder.endArray();
|
||||
|
||||
builder.field("score_mode", scoreMode.name().toLowerCase(Locale.ROOT));
|
||||
builder.field(FunctionScoreQueryParser.SCORE_MODE_FIELD.getPreferredName(), scoreMode.name().toLowerCase(Locale.ROOT));
|
||||
if (boostMode != null) {
|
||||
builder.field("boost_mode", boostMode.name().toLowerCase(Locale.ROOT));
|
||||
builder.field(FunctionScoreQueryParser.BOOST_MODE_FIELD.getPreferredName(), boostMode.name().toLowerCase(Locale.ROOT));
|
||||
}
|
||||
builder.field("max_boost", maxBoost);
|
||||
builder.field(FunctionScoreQueryParser.MAX_BOOST_FIELD.getPreferredName(), maxBoost);
|
||||
if (minScore != null) {
|
||||
builder.field("min_score", minScore);
|
||||
builder.field(FunctionScoreQueryParser.MIN_SCORE_FIELD.getPreferredName(), minScore);
|
||||
}
|
||||
printBoostAndQueryName(builder);
|
||||
builder.endObject();
|
||||
|
@@ -359,7 +359,7 @@ public class FunctionScoreQueryBuilder extends AbstractQueryBuilder<FunctionScor
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field("filter");
|
||||
builder.field(FunctionScoreQueryParser.FILTER_FIELD.getPreferredName());
|
||||
filter.toXContent(builder, params);
|
||||
scoreFunction.toXContent(builder, params);
|
||||
builder.endObject();
|
||||
|
|
|
@@ -19,10 +19,6 @@
|
||||
package org.elasticsearch.index.query.functionscore;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
|
@@ -39,6 +35,10 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParser;
|
||||
import org.elasticsearch.index.query.functionscore.weight.WeightBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Parser for function_score query
|
||||
*/
|
||||
|
@@ -50,6 +50,13 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
static final String MISPLACED_FUNCTION_MESSAGE_PREFIX = "you can either define [functions] array or a single function, not both. ";
|
||||
|
||||
public static final ParseField WEIGHT_FIELD = new ParseField("weight");
|
||||
public static final ParseField QUERY_FIELD = new ParseField("query");
|
||||
public static final ParseField FILTER_FIELD = new ParseField("filter");
|
||||
public static final ParseField FUNCTIONS_FIELD = new ParseField("functions");
|
||||
public static final ParseField SCORE_MODE_FIELD = new ParseField("score_mode");
|
||||
public static final ParseField BOOST_MODE_FIELD = new ParseField("boost_mode");
|
||||
public static final ParseField MAX_BOOST_FIELD = new ParseField("max_boost");
|
||||
public static final ParseField MIN_SCORE_FIELD = new ParseField("min_score");
|
||||
|
||||
private final ScoreFunctionParserMapper functionParserMapper;
|
||||
|
||||
|
@@ -86,27 +93,12 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if ("query".equals(currentFieldName)) {
|
||||
query = parseContext.parseInnerQueryBuilder();
|
||||
} else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
|
||||
scoreMode = FiltersFunctionScoreQuery.ScoreMode.fromString(parser.text());
|
||||
} else if ("boost_mode".equals(currentFieldName) || "boostMode".equals(currentFieldName)) {
|
||||
combineFunction = CombineFunction.fromString(parser.text());
|
||||
} else if ("max_boost".equals(currentFieldName) || "maxBoost".equals(currentFieldName)) {
|
||||
maxBoost = parser.floatValue();
|
||||
} else if ("boost".equals(currentFieldName)) {
|
||||
boost = parser.floatValue();
|
||||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else if ("min_score".equals(currentFieldName) || "minScore".equals(currentFieldName)) {
|
||||
minScore = parser.floatValue();
|
||||
} else if ("functions".equals(currentFieldName)) {
|
||||
if (singleFunctionFound) {
|
||||
String errorString = "already found [" + singleFunctionName + "], now encountering [functions].";
|
||||
handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString);
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
|
||||
if (query != null) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. [query] is already defined.", FunctionScoreQueryBuilder.NAME);
|
||||
}
|
||||
functionArrayFound = true;
|
||||
currentFieldName = parseFiltersAndFunctions(parseContext, parser, filterFunctionBuilders);
|
||||
query = parseContext.parseInnerQueryBuilder();
|
||||
} else {
|
||||
if (singleFunctionFound) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. already found function [{}], now encountering [{}]. use [functions] array if you want to define several functions.", FunctionScoreQueryBuilder.NAME, singleFunctionName, currentFieldName);
|
||||
|
@@ -118,17 +110,53 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
singleFunctionFound = true;
|
||||
singleFunctionName = currentFieldName;
|
||||
|
||||
ScoreFunctionBuilder<?> scoreFunction;
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) {
|
||||
scoreFunction = new WeightBuilder().setWeight(parser.floatValue());
|
||||
} else {
|
||||
// we try to parse a score function. If there is no score
|
||||
// function for the current field name,
|
||||
// functionParserMapper.get() will throw an Exception.
|
||||
scoreFunction = functionParserMapper.get(parser.getTokenLocation(), currentFieldName).fromXContent(parseContext, parser);
|
||||
}
|
||||
// we try to parse a score function. If there is no score function for the current field name,
|
||||
// functionParserMapper.get() may throw an Exception.
|
||||
ScoreFunctionBuilder<?> scoreFunction = functionParserMapper.get(parser.getTokenLocation(), currentFieldName).fromXContent(parseContext, parser);
|
||||
filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction));
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, FUNCTIONS_FIELD)) {
|
||||
if (singleFunctionFound) {
|
||||
String errorString = "already found [" + singleFunctionName + "], now encountering [functions].";
|
||||
handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString);
|
||||
}
|
||||
functionArrayFound = true;
|
||||
currentFieldName = parseFiltersAndFunctions(parseContext, parser, filterFunctionBuilders);
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. array [{}] is not supported", FunctionScoreQueryBuilder.NAME, currentFieldName);
|
||||
}
|
||||
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) {
|
||||
scoreMode = FiltersFunctionScoreQuery.ScoreMode.fromString(parser.text());
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOOST_MODE_FIELD)) {
|
||||
combineFunction = CombineFunction.fromString(parser.text());
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MAX_BOOST_FIELD)) {
|
||||
maxBoost = parser.floatValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MIN_SCORE_FIELD)) {
|
||||
minScore = parser.floatValue();
|
||||
} else {
|
||||
if (singleFunctionFound) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. already found function [{}], now encountering [{}]. use [functions] array if you want to define several functions.", FunctionScoreQueryBuilder.NAME, singleFunctionName, currentFieldName);
|
||||
}
|
||||
if (functionArrayFound) {
|
||||
String errorString = "already found [functions] array, now encountering [" + currentFieldName + "].";
|
||||
handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString);
|
||||
}
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) {
|
||||
filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(new WeightBuilder().setWeight(parser.floatValue())));
|
||||
singleFunctionFound = true;
|
||||
singleFunctionName = currentFieldName;
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. field [{}] is not supported", FunctionScoreQueryBuilder.NAME, currentFieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (query == null) {
|
||||
|
@@ -167,21 +195,23 @@ public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryB
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) {
|
||||
functionWeight = parser.floatValue();
|
||||
} else {
|
||||
if ("filter".equals(currentFieldName)) {
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) {
|
||||
filter = parseContext.parseInnerQueryBuilder();
|
||||
} else {
|
||||
if (scoreFunction != null) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "failed to parse function_score functions. already found [{}], now encountering [{}].", scoreFunction.getName(), currentFieldName);
|
||||
}
|
||||
// do not need to check null here,
|
||||
// functionParserMapper throws exception if parser
|
||||
// non-existent
|
||||
// do not need to check null here, functionParserMapper does it already
|
||||
ScoreFunctionParser functionParser = functionParserMapper.get(parser.getTokenLocation(), currentFieldName);
|
||||
scoreFunction = functionParser.fromXContent(parseContext, parser);
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) {
|
||||
functionWeight = parser.floatValue();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. field [{}] is not supported", FunctionScoreQueryBuilder.NAME, currentFieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (functionWeight != null) {
|
||||
|
|
|
@@ -25,11 +25,8 @@ import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.transport.TransportAddress;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public abstract class TransportMessage<TM extends TransportMessage<TM>> implements Streamable {
|
||||
public abstract class TransportMessage implements Streamable {
|
||||
|
||||
private TransportAddress remoteAddress;
|
||||
|
||||
|
|
|
@@ -23,7 +23,7 @@ import org.elasticsearch.tasks.Task;
|
||||
/**
|
||||
*/
|
||||
public abstract class TransportRequest extends TransportMessage<TransportRequest> {
|
||||
public abstract class TransportRequest extends TransportMessage {
|
||||
|
||||
public static class Empty extends TransportRequest {
|
||||
public static final Empty INSTANCE = new Empty();
|
||||
|
@@ -32,7 +32,6 @@ public abstract class TransportRequest extends TransportMessage<TransportRequest
public TransportRequest() {
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns the task object that should be used to keep track of the processing of the request.
|
||||
*
|
||||
|
@@ -48,5 +47,4 @@ public abstract class TransportRequest extends TransportMessage<TransportRequest
public String getDescription() {
|
||||
return "";
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -21,7 +21,7 @@ package org.elasticsearch.transport;
|
||||
/**
|
||||
*/
|
||||
public abstract class TransportResponse extends TransportMessage<TransportResponse> {
|
||||
public abstract class TransportResponse extends TransportMessage {
|
||||
|
||||
public static class Empty extends TransportResponse {
|
||||
public static final Empty INSTANCE = new Empty();
|
||||
|
|
|
@@ -502,7 +502,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
context.reset(parser);
|
||||
context.parseFieldMatcher(matcher);
|
||||
QueryBuilder<?> parseInnerQueryBuilder = context.parseInnerQueryBuilder();
|
||||
assertTrue(parser.nextToken() == null);
|
||||
assertNull(parser.nextToken());
|
||||
return parseInnerQueryBuilder;
|
||||
}
|
||||
|
||||
|
|
|
@@ -20,7 +20,6 @@
package org.elasticsearch.index.query.functionscore;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonParseException;
|
||||
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
|
@@ -60,7 +59,6 @@ import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
|
||||
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
|
||||
import static org.hamcrest.Matchers.closeTo;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.either;
|
||||
|
@@ -73,7 +71,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
@Override
|
||||
protected FunctionScoreQueryBuilder doCreateTestQueryBuilder() {
|
||||
FunctionScoreQueryBuilder functionScoreQueryBuilder;
|
||||
switch(randomIntBetween(0, 3)) {
|
||||
switch (randomIntBetween(0, 3)) {
|
||||
case 0:
|
||||
int numFunctions = randomIntBetween(0, 3);
|
||||
FunctionScoreQueryBuilder.FilterFunctionBuilder[] filterFunctionBuilders = new FunctionScoreQueryBuilder.FilterFunctionBuilder[numFunctions];
|
||||
|
@@ -125,7 +123,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
DecayFunctionBuilder decayFunctionBuilder;
|
||||
Float offset = randomBoolean() ? null : randomFloat();
|
||||
double decay = randomDouble();
|
||||
switch(randomIntBetween(0, 2)) {
|
||||
switch (randomIntBetween(0, 2)) {
|
||||
case 0:
|
||||
decayFunctionBuilder = new GaussDecayFunctionBuilder(INT_FIELD_NAME, randomFloat(), randomFloat(), offset, decay);
|
||||
break;
|
||||
|
@@ -165,7 +163,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
RandomScoreFunctionBuilder randomScoreFunctionBuilder = new RandomScoreFunctionBuilder();
|
||||
if (randomBoolean()) {
|
||||
randomScoreFunctionBuilder.seed(randomLong());
|
||||
} else if(randomBoolean()) {
|
||||
} else if (randomBoolean()) {
|
||||
randomScoreFunctionBuilder.seed(randomInt());
|
||||
} else {
|
||||
randomScoreFunctionBuilder.seed(randomAsciiOfLengthBetween(1, 10));
|
||||
|
@@ -199,93 +197,93 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
|
||||
public void testIllegalArguments() {
|
||||
try {
|
||||
new FunctionScoreQueryBuilder((QueryBuilder<?>)null);
|
||||
new FunctionScoreQueryBuilder((QueryBuilder<?>) null);
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder((ScoreFunctionBuilder)null);
|
||||
new FunctionScoreQueryBuilder((ScoreFunctionBuilder) null);
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder((FunctionScoreQueryBuilder.FilterFunctionBuilder[])null);
|
||||
new FunctionScoreQueryBuilder((FunctionScoreQueryBuilder.FilterFunctionBuilder[]) null);
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder(null, ScoreFunctionBuilders.randomFunction(123));
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), (ScoreFunctionBuilder)null);
|
||||
new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), (ScoreFunctionBuilder) null);
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), (FunctionScoreQueryBuilder.FilterFunctionBuilder[])null);
|
||||
new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), (FunctionScoreQueryBuilder.FilterFunctionBuilder[]) null);
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder(null, new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]);
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder(QueryBuilders.matchAllQuery(), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{null});
|
||||
fail("content of array must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder.FilterFunctionBuilder(null);
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder.FilterFunctionBuilder(null, ScoreFunctionBuilders.randomFunction(123));
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder.FilterFunctionBuilder(new MatchAllQueryBuilder(), null);
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder(new MatchAllQueryBuilder()).scoreMode(null);
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
|
||||
try {
|
||||
new FunctionScoreQueryBuilder(new MatchAllQueryBuilder()).boostMode(null);
|
||||
fail("must not be null");
|
||||
} catch(IllegalArgumentException e) {
|
||||
} catch (IllegalArgumentException e) {
|
||||
//all good
|
||||
}
|
||||
}
|
||||
|
@@ -369,7 +367,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
assertThat(functionScoreQueryBuilder.maxBoost(), equalTo(10f));
|
||||
|
||||
if (i < XContentType.values().length) {
|
||||
queryBuilder = parseQuery(((AbstractQueryBuilder<?>)queryBuilder).buildAsBytes(XContentType.values()[i]));
|
||||
queryBuilder = parseQuery(((AbstractQueryBuilder<?>) queryBuilder).buildAsBytes(XContentType.values()[i]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -416,7 +414,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
assertThat(functionScoreQueryBuilder.maxBoost(), equalTo(10f));
|
||||
|
||||
if (i < XContentType.values().length) {
|
||||
queryBuilder = parseQuery(((AbstractQueryBuilder<?>)queryBuilder).buildAsBytes(XContentType.values()[i]));
|
||||
queryBuilder = parseQuery(((AbstractQueryBuilder<?>) queryBuilder).buildAsBytes(XContentType.values()[i]));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -434,7 +432,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
try {
|
||||
parseQuery(functionScoreQuery);
|
||||
fail("parsing should have failed");
|
||||
} catch(ParsingException e) {
|
||||
} catch (ParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("use [functions] array if you want to define several functions."));
|
||||
}
|
||||
}
|
||||
|
@@ -464,7 +462,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
try {
|
||||
parseQuery(functionScoreQuery);
|
||||
fail("parsing should have failed");
|
||||
} catch(ParsingException e) {
|
||||
} catch (ParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("failed to parse function_score functions. already found [random_score], now encountering [script_score]."));
|
||||
}
|
||||
}
|
||||
|
@@ -486,7 +484,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
try {
|
||||
parseQuery(functionScoreQuery);
|
||||
fail("parsing should have failed");
|
||||
} catch(ParsingException e) {
|
||||
} catch (ParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("an entry in functions list is missing a function."));
|
||||
}
|
||||
}
|
||||
|
@@ -553,8 +551,22 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
}
|
||||
|
||||
public void testMalformedThrowsException() throws IOException {
|
||||
String json = "{\n" +
|
||||
" \"function_score\":{\n" +
|
||||
" \"query\":{\n" +
|
||||
" \"term\":{\n" +
|
||||
" \"name.last\":\"banon\"\n" +
|
||||
" }\n" +
|
||||
" },\n" +
|
||||
" \"functions\": [\n" +
|
||||
" {\n" +
|
||||
" {\n" +
|
||||
" }\n" +
|
||||
" ]\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
try {
|
||||
parseQuery(copyToStringFromClasspath("/org/elasticsearch/index/query/faulty-function-score-query.json"));
|
||||
parseQuery(json);
|
||||
fail("Expected JsonParseException");
|
||||
} catch (JsonParseException e) {
|
||||
assertThat(e.getMessage(), containsString("Unexpected character ('{"));
|
||||
|
@@ -597,7 +609,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
try {
|
||||
parseQuery(querySource);
|
||||
fail("parsing should have failed");
|
||||
} catch(ParsingException e) {
|
||||
} catch (ParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("[field_value_factor] field 'factor' does not support lists or objects"));
|
||||
}
|
||||
}
|
||||
|
@@ -660,4 +672,79 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
assertEquals(rewrite.filterFunctionBuilders()[0].getFilter(), new TermQueryBuilder("tq", "1"));
|
||||
assertSame(rewrite.filterFunctionBuilders()[1].getFilter(), secondFunction);
|
||||
}
|
||||
|
||||
public void testQueryMalformedArrayNotSupported() throws IOException {
|
||||
String json =
|
||||
"{\n" +
|
||||
" \"function_score\" : {\n" +
|
||||
" \"not_supported\" : []\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
|
||||
try {
|
||||
parseQuery(json);
|
||||
fail("parse should have failed");
|
||||
} catch (ParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("array [not_supported] is not supported"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testQueryMalformedFieldNotSupported() throws IOException {
|
||||
String json =
|
||||
"{\n" +
|
||||
" \"function_score\" : {\n" +
|
||||
" \"not_supported\" : \"value\"\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
|
||||
try {
|
||||
parseQuery(json);
|
||||
fail("parse should have failed");
|
||||
} catch (ParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("field [not_supported] is not supported"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testMalformedQueryFunctionFieldNotSupported() throws IOException {
|
||||
String json =
|
||||
"{\n" +
|
||||
" \"function_score\" : {\n" +
|
||||
" \"functions\" : [ {\n" +
|
||||
" \"not_supported\" : 23.0\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
|
||||
try {
|
||||
parseQuery(json);
|
||||
fail("parse should have failed");
|
||||
} catch (ParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("field [not_supported] is not supported"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testMalformedQuery() throws IOException {
|
||||
//verify that an error is thrown rather than setting the query twice (https://github.com/elastic/elasticsearch/issues/16583)
|
||||
String json =
|
||||
"{\n" +
|
||||
" \"function_score\":{\n" +
|
||||
" \"query\":{\n" +
|
||||
" \"bool\":{\n" +
|
||||
" \"must\":{\"match\":{\"field\":\"value\"}}" +
|
||||
" },\n" +
|
||||
" \"ignored_field_name\": {\n" +
|
||||
" {\"match\":{\"field\":\"value\"}}\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
"}";
|
||||
|
||||
try {
|
||||
parseQuery(json);
|
||||
fail("parse should have failed");
|
||||
} catch(ParsingException e) {
|
||||
assertThat(e.getMessage(), containsString("[query] is already defined."));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -35,6 +35,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.cache.RemovalNotification;
|
||||
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.index.cache.request.ShardRequestCache;
|
||||
import org.elasticsearch.index.query.TermQueryBuilder;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
@@ -53,7 +54,8 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
|
||||
|
||||
writer.addDocument(newDoc(0, "foo"));
|
||||
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1));
|
||||
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
|
||||
new ShardId("foo", "bar", 1));
|
||||
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
|
||||
AtomicBoolean indexShard = new AtomicBoolean(true);
|
||||
TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0);
|
||||
|
@@ -105,7 +107,8 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
|
||||
|
||||
writer.addDocument(newDoc(0, "foo"));
|
||||
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1));
|
||||
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
|
||||
new ShardId("foo", "bar", 1));
|
||||
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
|
||||
TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0);
|
||||
|
||||
|
@@ -225,8 +228,34 @@ public class IndicesRequestCacheTests extends ESTestCase {
}
|
||||
|
||||
public void testEviction() throws Exception {
|
||||
final ByteSizeValue size;
|
||||
{
|
||||
IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY);
|
||||
AtomicBoolean indexShard = new AtomicBoolean(true);
|
||||
ShardRequestCache requestCacheStats = new ShardRequestCache();
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
|
||||
|
||||
writer.addDocument(newDoc(0, "foo"));
|
||||
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
|
||||
new ShardId("foo", "bar", 1));
|
||||
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
|
||||
TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0);
|
||||
|
||||
writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
|
||||
DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
|
||||
new ShardId("foo", "bar", 1));
|
||||
TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0);
|
||||
|
||||
BytesReference value1 = cache.getOrCompute(entity, reader, termQuery.buildAsBytes());
|
||||
assertEquals("foo", value1.toUtf8());
|
||||
BytesReference value2 = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes());
|
||||
assertEquals("bar", value2.toUtf8());
|
||||
size = requestCacheStats.stats().getMemorySize();
|
||||
IOUtils.close(reader, secondReader, writer, dir, cache);
|
||||
}
|
||||
IndicesRequestCache cache = new IndicesRequestCache(Settings.builder()
|
||||
.put(IndicesRequestCache.INDICES_CACHE_QUERY_SIZE.getKey(), "113b") // the first 2 cache entries add up to 112b
|
||||
.put(IndicesRequestCache.INDICES_CACHE_QUERY_SIZE.getKey(), size.bytes()+1 +"b")
|
||||
.build());
|
||||
AtomicBoolean indexShard = new AtomicBoolean(true);
|
||||
ShardRequestCache requestCacheStats = new ShardRequestCache();
|
||||
|
@@ -234,16 +263,19 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
|
||||
|
||||
writer.addDocument(newDoc(0, "foo"));
|
||||
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1));
|
||||
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
|
||||
new ShardId("foo", "bar", 1));
|
||||
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
|
||||
TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0);
|
||||
|
||||
writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
|
||||
DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1));
|
||||
DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
|
||||
new ShardId("foo", "bar", 1));
|
||||
TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0);
|
||||
|
||||
writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
|
||||
DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1));
|
||||
DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
|
||||
new ShardId("foo", "bar", 1));
|
||||
TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, indexShard, 0);
|
||||
|
||||
BytesReference value1 = cache.getOrCompute(entity, reader, termQuery.buildAsBytes());
|
||||
|
@@ -267,16 +299,19 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
|
||||
|
||||
writer.addDocument(newDoc(0, "foo"));
|
||||
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1));
|
||||
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
|
||||
new ShardId("foo", "bar", 1));
|
||||
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
|
||||
TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0);
|
||||
|
||||
writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
|
||||
DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1));
|
||||
DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
|
||||
new ShardId("foo", "bar", 1));
|
||||
TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0);
|
||||
|
||||
writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
|
||||
DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true), new ShardId("foo", "bar", 1));
|
||||
DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer, true),
|
||||
new ShardId("foo", "bar", 1));
|
||||
AtomicBoolean differentIdentity = new AtomicBoolean(true);
|
||||
TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, differentIdentity, 0);
|
||||
|
||||
|
|
|
@ -156,6 +156,7 @@ public class SimpleRoutingIT extends ESIntegTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/16645")
|
||||
public void testRequiredRoutingMapping() throws Exception {
|
||||
client().admin().indices().prepareCreate("test").addAlias(new Alias("alias"))
|
||||
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject())
|
||||
|
|
|
@ -373,7 +373,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
|
|||
parser.nextToken(); // sometimes we move it on the START_OBJECT to test the embedded case
|
||||
}
|
||||
SearchSourceBuilder newBuilder = SearchSourceBuilder.parseSearchSource(parser, parseContext);
|
||||
assertNotSame(testBuilder, newBuilder);
|
||||
assertNull(parser.nextToken());
|
||||
assertEquals(testBuilder, newBuilder);
|
||||
assertEquals(testBuilder.hashCode(), newBuilder.hashCode());
|
||||
}
|
||||
|
|
|
@ -1,15 +0,0 @@
|
|||
{
|
||||
"function_score":{
|
||||
"query":{
|
||||
"term":{
|
||||
"name.last":"banon"
|
||||
}
|
||||
},
|
||||
"functions": {
|
||||
{
|
||||
"boost_factor" : 3
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -47,6 +47,8 @@ include::analysis.asciidoc[]
|
|||
|
||||
include::discovery.asciidoc[]
|
||||
|
||||
include::ingest.asciidoc[]
|
||||
|
||||
include::management.asciidoc[]
|
||||
|
||||
include::mapper.asciidoc[]
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
[[ingest-attachment]]
|
||||
== Ingest Attachment Processor Plugin
|
||||
=== Ingest Attachment Processor Plugin
|
||||
|
||||
The ingest attachment plugin lets Elasticsearch extract file attachments in common formats (such as PPT, XLS, PDF)
|
||||
using the Apache text extraction library http://lucene.apache.org/tika/[Tika].
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
[[ingest-geoip]]
|
||||
== Ingest Geoip Processor Plugin
|
||||
=== Ingest Geoip Processor Plugin
|
||||
|
||||
The GeoIP processor adds information about the geographical location of IP addresses, based on data from the Maxmind databases.
|
||||
This processor adds this information by default under the `geoip` field.
|
||||
|
@ -19,10 +19,11 @@ is located at `$ES_HOME/config/ingest/geoip` and holds the shipped databases too
|
|||
| `source_field` | yes | - | The field to get the ip address or hostname from for the geographical lookup.
|
||||
| `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database.
|
||||
| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files.
|
||||
| `fields` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] <1> | Controls what properties are added to the `target_field` based on the geoip lookup.
|
||||
| `fields` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup.
|
||||
|======
|
||||
|
||||
<1> Depends on what is available in `database_field`:
|
||||
*Depends on what is available in `database_field`:
|
||||
|
||||
* If the GeoLite2 City database is used then the following fields may be added under the `target_field`: `ip`,
|
||||
`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude`
|
||||
and `location`. The fields actually added depend on what has been found and which fields were configured in `fields`.
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
[[ingest]]
|
||||
== Ingest Plugins
|
||||
|
||||
The ingest plugins extend Elaticsearch by providing additional ingest node capabilities.
|
||||
|
||||
[float]
|
||||
=== Core Ingest Plugins
|
||||
|
||||
The core ingest plugins are:
|
||||
|
||||
<<ingest-attachment>>::
|
||||
|
||||
The ingest attachment plugin lets Elasticsearch extract file attachments in common formats (such as PPT, XLS, and PDF) by
|
||||
using the Apache text extraction library http://lucene.apache.org/tika/[Tika].
|
||||
|
||||
<<ingest-geoip>>::
|
||||
|
||||
The GeoIP processor adds information about the geographical location of IP addresses, based on data from the Maxmind databases.
|
||||
This processor adds this information by default under the `geoip` field.
|
||||
+
|
||||
The ingest-geoip plugin ships by default with the GeoLite2 City and GeoLite2 Country geoip2 databases from Maxmind made available
|
||||
under the CCA-ShareAlike 3.0 license. For more details see, http://dev.maxmind.com/geoip/geoip2/geolite2/.
|
||||
|
||||
include::ingest-attachment.asciidoc[]
|
||||
|
||||
include::ingest-geoip.asciidoc[]
|
|
@ -41,6 +41,8 @@ include::modules.asciidoc[]
|
|||
|
||||
include::index-modules.asciidoc[]
|
||||
|
||||
include::ingest.asciidoc[]
|
||||
|
||||
include::testing.asciidoc[]
|
||||
|
||||
include::glossary.asciidoc[]
|
||||
|
|
|
@@ -0,0 +1,34 @@
[[ingest]]
= Ingest Node

[partintro]
--
Ingest node can be used to pre-process documents before the actual indexing takes place.
This pre-processing happens on an ingest node, which intercepts bulk and index requests, applies the
transformations, and then passes the documents back to the index or bulk APIs.

Ingest node is enabled by default. To disable ingest, configure the following
setting in the elasticsearch.yml file:

[source,yaml]
--------------------------------------------------
node.ingest: false
--------------------------------------------------

It is possible to enable ingest on any node or to have dedicated ingest nodes.

To pre-process documents before indexing, use the `pipeline` parameter
on an index or bulk request to tell the ingest node which pipeline to use.

[source,js]
--------------------------------------------------
PUT /my-index/my-type/my-id?pipeline=my_pipeline_id
{
  ...
}
--------------------------------------------------
// AUTOSENSE
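The same parameter should also be usable on a bulk request. The following is a sketch that assumes the bulk API accepts the `pipeline` URL parameter in the same way; the index, type, id, and field values are invented:

[source,js]
--------------------------------------------------
POST /_bulk?pipeline=my_pipeline_id
{ "index" : { "_index" : "my-index", "_type" : "my-type", "_id" : "1" } }
{ "field1" : "value1" }
--------------------------------------------------
// NOTE: sketch only - assumes the bulk API supports the pipeline URL parameter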

--

include::ingest/ingest-node.asciidoc[]
@@ -1,33 +1,5 @@
[[ingest]]
== Ingest Node

Ingest node can be used to pre-process documents before the actual indexing takes place.
This pre-processing happens on an ingest node, which intercepts bulk and index requests, applies the
transformations, and then passes the documents back to the index or bulk APIs.

Ingest node is enabled by default. To disable ingest, configure the following
setting in the elasticsearch.yml file:

[source,yaml]
--------------------------------------------------
node.ingest: false
--------------------------------------------------

It is possible to enable ingest on any node or to have dedicated ingest nodes.

To pre-process documents before indexing, use the `pipeline` parameter
on an index or bulk request to tell the ingest node which pipeline to use.

[source,js]
--------------------------------------------------
PUT /my-index/my-type/my-id?pipeline=my_pipeline_id
{
  ...
}
--------------------------------------------------
// AUTOSENSE

=== Pipeline Definition
[[pipe-line]]
== Pipeline Definition

A pipeline is a definition of a series of processors that are to be
executed in the same sequential order as they are declared.
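For orientation, such a definition is a JSON object with the `description` and `processors` keys discussed in the following sections. A minimal illustrative skeleton:

[source,js]
--------------------------------------------------
{
  "description" : "describe what this pipeline does",
  "processors" : [
    {
      "set" : {
        ...
      }
    }
  ]
}
--------------------------------------------------
// NOTE: sketch only - the processor entry shown here is a placeholder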
@@ -45,7 +17,7 @@ what the pipeline attempts to achieve.
The `processors` parameter defines a list of processors to be executed in
order.

=== Processors
== Processors

All processors are defined in the following way within a pipeline definition:

@@ -67,7 +39,7 @@ but is very useful for bookkeeping and tracing errors to specific processors.

See <<handling-failure-in-pipelines>> to learn more about the `on_failure` field and error handling in pipelines.

==== Set processor
=== Set processor
Sets one field and associates it with the specified value. If the field already exists,
its value will be replaced with the provided one.
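For example, a set processor entry inside the `processors` array could look like this; the field name and value are invented for the example:

[source,js]
--------------------------------------------------
{
  "set": {
    "field": "host_os",
    "value": "linux"
  }
}
--------------------------------------------------
// NOTE: field name and value are illustrative only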
@@ -90,7 +62,7 @@ its value will be replaced with the provided one.
}
--------------------------------------------------

==== Append processor
=== Append processor
Appends one or more values to an existing array if the field already exists and it is an array.
Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar.
Creates an array containing the provided values if the field doesn't exist.
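For example, appending a value to a `tags` field (field name and value invented for the example):

[source,js]
--------------------------------------------------
{
  "append": {
    "field": "tags",
    "value": ["production"]
  }
}
--------------------------------------------------
// NOTE: field name and value are illustrative only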
@@ -115,7 +87,7 @@ Accepts a single value or an array of values.
}
--------------------------------------------------

==== Remove processor
=== Remove processor
Removes an existing field. If the field doesn't exist, an exception will be thrown.

[[remove-options]]
@@ -135,7 +107,7 @@ Removes an existing field. If the field doesn't exist, an exception will be thro
}
--------------------------------------------------

==== Rename processor
=== Rename processor
Renames an existing field. If the field doesn't exist, an exception will be thrown. Also, the new field
name must not exist.
@@ -159,7 +131,7 @@ name must not exist.
--------------------------------------------------


==== Convert processor
=== Convert processor
Converts an existing field's value to a different type, like turning a string to an integer.
If the field value is an array, all members will be converted.
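For example, converting a `bytes` field to an integer; the `type` option name is assumed from the convert processor's standard configuration:

[source,js]
--------------------------------------------------
{
  "convert": {
    "field": "bytes",
    "type": "integer"
  }
}
--------------------------------------------------
// NOTE: sketch only - field name and type option are assumptions for illustration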
@@ -187,7 +159,7 @@ false if its string value is equal to `false` (ignore case) and it will throw ex
}
--------------------------------------------------

==== Gsub processor
=== Gsub processor
Converts a string field by applying a regular expression and a replacement.
If the field is not a string, the processor will throw an exception.
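For example, replacing dots with dashes in a field; the `pattern` and `replacement` option names are assumed from the gsub processor's standard configuration:

[source,js]
--------------------------------------------------
{
  "gsub": {
    "field": "field1",
    "pattern": "\\.",
    "replacement": "-"
  }
}
--------------------------------------------------
// NOTE: sketch only - option names and values are assumptions for illustration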
@@ -212,7 +184,7 @@ If the field is not a string, the processor will throw an exception.
}
--------------------------------------------------

==== Join processor
=== Join processor
Joins each element of an array into a single string using a separator character between each element.
Throws an error when the field is not an array.
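For example, joining an array field with a dash separator; the `separator` option name is assumed from the join processor's standard configuration:

[source,js]
--------------------------------------------------
{
  "join": {
    "field": "joined_array_field",
    "separator": "-"
  }
}
--------------------------------------------------
// NOTE: sketch only - option names and values are assumptions for illustration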
@@ -235,7 +207,7 @@ Throws error when the field is not an array.
}
--------------------------------------------------

==== Split processor
=== Split processor
Splits a field into an array using a separator character. Only works on string fields.

[[split-options]]
@@ -255,7 +227,7 @@ Split a field to an array using a separator character. Only works on string fiel
}
--------------------------------------------------

==== Lowercase processor
=== Lowercase processor
Converts a string to its lowercase equivalent.

[[lowercase-options]]
@@ -275,7 +247,7 @@ Converts a string to its lowercase equivalent.
}
--------------------------------------------------

==== Uppercase processor
=== Uppercase processor
Converts a string to its uppercase equivalent.

[[uppercase-options]]
@@ -295,7 +267,7 @@ Converts a string to its uppercase equivalent.
}
--------------------------------------------------

==== Trim processor
=== Trim processor
Trims whitespace from a field. NOTE: this only removes leading and trailing whitespace.

[[trim-options]]
@@ -315,7 +287,7 @@ Trims whitespace from field. NOTE: this only works on leading and trailing white
}
--------------------------------------------------

==== Grok Processor
=== Grok Processor

The Grok Processor extracts structured fields out of a single text field within a document. You choose which field to
extract matched fields from, as well as the Grok Pattern you expect will match. A Grok Pattern is like a regular
@@ -330,7 +302,7 @@ Here, you can add your own custom grok pattern files with custom grok expression
If you need help building patterns to match your logs, you will find the <http://grokdebug.herokuapp.com> and
<http://grokconstructor.appspot.com/> applications quite useful!

===== Grok Basics
==== Grok Basics

Grok sits on top of regular expressions, so any regular expressions are valid in grok as well.
The regular expression library is Oniguruma, and you can see the full supported regexp syntax
@@ -367,7 +339,7 @@ Grok expression.
%{NUMBER:duration} %{IP:client}
--------------------------------------------------

===== Custom Patterns and Pattern Files
==== Custom Patterns and Pattern Files

The Grok Processor comes pre-packaged with a base set of pattern files. These patterns may not always have
what you are looking for. These pattern files have a very basic format. Each line describes a named pattern with
@@ -393,7 +365,7 @@ SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?)
TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])
--------------------------------------------------

===== Using Grok Processor in a Pipeline
==== Using Grok Processor in a Pipeline

[[grok-options]]
.Grok Options
@@ -417,7 +389,7 @@ a document.

The pattern for this could be

[source]
[source,js]
--------------------------------------------------
%{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration}
--------------------------------------------------
@@ -474,7 +446,7 @@ An example of a pipeline specifying custom pattern definitions:
}
--------------------------------------------------

==== Date processor
=== Date processor

The date processor is used for parsing dates from fields, and then using that date or timestamp as the timestamp for that document.
By default, the date processor adds the parsed date as a new field called `@timestamp`, configurable by setting the `target_field`
@@ -512,7 +484,7 @@ An example that adds the parsed date to the `timestamp` field based on the `init
}
--------------------------------------------------

==== Fail processor
=== Fail processor
The Fail Processor is used to raise an exception. This is useful when
a user expects a pipeline to fail and wishes to relay a specific message
to the requester.
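For example, failing with a custom message; the `message` option name is assumed from the fail processor's standard configuration:

[source,js]
--------------------------------------------------
{
  "fail": {
    "message": "custom error message for the requester"
  }
}
--------------------------------------------------
// NOTE: sketch only - the message text is invented for this example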
@@ -534,7 +506,7 @@ to the requester.
}
--------------------------------------------------

==== Foreach processor
=== Foreach processor
All processors can operate on elements inside an array, but if all elements of an array need to
be processed in the same way, defining a processor for each element becomes cumbersome and tricky
because it is likely that the number of elements in an array is unknown. For this reason the `foreach`
@@ -680,7 +652,7 @@ In this example if the `remove` processor does fail then
the array elements that have been processed thus far will
be updated.

=== Accessing data in pipelines
== Accessing data in pipelines

Processors in pipelines have read and write access to documents that pass through the pipeline.
The fields in the source of a document and its metadata fields are accessible.
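For instance, a set processor can address a leaf field inside an object in the source using dot notation; the field name and value here are illustrative:

[source,js]
--------------------------------------------------
{
  "set": {
    "field": "geoip.continent_name",
    "value": "Europe"
  }
}
--------------------------------------------------
// NOTE: field name and value are illustrative only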
@@ -781,7 +753,8 @@ to depends on the field in the source with name `geoip.country_iso_code`.
}
--------------------------------------------------

==== Handling Failure in Pipelines
[[handling-failure-in-pipelines]]
=== Handling Failure in Pipelines

In its simplest case, pipelines describe a list of processors which
are executed sequentially and processing halts at the first exception. This
@@ -845,7 +818,7 @@ the index for which failed documents get sent.
--------------------------------------------------


===== Accessing Error Metadata From Processors Handling Exceptions
==== Accessing Error Metadata From Processors Handling Exceptions

Sometimes you may want to retrieve the actual error message that was thrown
by a failed processor. To do so you can access metadata fields called
@@ -878,9 +851,9 @@ of manually setting it.
--------------------------------------------------


=== Ingest APIs
== Ingest APIs

==== Put pipeline API
=== Put pipeline API

The put pipeline api adds pipelines and updates existing pipelines in the cluster.
@@ -904,7 +877,7 @@ PUT _ingest/pipeline/my-pipeline-id
NOTE: The put pipeline api also instructs all ingest nodes to reload their in-memory representation of pipelines, so that
pipeline changes take effect immediately.

==== Get pipeline API
=== Get pipeline API

The get pipeline api returns pipelines based on id. This api always returns a local reference of the pipeline.
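A sketch of such a request, reusing the pipeline id from the put example above:

[source,js]
--------------------------------------------------
GET _ingest/pipeline/my-pipeline-id
--------------------------------------------------
// NOTE: sketch only - assumes the get API uses the same _ingest/pipeline/<id> path as put and delete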
@@ -940,7 +913,7 @@ For each returned pipeline the source and the version is returned.
The version is useful for knowing what version of the pipeline the node has.
Multiple ids can be provided at the same time. Wildcards are also supported.

==== Delete pipeline API
=== Delete pipeline API

The delete pipeline api deletes pipelines by id.
@@ -950,7 +923,7 @@ DELETE _ingest/pipeline/my-pipeline-id
--------------------------------------------------
// AUTOSENSE

==== Simulate pipeline API
=== Simulate pipeline API

The simulate pipeline api executes a specific pipeline against
the set of documents provided in the body of the request.
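A sketch of such a request; the `_simulate` endpoint path and the `docs`/`_source` body keys are assumptions for illustration, and the document content is invented:

[source,js]
--------------------------------------------------
POST _ingest/pipeline/my-pipeline-id/_simulate
{
  "docs" : [
    { "_source" : { "field1" : "value1" } }
  ]
}
--------------------------------------------------
// NOTE: sketch only - endpoint path and body keys are assumptions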