diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 9cdce240d7f..52336ccae0b 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -272,7 +272,7 @@ public class TransportBulkAction extends HandledTransportAction(); requestsByShard.put(shardIt.shardId(), list); } - list.add(new BulkItemRequest(i, new DeleteRequest(deleteRequest))); + list.add(new BulkItemRequest(i, deleteRequest)); } } else { ShardId shardId = clusterService.operationRouting().indexShards(clusterState, concreteIndex, deleteRequest.type(), deleteRequest.id(), deleteRequest.routing()).shardId(); diff --git a/core/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java b/core/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java index 6c609eb9f31..cbd10553522 100644 --- a/core/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java +++ b/core/src/main/java/org/elasticsearch/action/delete/DeleteRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.delete; -import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocumentRequest; import org.elasticsearch.action.support.replication.ReplicationRequest; @@ -80,28 +79,6 @@ public class DeleteRequest extends ReplicationRequest implements this.id = id; } - /** - * Copy constructor that creates a new delete request that is a copy of the one provided as an argument. - */ - public DeleteRequest(DeleteRequest request) { - this(request, request); - } - - /** - * Copy constructor that creates a new delete request that is a copy of the one provided as an argument. - * The new request will inherit though headers and context from the original request that caused it. - */ - public DeleteRequest(DeleteRequest request, ActionRequest originalRequest) { - super(request); - this.type = request.type(); - this.id = request.id(); - this.routing = request.routing(); - this.parent = request.parent(); - this.refresh = request.refresh(); - this.version = request.version(); - this.versionType = request.versionType(); - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); diff --git a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java index 1c83cbeb631..b2f3bc2b018 100644 --- a/core/src/main/java/org/elasticsearch/action/get/GetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/GetRequest.java @@ -67,26 +67,6 @@ public class GetRequest extends SingleShardRequest implements Realti type = "_all"; } - /** - * Copy constructor that creates a new get request that is a copy of the one provided as an argument. - * The new request will inherit though headers and context from the original request that caused it. 
- */ - public GetRequest(GetRequest getRequest) { - this.index = getRequest.index; - this.type = getRequest.type; - this.id = getRequest.id; - this.routing = getRequest.routing; - this.parent = getRequest.parent; - this.preference = getRequest.preference; - this.fields = getRequest.fields; - this.fetchSourceContext = getRequest.fetchSourceContext; - this.refresh = getRequest.refresh; - this.realtime = getRequest.realtime; - this.version = getRequest.version; - this.versionType = getRequest.versionType; - this.ignoreErrorsOnGeneratedFields = getRequest.ignoreErrorsOnGeneratedFields; - } - /** * Constructs a new get request against the specified index. The {@link #type(String)} and {@link #id(String)} * must be set. diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 336782d16f4..5f2d97051e4 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -159,26 +159,6 @@ public class IndexRequest extends ReplicationRequest implements Do public IndexRequest() { } - /** - * Copy constructor that creates a new index request that is a copy of the one provided as an argument. - * The new request will inherit though headers and context from the original request that caused it. - */ - public IndexRequest(IndexRequest indexRequest) { - super(indexRequest); - this.type = indexRequest.type; - this.id = indexRequest.id; - this.routing = indexRequest.routing; - this.parent = indexRequest.parent; - this.timestamp = indexRequest.timestamp; - this.ttl = indexRequest.ttl; - this.source = indexRequest.source; - this.opType = indexRequest.opType; - this.refresh = indexRequest.refresh; - this.version = indexRequest.version; - this.versionType = indexRequest.versionType; - this.contentType = indexRequest.contentType; - } - /** * Constructs a new index request against the specific index. The {@link #type(String)} * {@link #source(byte[])} must be set. 
diff --git a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java index 0366e10d750..c969b501b2c 100644 --- a/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java +++ b/core/src/main/java/org/elasticsearch/action/ingest/IngestActionFilter.java @@ -103,7 +103,7 @@ public final class IngestActionFilter extends AbstractComponent implements Actio void processBulkIndexRequest(Task task, BulkRequest original, String action, ActionFilterChain chain, ActionListener listener) { BulkRequestModifier bulkRequestModifier = new BulkRequestModifier(original); executionService.executeBulkRequest(() -> bulkRequestModifier, (indexRequest, throwable) -> { - logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id(), throwable); + logger.debug("failed to execute pipeline [{}] for document [{}/{}/{}]", throwable, indexRequest.getPipeline(), indexRequest.index(), indexRequest.type(), indexRequest.id()); bulkRequestModifier.markCurrentItemAsFailed(throwable); }, (throwable) -> { if (throwable != null) { diff --git a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java index 0edce177be7..2a8f1a4ed24 100644 --- a/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java +++ b/core/src/main/java/org/elasticsearch/action/percolate/TransportPercolateAction.java @@ -21,7 +21,6 @@ package org.elasticsearch.action.percolate; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ShardOperationFailedException; -import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.TransportGetAction; import org.elasticsearch.action.support.ActionFilters; @@ -74,9 +73,7 @@ public class TransportPercolateAction extends TransportBroadcastAction listener) { request.startTime = System.currentTimeMillis(); if (request.getRequest() != null) { - //create a new get request to make sure it has the same headers and context as the original percolate request - GetRequest getRequest = new GetRequest(request.getRequest()); - getAction.execute(getRequest, new ActionListener() { + getAction.execute(request.getRequest(), new ActionListener() { @Override public void onResponse(GetResponse getResponse) { if (!getResponse.isExists()) { diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 10a1ad2efce..9d3c200ed98 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -76,23 +76,6 @@ public class SearchRequest extends ActionRequest implements Indic public SearchRequest() { } - /** - * Copy constructor that creates a new search request that is a copy of the one provided as an argument. - * The new request will inherit though headers and context from the original request that caused it. 
- */ - public SearchRequest(SearchRequest searchRequest) { - this.searchType = searchRequest.searchType; - this.indices = searchRequest.indices; - this.routing = searchRequest.routing; - this.preference = searchRequest.preference; - this.template = searchRequest.template; - this.source = searchRequest.source; - this.requestCache = searchRequest.requestCache; - this.scroll = searchRequest.scroll; - this.types = searchRequest.types; - this.indicesOptions = searchRequest.indicesOptions; - } - /** * Constructs a new search request against the indices. No indices provided here means that search * will run against all indices. diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index 18490735631..43754427598 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -59,8 +59,7 @@ public class TransportMultiSearchAction extends HandledTransportAction() { + searchAction.execute(request.requests().get(i), new ActionListener() { @Override public void onResponse(SearchResponse searchResponse) { responses.set(index, new MultiSearchResponse.Item(searchResponse, null)); diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java index 274d13bf4cf..f431c67b290 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/BasicReplicationRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.support.replication; -import org.elasticsearch.action.ActionRequest; import org.elasticsearch.index.shard.ShardId; /** @@ -38,13 +37,4 @@ public class BasicReplicationRequest extends ReplicationRequest1m. 
*/ diff --git a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index b60403b00b6..a4053ce857e 100644 --- a/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.RoutingMissingException; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction; -import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.index.IndexRequest; @@ -169,7 +168,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio final UpdateHelper.Result result = updateHelper.prepare(request, indexShard); switch (result.operation()) { case UPSERT: - IndexRequest upsertRequest = new IndexRequest((IndexRequest)result.action()); + IndexRequest upsertRequest = result.action(); // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request final BytesReference upsertSourceBytes = upsertRequest.source(); indexAction.execute(upsertRequest, new ActionListener() { @@ -206,7 +205,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio }); break; case INDEX: - IndexRequest indexRequest = new IndexRequest((IndexRequest)result.action()); + IndexRequest indexRequest = result.action(); // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request final BytesReference indexSourceBytes = indexRequest.source(); indexAction.execute(indexRequest, new ActionListener() { @@ -236,8 +235,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio }); break; case DELETE: - DeleteRequest deleteRequest = new DeleteRequest(result.action(), request); - deleteAction.execute(deleteRequest, new ActionListener() { + deleteAction.execute(result.action(), new ActionListener() { @Override public void onResponse(DeleteResponse response) { UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), false); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java index 04e1ff16973..a36af886364 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java @@ -23,6 +23,8 @@ import org.apache.lucene.index.FilteredTermsEnum; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.spatial.geopoint.document.GeoPointField; +import org.apache.lucene.spatial.util.GeoEncodingUtils; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; @@ -415,6 +417,24 @@ public final class OrdinalsBuilder implements Closeable { } } + /** + * A {@link TermsEnum} that iterates only highest resolution geo prefix coded terms. 
+ * + * @see #buildFromTerms(TermsEnum) + */ + public static TermsEnum wrapGeoPointTerms(TermsEnum termsEnum) { + return new FilteredTermsEnum(termsEnum, false) { + @Override + protected AcceptStatus accept(BytesRef term) throws IOException { + // accept only the max resolution terms + // todo is this necessary? + return GeoEncodingUtils.getPrefixCodedShift(term) == GeoPointField.PRECISION_STEP * 4 ? + AcceptStatus.YES : AcceptStatus.END; + } + }; + } + + /** * Returns the maximum document ID this builder can associate with an ordinal */ diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java index e9dae4970a9..022e3ad0923 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.spatial.geopoint.document.GeoPointField; +import org.apache.lucene.spatial.util.GeoEncodingUtils; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.CharsRefBuilder; @@ -47,8 +48,10 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData scoreFunction; - if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) { - scoreFunction = new WeightBuilder().setWeight(parser.floatValue()); + } else if (token == XContentParser.Token.START_OBJECT) { + if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) { + if (query != null) { + throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. [query] is already defined.", FunctionScoreQueryBuilder.NAME); + } + query = parseContext.parseInnerQueryBuilder(); } else { - // we try to parse a score function. If there is no score - // function for the current field name, - // functionParserMapper.get() will throw an Exception. - scoreFunction = functionParserMapper.get(parser.getTokenLocation(), currentFieldName).fromXContent(parseContext, parser); + if (singleFunctionFound) { + throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. already found function [{}], now encountering [{}]. use [functions] array if you want to define several functions.", FunctionScoreQueryBuilder.NAME, singleFunctionName, currentFieldName); + } + if (functionArrayFound) { + String errorString = "already found [functions] array, now encountering [" + currentFieldName + "]."; + handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString); + } + singleFunctionFound = true; + singleFunctionName = currentFieldName; + + // we try to parse a score function. If there is no score function for the current field name, + // functionParserMapper.get() may throw an Exception. 
+ ScoreFunctionBuilder scoreFunction = functionParserMapper.get(parser.getTokenLocation(), currentFieldName).fromXContent(parseContext, parser); + filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction)); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (parseContext.parseFieldMatcher().match(currentFieldName, FUNCTIONS_FIELD)) { + if (singleFunctionFound) { + String errorString = "already found [" + singleFunctionName + "], now encountering [functions]."; + handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString); + } + functionArrayFound = true; + currentFieldName = parseFiltersAndFunctions(parseContext, parser, filterFunctionBuilders); + } else { + throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. array [{}] is not supported", FunctionScoreQueryBuilder.NAME, currentFieldName); + } + + } else if (token.isValue()) { + if (parseContext.parseFieldMatcher().match(currentFieldName, SCORE_MODE_FIELD)) { + scoreMode = FiltersFunctionScoreQuery.ScoreMode.fromString(parser.text()); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, BOOST_MODE_FIELD)) { + combineFunction = CombineFunction.fromString(parser.text()); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, MAX_BOOST_FIELD)) { + maxBoost = parser.floatValue(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { + boost = parser.floatValue(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { + queryName = parser.text(); + } else if (parseContext.parseFieldMatcher().match(currentFieldName, MIN_SCORE_FIELD)) { + minScore = parser.floatValue(); + } else { + if (singleFunctionFound) { + throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. already found function [{}], now encountering [{}]. use [functions] array if you want to define several functions.", FunctionScoreQueryBuilder.NAME, singleFunctionName, currentFieldName); + } + if (functionArrayFound) { + String errorString = "already found [functions] array, now encountering [" + currentFieldName + "]."; + handleMisplacedFunctionsDeclaration(parser.getTokenLocation(), errorString); + } + if (parseContext.parseFieldMatcher().match(currentFieldName, WEIGHT_FIELD)) { + filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(new WeightBuilder().setWeight(parser.floatValue()))); + singleFunctionFound = true; + singleFunctionName = currentFieldName; + } else { + throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. 
field [{}] is not supported", FunctionScoreQueryBuilder.NAME, currentFieldName); + } } - filterFunctionBuilders.add(new FunctionScoreQueryBuilder.FilterFunctionBuilder(scoreFunction)); } } @@ -167,21 +195,23 @@ public class FunctionScoreQueryParser implements QueryParser> implements Streamable { +public abstract class TransportMessage implements Streamable { private TransportAddress remoteAddress; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java index dc396c3d8c0..ba3601236c9 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportRequest.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportRequest.java @@ -23,7 +23,7 @@ import org.elasticsearch.tasks.Task; /** */ -public abstract class TransportRequest extends TransportMessage { +public abstract class TransportRequest extends TransportMessage { public static class Empty extends TransportRequest { public static final Empty INSTANCE = new Empty(); @@ -32,7 +32,6 @@ public abstract class TransportRequest extends TransportMessage { +public abstract class TransportResponse extends TransportMessage { public static class Empty extends TransportResponse { public static final Empty INSTANCE = new Empty(); diff --git a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java index 376bbadf85d..26bededf685 100644 --- a/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/query/AbstractQueryTestCase.java @@ -502,7 +502,7 @@ public abstract class AbstractQueryTestCase> context.reset(parser); context.parseFieldMatcher(matcher); QueryBuilder parseInnerQueryBuilder = context.parseInnerQueryBuilder(); - assertTrue(parser.nextToken() == null); + assertNull(parser.nextToken()); return parseInnerQueryBuilder; } diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java index 18150ec2d03..09e57c133de 100644 --- a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.query.functionscore; import com.fasterxml.jackson.core.JsonParseException; - import org.apache.lucene.index.Term; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -60,7 +59,6 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; -import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; @@ -73,7 +71,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase)null); + new FunctionScoreQueryBuilder((QueryBuilder) null); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { - new FunctionScoreQueryBuilder((ScoreFunctionBuilder)null); + new 
FunctionScoreQueryBuilder((ScoreFunctionBuilder) null); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { - new FunctionScoreQueryBuilder((FunctionScoreQueryBuilder.FilterFunctionBuilder[])null); + new FunctionScoreQueryBuilder((FunctionScoreQueryBuilder.FilterFunctionBuilder[]) null); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { new FunctionScoreQueryBuilder(null, ScoreFunctionBuilders.randomFunction(123)); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { - new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), (ScoreFunctionBuilder)null); + new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), (ScoreFunctionBuilder) null); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { - new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), (FunctionScoreQueryBuilder.FilterFunctionBuilder[])null); + new FunctionScoreQueryBuilder(new MatchAllQueryBuilder(), (FunctionScoreQueryBuilder.FilterFunctionBuilder[]) null); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { new FunctionScoreQueryBuilder(null, new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { new FunctionScoreQueryBuilder(QueryBuilders.matchAllQuery(), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{null}); fail("content of array must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { new FunctionScoreQueryBuilder.FilterFunctionBuilder(null); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { new FunctionScoreQueryBuilder.FilterFunctionBuilder(null, ScoreFunctionBuilders.randomFunction(123)); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { new FunctionScoreQueryBuilder.FilterFunctionBuilder(new MatchAllQueryBuilder(), null); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { new FunctionScoreQueryBuilder(new MatchAllQueryBuilder()).scoreMode(null); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } try { new FunctionScoreQueryBuilder(new MatchAllQueryBuilder()).boostMode(null); fail("must not be null"); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { //all good } } public void testParseFunctionsArray() throws IOException { String functionScoreQuery = "{\n" + - " \"function_score\":{\n" + - " \"query\":{\n" + - " \"term\":{\n" + - " \"field1\":\"value1\"\n" + - " }\n" + - " },\n" + - " \"functions\": [\n" + - " {\n" + - " \"random_score\": {\n" + - " \"seed\":123456\n" + - " },\n" + - " \"weight\": 3,\n" + - " \"filter\": {\n" + - " \"term\":{\n" + - " \"field2\":\"value2\"\n" + - " }\n" + - " }\n" + - " },\n" + - " {\n" + - " \"filter\": {\n" + - " \"term\":{\n" + - " \"field3\":\"value3\"\n" + - " }\n" + - " },\n" + - " \"weight\": 9\n" + - " },\n" + - " {\n" + - " \"gauss\": {\n" + - " \"field_name\": {\n" + - " \"origin\":0.5,\n" + 
- " \"scale\":0.6\n" + - " }\n" + - " }\n" + - " }\n" + - " ],\n" + - " \"boost\" : 3,\n" + - " \"score_mode\" : \"avg\",\n" + - " \"boost_mode\" : \"replace\",\n" + - " \"max_boost\" : 10\n" + - " }\n" + - "}"; + " \"function_score\":{\n" + + " \"query\":{\n" + + " \"term\":{\n" + + " \"field1\":\"value1\"\n" + + " }\n" + + " },\n" + + " \"functions\": [\n" + + " {\n" + + " \"random_score\": {\n" + + " \"seed\":123456\n" + + " },\n" + + " \"weight\": 3,\n" + + " \"filter\": {\n" + + " \"term\":{\n" + + " \"field2\":\"value2\"\n" + + " }\n" + + " }\n" + + " },\n" + + " {\n" + + " \"filter\": {\n" + + " \"term\":{\n" + + " \"field3\":\"value3\"\n" + + " }\n" + + " },\n" + + " \"weight\": 9\n" + + " },\n" + + " {\n" + + " \"gauss\": {\n" + + " \"field_name\": {\n" + + " \"origin\":0.5,\n" + + " \"scale\":0.6\n" + + " }\n" + + " }\n" + + " }\n" + + " ],\n" + + " \"boost\" : 3,\n" + + " \"score_mode\" : \"avg\",\n" + + " \"boost_mode\" : \"replace\",\n" + + " \"max_boost\" : 10\n" + + " }\n" + + "}"; QueryBuilder queryBuilder = parseQuery(functionScoreQuery); //given that we copy part of the decay functions as bytes, we test that fromXContent and toXContent both work no matter what the initial format was @@ -369,31 +367,31 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase)queryBuilder).buildAsBytes(XContentType.values()[i])); + queryBuilder = parseQuery(((AbstractQueryBuilder) queryBuilder).buildAsBytes(XContentType.values()[i])); } } } public void testParseSingleFunction() throws IOException { String functionScoreQuery = "{\n" + - " \"function_score\":{\n" + - " \"query\":{\n" + - " \"term\":{\n" + - " \"field1\":\"value1\"\n" + - " }\n" + - " },\n" + - " \"gauss\": {\n" + - " \"field_name\": {\n" + - " \"origin\":0.5,\n" + - " \"scale\":0.6\n" + - " }\n" + - " },\n" + - " \"boost\" : 3,\n" + - " \"score_mode\" : \"avg\",\n" + - " \"boost_mode\" : \"replace\",\n" + - " \"max_boost\" : 10\n" + - " }\n" + - "}"; + " \"function_score\":{\n" + + " \"query\":{\n" + + " \"term\":{\n" + + " \"field1\":\"value1\"\n" + + " }\n" + + " },\n" + + " \"gauss\": {\n" + + " \"field_name\": {\n" + + " \"origin\":0.5,\n" + + " \"scale\":0.6\n" + + " }\n" + + " },\n" + + " \"boost\" : 3,\n" + + " \"score_mode\" : \"avg\",\n" + + " \"boost_mode\" : \"replace\",\n" + + " \"max_boost\" : 10\n" + + " }\n" + + "}"; QueryBuilder queryBuilder = parseQuery(functionScoreQuery); //given that we copy part of the decay functions as bytes, we test that fromXContent and toXContent both work no matter what the initial format was @@ -416,7 +414,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase)queryBuilder).buildAsBytes(XContentType.values()[i])); + queryBuilder = parseQuery(((AbstractQueryBuilder) queryBuilder).buildAsBytes(XContentType.values()[i])); } } } @@ -424,69 +422,69 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase 0); String queryString = jsonBuilder().startObject() - .startObject("function_score") - .startArray("functions") - .startObject() - .startObject("field_value_factor") - .field("field", INT_FIELD_NAME) - .endObject() - .field("weight", 1.0) - .endObject() - .endArray() - .endObject() - .endObject().string(); + .startObject("function_score") + .startArray("functions") + .startObject() + .startObject("field_value_factor") + .field("field", INT_FIELD_NAME) + .endObject() + .field("weight", 1.0) + .endObject() + .endArray() + .endObject() + .endObject().string(); QueryBuilder query = parseQuery(queryString); assertThat(query, 
instanceOf(FunctionScoreQueryBuilder.class)); FunctionScoreQueryBuilder functionScoreQueryBuilder = (FunctionScoreQueryBuilder) query; @@ -527,11 +525,11 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase | Controls what properties are added to the `target_field` based on the geoip lookup. +| `fields` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup. |====== -<1> Depends on what is available in `database_field`: +*Depends on what is available in `database_field`: + * If the GeoLite2 City database is used then the following fields may be added under the `target_field`: `ip`, `country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` and `location`. The fields actually added depend on what has been found and which fields were configured in `fields`. diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc new file mode 100644 index 00000000000..ab746980f84 --- /dev/null +++ b/docs/plugins/ingest.asciidoc @@ -0,0 +1,26 @@ +[[ingest]] +== Ingest Plugins + +The ingest plugins extend Elaticsearch by providing additional ingest node capabilities. + +[float] +=== Core Ingest Plugins + +The core ingest plugins are: + +<>:: + +The ingest attachment plugin lets Elasticsearch extract file attachments in common formats (such as PPT, XLS, and PDF) by +using the Apache text extraction library http://lucene.apache.org/tika/[Tika]. + +<>:: + +The GeoIP processor adds information about the geographical location of IP addresses, based on data from the Maxmind databases. +This processor adds this information by default under the `geoip` field. ++ +The ingest-geoip plugin ships by default with the GeoLite2 City and GeoLite2 Country geoip2 databases from Maxmind made available +under the CCA-ShareAlike 3.0 license. For more details see, http://dev.maxmind.com/geoip/geoip2/geolite2/. + +include::ingest-attachment.asciidoc[] + +include::ingest-geoip.asciidoc[] \ No newline at end of file diff --git a/docs/reference/index.asciidoc b/docs/reference/index.asciidoc index d150471a8b0..87c400e018c 100644 --- a/docs/reference/index.asciidoc +++ b/docs/reference/index.asciidoc @@ -41,6 +41,8 @@ include::modules.asciidoc[] include::index-modules.asciidoc[] +include::ingest.asciidoc[] + include::testing.asciidoc[] include::glossary.asciidoc[] diff --git a/docs/reference/ingest.asciidoc b/docs/reference/ingest.asciidoc new file mode 100644 index 00000000000..32b3efdeb28 --- /dev/null +++ b/docs/reference/ingest.asciidoc @@ -0,0 +1,34 @@ +[[ingest]] += Ingest Node + +[partintro] +-- +Ingest node can be used to pre-process documents before the actual indexing takes place. +This pre-processing happens by an ingest node that intercepts bulk and index requests, applies the +transformations and then passes the documents back to the index or bulk APIs. + +Ingest node is enabled by default. In order to disable ingest the following +setting should be configured in the elasticsearch.yml file: + +[source,yaml] +-------------------------------------------------- +node.ingest: false +-------------------------------------------------- + +It is possible to enable ingest on any node or have dedicated ingest nodes. + +In order to pre-process document before indexing the `pipeline` parameter should be used +on an index or bulk request to tell Ingest what pipeline is going to be used. 
+ +[source,js] +-------------------------------------------------- +PUT /my-index/my-type/my-id?pipeline=my_pipeline_id +{ + ... +} +-------------------------------------------------- +// AUTOSENSE + +-- + +include::ingest/ingest-node.asciidoc[] \ No newline at end of file diff --git a/docs/reference/ingest/ingest.asciidoc b/docs/reference/ingest/ingest-node.asciidoc similarity index 95% rename from docs/reference/ingest/ingest.asciidoc rename to docs/reference/ingest/ingest-node.asciidoc index 17a6d2d6e12..8185cc0c30c 100644 --- a/docs/reference/ingest/ingest.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1,33 +1,5 @@ -[[ingest]] -== Ingest Node - -Ingest node can be used to pre-process documents before the actual indexing takes place. -This pre-processing happens by an ingest node that intercepts bulk and index requests, applies the -transformations and then passes the documents back to the index or bulk APIs. - -Ingest node is enabled by default. In order to disable ingest the following -setting should be configured in the elasticsearch.yml file: - -[source,yaml] --------------------------------------------------- -node.ingest: false --------------------------------------------------- - -It is possible to enable ingest on any node or have dedicated ingest nodes. - -In order to pre-process document before indexing the `pipeline` parameter should be used -on an index or bulk request to tell Ingest what pipeline is going to be used. - -[source,js] --------------------------------------------------- -PUT /my-index/my-type/my-id?pipeline=my_pipeline_id -{ - ... -} --------------------------------------------------- -// AUTOSENSE - -=== Pipeline Definition +[[pipe-line]] +== Pipeline Definition A pipeline is a definition of a series of processors that are to be executed in the same sequential order as they are declared. @@ -45,7 +17,7 @@ what the pipeline attempts to achieve. The `processors` parameter defines a list of processors to be executed in order. -=== Processors +== Processors All processors are defined in the following way within a pipeline definition: @@ -67,7 +39,7 @@ but is very useful for bookkeeping and tracing errors to specific processors. See <> to learn more about the `on_failure` field and error handling in pipelines. -==== Set processor +=== Set processor Sets one field and associates it with the specified value. If the field already exists, its value will be replaced with the provided one. @@ -90,7 +62,7 @@ its value will be replaced with the provided one. } -------------------------------------------------- -==== Append processor +=== Append processor Appends one or more values to an existing array if the field already exists and it is an array. Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. Creates an array containing the provided values if the fields doesn't exist. @@ -115,7 +87,7 @@ Accepts a single value or an array of values. } -------------------------------------------------- -==== Remove processor +=== Remove processor Removes an existing field. If the field doesn't exist, an exception will be thrown [[remove-options]] @@ -135,7 +107,7 @@ Removes an existing field. If the field doesn't exist, an exception will be thro } -------------------------------------------------- -==== Rename processor +=== Rename processor Renames an existing field. If the field doesn't exist, an exception will be thrown. Also, the new field name must not exist. @@ -159,7 +131,7 @@ name must not exist. 
-------------------------------------------------- -==== Convert processor +=== Convert processor Converts an existing field's value to a different type, like turning a string to an integer. If the field value is an array, all members will be converted. @@ -187,7 +159,7 @@ false if its string value is equal to `false` (ignore case) and it will throw ex } -------------------------------------------------- -==== Gsub processor +=== Gsub processor Converts a string field by applying a regular expression and a replacement. If the field is not a string, the processor will throw an exception. @@ -212,7 +184,7 @@ If the field is not a string, the processor will throw an exception. } -------------------------------------------------- -==== Join processor +=== Join processor Joins each element of an array into a single string using a separator character between each element. Throws error when the field is not an array. @@ -235,7 +207,7 @@ Throws error when the field is not an array. } -------------------------------------------------- -==== Split processor +=== Split processor Split a field to an array using a separator character. Only works on string fields. [[split-options]] @@ -255,7 +227,7 @@ Split a field to an array using a separator character. Only works on string fiel } -------------------------------------------------- -==== Lowercase processor +=== Lowercase processor Converts a string to its lowercase equivalent. [[lowercase-options]] @@ -275,7 +247,7 @@ Converts a string to its lowercase equivalent. } -------------------------------------------------- -==== Uppercase processor +=== Uppercase processor Converts a string to its uppercase equivalent. [[uppercase-options]] @@ -295,7 +267,7 @@ Converts a string to its uppercase equivalent. } -------------------------------------------------- -==== Trim processor +=== Trim processor Trims whitespace from field. NOTE: this only works on leading and trailing whitespaces. [[trim-options]] @@ -315,7 +287,7 @@ Trims whitespace from field. NOTE: this only works on leading and trailing white } -------------------------------------------------- -==== Grok Processor +=== Grok Processor The Grok Processor extracts structured fields out of a single text field within a document. You choose which field to extract matched fields from, as well as the Grok Pattern you expect will match. A Grok Pattern is like a regular @@ -330,7 +302,7 @@ Here, you can add your own custom grok pattern files with custom grok expression If you need help building patterns to match your logs, you will find the and applications quite useful! -===== Grok Basics +==== Grok Basics Grok sits on top of regular expressions, so any regular expressions are valid in grok as well. The regular expression library is Oniguruma, and you can see the full supported regexp syntax @@ -367,7 +339,7 @@ Grok expression. %{NUMBER:duration} %{IP:client} -------------------------------------------------- -===== Custom Patterns and Pattern Files +==== Custom Patterns and Pattern Files The Grok Processor comes pre-packaged with a base set of pattern files. These patterns may not always have what you are looking for. These pattern files have a very basic format. Each line describes a named pattern with @@ -393,7 +365,7 @@ SECOND (?:(?:[0-5]?[0-9]|60)(?:[:.,][0-9]+)?) 
TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) -------------------------------------------------- -===== Using Grok Processor in a Pipeline +==== Using Grok Processor in a Pipeline [[grok-options]] .Grok Options @@ -417,7 +389,7 @@ a document. The pattern for this could be -[source] +[source,js] -------------------------------------------------- %{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration} -------------------------------------------------- @@ -474,7 +446,7 @@ An example of a pipeline specifying custom pattern definitions: } -------------------------------------------------- -==== Date processor +=== Date processor The date processor is used for parsing dates from fields, and then using that date or timestamp as the timestamp for that document. The date processor adds by default the parsed date as a new field called `@timestamp`, configurable by setting the `target_field` @@ -512,7 +484,7 @@ An example that adds the parsed date to the `timestamp` field based on the `init } -------------------------------------------------- -==== Fail processor +=== Fail processor The Fail Processor is used to raise an exception. This is useful for when a user expects a pipeline to fail and wishes to relay a specific message to the requester. @@ -534,7 +506,7 @@ to the requester. } -------------------------------------------------- -==== Foreach processor +=== Foreach processor All processors can operate on elements inside an array, but if all elements of an array need to be processed in the same way defining a processor for each element becomes cumbersome and tricky because it is likely that the number of elements in an array are unknown. For this reason the `foreach` @@ -680,7 +652,7 @@ In this example if the `remove` processor does fail then the array elements that have been processed thus far will be updated. -=== Accessing data in pipelines +== Accessing data in pipelines Processors in pipelines have read and write access to documents that pass through the pipeline. The fields in the source of a document and its metadata fields are accessible. @@ -781,7 +753,8 @@ to depends on the field in the source with name `geoip.country_iso_code`. } -------------------------------------------------- -==== Handling Failure in Pipelines +[[handling-failure-in-pipelines]] +=== Handling Failure in Pipelines In its simplest case, pipelines describe a list of processors which are executed sequentially and processing halts at the first exception. This @@ -845,7 +818,7 @@ the index for which failed documents get sent. -------------------------------------------------- -===== Accessing Error Metadata From Processors Handling Exceptions +==== Accessing Error Metadata From Processors Handling Exceptions Sometimes you may want to retrieve the actual error message that was thrown by a failed processor. To do so you can access metadata fields called @@ -878,9 +851,9 @@ of manually setting it. -------------------------------------------------- -=== Ingest APIs +== Ingest APIs -==== Put pipeline API +=== Put pipeline API The put pipeline api adds pipelines and updates existing pipelines in the cluster. @@ -904,7 +877,7 @@ PUT _ingest/pipeline/my-pipeline-id NOTE: The put pipeline api also instructs all ingest nodes to reload their in-memory representation of pipelines, so that pipeline changes take immediately in effect. -==== Get pipeline API +=== Get pipeline API The get pipeline api returns pipelines based on id. This api always returns a local reference of the pipeline. 
@@ -940,7 +913,7 @@ For each returned pipeline the source and the version is returned. The version is useful for knowing what version of the pipeline the node has. Multiple ids can be provided at the same time. Also wildcards are supported. -==== Delete pipeline API +=== Delete pipeline API The delete pipeline api deletes pipelines by id. @@ -950,7 +923,7 @@ DELETE _ingest/pipeline/my-pipeline-id -------------------------------------------------- // AUTOSENSE -==== Simulate pipeline API +=== Simulate pipeline API The simulate pipeline api executes a specific pipeline against the set of documents provided in the body of the request.
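The diff is cut off just as the documentation introduces the simulate pipeline API. As a minimal, illustrative sketch only (not part of this change; the pipeline definition and sample document below are placeholders), a request of the kind that section describes could look like this:

[source,js]
--------------------------------------------------
POST _ingest/pipeline/_simulate
{
  "pipeline" : {
    "description" : "example pipeline for simulation",
    "processors" : [
      {
        "set" : {
          "field" : "field2",
          "value" : "_value"
        }
      }
    ]
  },
  "docs" : [
    {
      "_index" : "index",
      "_type" : "type",
      "_id" : "id",
      "_source" : { "field1" : "value1" }
    }
  ]
}
--------------------------------------------------
// AUTOSENSE

The response would show each document as it looks after the inline pipeline has been applied, which is useful for testing a pipeline definition before registering it with the put pipeline API.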