From c949ce00e8276d570418293373ad33c5cbaf5319 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 13 May 2015 14:34:53 -0400 Subject: [PATCH 01/15] Add test group for third-party tests --- .../test/ElasticsearchIntegrationTest.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java index 67991a896ea..d862a4635e2 100644 --- a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java +++ b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java @@ -243,6 +243,26 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase public @interface Integration { } + /** + * Property that controls whether ThirdParty Integration tests are run (not the default). + */ + public static final String SYSPROP_THIRDPARTY = "tests.thirdparty"; + + /** + * Annotation for third-party integration tests. + *

+ * These are tests the require a third-party service in order to run. They + * may require the user to manually configure an external process (such as rabbitmq), + * or may additionally require some external configuration (e.g. AWS credentials) + * via the {@code tests.config} system property. + */ + @Inherited + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + @TestGroup(enabled = false, sysProperty = ElasticsearchIntegrationTest.SYSPROP_THIRDPARTY) + public @interface ThirdParty { + } + /** node names of the corresponding clusters will start with these prefixes */ public static final String SUITE_CLUSTER_NODE_PREFIX = "node_s"; public static final String TEST_CLUSTER_NODE_PREFIX = "node_t"; From 3dd706f1e630047830a363d5615db288dfc5c5ed Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Wed, 13 May 2015 14:56:29 -0400 Subject: [PATCH 02/15] Add these two props for consistency, so we aren't confused when we deduplicate with elasticsearch-parent --- pom.xml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pom.xml b/pom.xml index a396fa4f8c9..cc0f43de08d 100644 --- a/pom.xml +++ b/pom.xml @@ -616,6 +616,8 @@ ${tests.timeoutSuite} ${tests.showSuccess} ${tests.integration} + ${tests.thirdparty} + ${tests.config} ${tests.client.ratio} ${tests.enable_mock_modules} ${tests.assertion.disabled} From 30cdd4c03b2d5df599e0ca86c5811ea88389970c Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 14 May 2015 00:09:44 -0400 Subject: [PATCH 03/15] Use our provided JNA library, versus one installed on the system which might be older and not work. --- bin/elasticsearch.in.bat | Bin 2697 -> 2795 bytes bin/elasticsearch.in.sh | 3 +++ pom.xml | 1 + 3 files changed, 4 insertions(+) diff --git a/bin/elasticsearch.in.bat b/bin/elasticsearch.in.bat index 0e77e19ee872cd258a94eaefe7f0ddb18ccb74d2..7a6bfda37ad439bdb200597885b2d98533b39185 100644 GIT binary patch delta 81 zcmeAaeJ#4-B&SJ`tFJ<6ajHUoX^}!fQGQuwN@|LNm!G3TVorHtWwAn8YEf}%u|i2k jszPyPaY<^fLVn&P7J1&hM7_LxponcrQEBSr8x)~lT delta 11 ScmaDY+9|r>B-XX:+HeapDumpOnOutOfMemoryError -XX:+DisableExplicitGC -Dfile.encoding=UTF-8 + -Djna.nosys=true -Delasticsearch From a5c0ac0d67baa081ff4ae04c33974bab8c0ef0b4 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Mon, 11 May 2015 09:32:16 -0700 Subject: [PATCH 04/15] Scripting: Add Multi-Valued Field Methods to Expressions Add methods to operate on multi-valued fields in the expressions language. Note that users will still not be able to access individual values within a multi-valued field. The following methods will be included: * min * max * avg * median * count * sum Additionally, changes have been made to MultiValueMode to support the new median method. 
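As a usage sketch only (not part of this change set), the new methods can be invoked from an expression script field, in the style of the new ExpressionScriptTests added below. The index and field names are illustrative, client() is assumed to come from the integration-test harness, and the addScriptField(name, lang, script, params) overload is assumed to be available on SearchRequestBuilder in this version:

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.index.query.QueryBuilders;

    // Illustrative only: compute per-document aggregates over the multi-valued
    // numeric field "double0" with the expression script language.
    SearchResponse rsp = client().prepareSearch("test")
            .setQuery(QueryBuilders.matchAllQuery())
            .addScriptField("median_double0", "expression", "doc['double0'].median()", null)
            .addScriptField("range_double0", "expression", "doc['double0'].max() - doc['double0'].min()", null)
            .get();
    double median = rsp.getHits().getAt(0).field("median_double0").getValue();
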
closes #11105 --- docs/reference/search/request/sort.asciidoc | 2 + .../expression/CountMethodFunctionValues.java | 44 +++++++++++ .../expression/CountMethodValueSource.java | 73 +++++++++++++++++++ .../expression/DateMethodFunctionValues.java | 5 +- .../expression/DateMethodValueSource.java | 31 ++++---- .../ExpressionScriptEngineService.java | 24 +++++- .../expression/FieldDataFunctionValues.java | 4 +- .../expression/FieldDataValueSource.java | 28 ++++--- .../elasticsearch/search/MultiValueMode.java | 49 ++++++++++++- .../expression/ExpressionScriptTests.java | 66 +++++++++++++++++ .../search/MultiValueModeTests.java | 14 ++++ 11 files changed, 307 insertions(+), 33 deletions(-) create mode 100644 src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java create mode 100644 src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index 39d1262a050..1e4218bb61d 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -50,6 +50,8 @@ to. The `mode` option can have the following values: number based array fields. `avg`:: Use the average of all values as sort value. Only applicable for number based array fields. +`median`:: Use the median of all values as sort value. Only applicable + for number based array fields. ===== Sort mode example usage diff --git a/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java b/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java new file mode 100644 index 00000000000..818404e98e2 --- /dev/null +++ b/src/main/java/org/elasticsearch/script/expression/CountMethodFunctionValues.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script.expression; + +import org.apache.lucene.queries.function.ValueSource; +import org.apache.lucene.queries.function.docvalues.DoubleDocValues; +import org.elasticsearch.index.fielddata.AtomicNumericFieldData; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; + +/** + * FunctionValues to get the count of the number of values in a field for a document. 
+ */ +public class CountMethodFunctionValues extends DoubleDocValues { + SortedNumericDoubleValues values; + + CountMethodFunctionValues(ValueSource parent, AtomicNumericFieldData fieldData) { + super(parent); + + values = fieldData.getDoubleValues(); + } + + @Override + public double doubleVal(int doc) { + values.setDocument(doc); + return values.count(); + } +} diff --git a/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java b/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java new file mode 100644 index 00000000000..0fa83d92d76 --- /dev/null +++ b/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script.expression; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.queries.function.FunctionValues; +import org.apache.lucene.queries.function.ValueSource; +import org.elasticsearch.index.fielddata.AtomicFieldData; +import org.elasticsearch.index.fielddata.AtomicNumericFieldData; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.search.MultiValueMode; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +/** + * A ValueSource to create FunctionValues to get the count of the number of values in a field for a document. 
+ */ +public class CountMethodValueSource extends ValueSource { + protected IndexFieldData fieldData; + + protected CountMethodValueSource(IndexFieldData fieldData) { + Objects.requireNonNull(fieldData); + + this.fieldData = fieldData; + } + + @Override + public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { + AtomicFieldData leafData = fieldData.load(leaf); + assert(leafData instanceof AtomicNumericFieldData); + + return new CountMethodFunctionValues(this, (AtomicNumericFieldData)leafData); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + FieldDataValueSource that = (FieldDataValueSource) o; + + return fieldData.equals(that.fieldData); + } + + @Override + public int hashCode() { + return fieldData.hashCode(); + } + + @Override + public String description() { + return "count: field(" + fieldData.getFieldNames().toString() + ")"; + } +} diff --git a/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java b/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java index f71a3697664..f7198fc0ae2 100644 --- a/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java +++ b/src/main/java/org/elasticsearch/script/expression/DateMethodFunctionValues.java @@ -25,13 +25,14 @@ import java.util.TimeZone; import org.apache.lucene.queries.function.ValueSource; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; +import org.elasticsearch.search.MultiValueMode; class DateMethodFunctionValues extends FieldDataFunctionValues { private final int calendarType; private final Calendar calendar; - DateMethodFunctionValues(ValueSource parent, AtomicNumericFieldData data, int calendarType) { - super(parent, data); + DateMethodFunctionValues(ValueSource parent, MultiValueMode multiValueMode, AtomicNumericFieldData data, int calendarType) { + super(parent, multiValueMode, data); this.calendarType = calendarType; calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT); diff --git a/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java b/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java index a157790e2bb..522b546656d 100644 --- a/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java +++ b/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java @@ -29,14 +29,15 @@ import org.apache.lucene.queries.function.FunctionValues; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.search.MultiValueMode; class DateMethodValueSource extends FieldDataValueSource { protected final String methodName; protected final int calendarType; - DateMethodValueSource(IndexFieldData indexFieldData, String methodName, int calendarType) { - super(indexFieldData); + DateMethodValueSource(IndexFieldData indexFieldData, MultiValueMode multiValueMode, String methodName, int calendarType) { + super(indexFieldData, multiValueMode); Objects.requireNonNull(methodName); @@ -44,6 +45,19 @@ class DateMethodValueSource extends FieldDataValueSource { this.calendarType = calendarType; } + @Override + public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { + AtomicFieldData leafData = fieldData.load(leaf); + assert(leafData instanceof 
AtomicNumericFieldData); + + return new DateMethodFunctionValues(this, multiValueMode, (AtomicNumericFieldData)leafData, calendarType); + } + + @Override + public String description() { + return methodName + ": field(" + fieldData.getFieldNames().toString() + ")"; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -64,17 +78,4 @@ class DateMethodValueSource extends FieldDataValueSource { result = 31 * result + calendarType; return result; } - - @Override - public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { - AtomicFieldData leafData = fieldData.load(leaf); - assert(leafData instanceof AtomicNumericFieldData); - - return new DateMethodFunctionValues(this, (AtomicNumericFieldData)leafData, calendarType); - } - - @Override - public String description() { - return methodName + ": field(" + fieldData.getFieldNames().toString() + ")"; - } } diff --git a/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java b/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java index 6d6f986432b..5e94422314a 100644 --- a/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java +++ b/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java @@ -39,6 +39,7 @@ import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptEngineService; import org.elasticsearch.script.SearchScript; +import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.lookup.SearchLookup; import java.text.ParseException; @@ -60,6 +61,13 @@ public class ExpressionScriptEngineService extends AbstractComponent implements protected static final String GET_MINUTES_METHOD = "getMinutes"; protected static final String GET_SECONDS_METHOD = "getSeconds"; + protected static final String MINIMUM_METHOD = "min"; + protected static final String MAXIMUM_METHOD = "max"; + protected static final String AVERAGE_METHOD = "avg"; + protected static final String MEDIAN_METHOD = "median"; + protected static final String SUM_METHOD = "sum"; + protected static final String COUNT_METHOD = "count"; + @Inject public ExpressionScriptEngineService(Settings settings) { super(settings); @@ -156,7 +164,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements IndexFieldData fieldData = lookup.doc().fieldDataService().getForField((NumberFieldMapper)field); if (methodname == null) { - bindings.add(variable, new FieldDataValueSource(fieldData)); + bindings.add(variable, new FieldDataValueSource(fieldData, MultiValueMode.MIN)); } else { bindings.add(variable, getMethodValueSource(field, fieldData, fieldname, methodname)); } @@ -180,6 +188,18 @@ public class ExpressionScriptEngineService extends AbstractComponent implements return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.MINUTE); case GET_SECONDS_METHOD: return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.SECOND); + case MINIMUM_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.MIN); + case MAXIMUM_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.MAX); + case AVERAGE_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.AVG); + case MEDIAN_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN); + case SUM_METHOD: + return new FieldDataValueSource(fieldData, MultiValueMode.SUM); + case 
COUNT_METHOD: + return new CountMethodValueSource(fieldData); default: throw new IllegalArgumentException("Member method [" + methodName + "] does not exist."); } @@ -190,7 +210,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements throw new IllegalArgumentException("Member method [" + methodName + "] can only be used with a date field type, not the field [" + fieldName + "]."); } - return new DateMethodValueSource(fieldData, methodName, calendarType); + return new DateMethodValueSource(fieldData, MultiValueMode.MIN, methodName, calendarType); } @Override diff --git a/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java b/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java index 7f25b3e1931..b3e06d6b9f2 100644 --- a/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java +++ b/src/main/java/org/elasticsearch/script/expression/FieldDataFunctionValues.java @@ -31,9 +31,9 @@ import org.elasticsearch.search.MultiValueMode; class FieldDataFunctionValues extends DoubleDocValues { NumericDoubleValues dataAccessor; - FieldDataFunctionValues(ValueSource parent, AtomicNumericFieldData d) { + FieldDataFunctionValues(ValueSource parent, MultiValueMode m, AtomicNumericFieldData d) { super(parent); - dataAccessor = MultiValueMode.MIN.select(d.getDoubleValues(), 0d); + dataAccessor = m.select(d.getDoubleValues(), 0d); } @Override diff --git a/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java b/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java index 7a97532068a..39386ee4913 100644 --- a/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java +++ b/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java @@ -25,6 +25,7 @@ import org.apache.lucene.queries.function.ValueSource; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.search.MultiValueMode; import java.io.IOException; import java.util.Map; @@ -36,18 +37,14 @@ import java.util.Objects; class FieldDataValueSource extends ValueSource { protected IndexFieldData fieldData; + protected MultiValueMode multiValueMode; - protected FieldDataValueSource(IndexFieldData d) { + protected FieldDataValueSource(IndexFieldData d, MultiValueMode m) { Objects.requireNonNull(d); + Objects.requireNonNull(m); fieldData = d; - } - - @Override - public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { - AtomicFieldData leafData = fieldData.load(leaf); - assert(leafData instanceof AtomicNumericFieldData); - return new FieldDataFunctionValues(this, (AtomicNumericFieldData)leafData); + multiValueMode = m; } @Override @@ -57,12 +54,23 @@ class FieldDataValueSource extends ValueSource { FieldDataValueSource that = (FieldDataValueSource) o; - return fieldData.equals(that.fieldData); + if (!fieldData.equals(that.fieldData)) return false; + return multiValueMode == that.multiValueMode; + } @Override public int hashCode() { - return fieldData.hashCode(); + int result = fieldData.hashCode(); + result = 31 * result + multiValueMode.hashCode(); + return result; + } + + @Override + public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException { + AtomicFieldData leafData = fieldData.load(leaf); + assert(leafData instanceof AtomicNumericFieldData); + return new 
FieldDataFunctionValues(this, multiValueMode, (AtomicNumericFieldData)leafData); } @Override diff --git a/src/main/java/org/elasticsearch/search/MultiValueMode.java b/src/main/java/org/elasticsearch/search/MultiValueMode.java index 839d4714dbe..50b9ad6b193 100644 --- a/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -20,6 +20,7 @@ package org.elasticsearch.search; +import javafx.collections.transformation.SortedList; import org.apache.lucene.index.*; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; @@ -105,6 +106,46 @@ public enum MultiValueMode { } }, + /** + * Median of the values. + * + * Note that apply/reduce do not work with MED since median cannot be derived from + * an accumulator algorithm without using internal memory. + */ + MEDIAN { + @Override + protected long pick(SortedNumericDocValues values, long missingValue) { + int count = values.count(); + if (count > 0) { + if (count % 2 == 0) { + count /= 2; + return (values.valueAt(count - 1) + values.valueAt(count))/2; + } else { + count /= 2; + return values.valueAt(count); + } + } else { + return missingValue; + } + } + + @Override + protected double pick(SortedNumericDoubleValues values, double missingValue) { + int count = values.count(); + if (count > 0) { + if (count % 2 == 0) { + count /= 2; + return (values.valueAt(count - 1) + values.valueAt(count))/2; + } else { + count /= 2; + return values.valueAt(count); + } + } else { + return missingValue; + } + } + }, + /** * Pick the lowest value. */ @@ -288,7 +329,9 @@ public enum MultiValueMode { * @param b another argument * @return the result of the function. */ - public abstract double apply(double a, double b); + public double apply(double a, double b) { + throw new UnsupportedOperationException(); + } /** * Applies the sort mode and returns the result. This method is meant to be @@ -302,7 +345,9 @@ public enum MultiValueMode { * @param b another argument * @return the result of the function. 
*/ - public abstract long apply(long a, long b); + public long apply(long a, long b) { + throw new UnsupportedOperationException(); + } public int applyOrd(int ord1, int ord2) { throw new UnsupportedOperationException(); diff --git a/src/test/java/org/elasticsearch/script/expression/ExpressionScriptTests.java b/src/test/java/org/elasticsearch/script/expression/ExpressionScriptTests.java index 2c8a7ddbc0d..cf01a3e4cc9 100644 --- a/src/test/java/org/elasticsearch/script/expression/ExpressionScriptTests.java +++ b/src/test/java/org/elasticsearch/script/expression/ExpressionScriptTests.java @@ -125,6 +125,72 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest { assertEquals(1983.0, hits.getAt(1).field("foo").getValue()); } + public void testMultiValueMethods() throws Exception { + ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double")); + ensureGreen("test"); + indexRandom(true, + client().prepareIndex("test", "doc", "1").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double1", "1.2", "double1", "2.4"), + client().prepareIndex("test", "doc", "2").setSource("double0", "5.0", "double1", "3.0"), + client().prepareIndex("test", "doc", "3").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double0", "-1.5", "double1", "4.0")); + + + SearchResponse rsp = buildRequest("doc['double0'].count() + doc['double1'].count()").get(); + assertSearchResponse(rsp); + SearchHits hits = rsp.getHits(); + assertEquals(3, hits.getTotalHits()); + assertEquals(5.0, hits.getAt(0).field("foo").getValue()); + assertEquals(2.0, hits.getAt(1).field("foo").getValue()); + assertEquals(5.0, hits.getAt(2).field("foo").getValue()); + + rsp = buildRequest("doc['double0'].sum()").get(); + assertSearchResponse(rsp); + hits = rsp.getHits(); + assertEquals(3, hits.getTotalHits()); + assertEquals(7.5, hits.getAt(0).field("foo").getValue()); + assertEquals(5.0, hits.getAt(1).field("foo").getValue()); + assertEquals(6.0, hits.getAt(2).field("foo").getValue()); + + rsp = buildRequest("doc['double0'].avg() + doc['double1'].avg()").get(); + assertSearchResponse(rsp); + hits = rsp.getHits(); + assertEquals(3, hits.getTotalHits()); + assertEquals(4.3, hits.getAt(0).field("foo").getValue()); + assertEquals(8.0, hits.getAt(1).field("foo").getValue()); + assertEquals(5.5, hits.getAt(2).field("foo").getValue()); + + rsp = buildRequest("doc['double0'].median()").get(); + assertSearchResponse(rsp); + hits = rsp.getHits(); + assertEquals(3, hits.getTotalHits()); + assertEquals(1.5, hits.getAt(0).field("foo").getValue()); + assertEquals(5.0, hits.getAt(1).field("foo").getValue()); + assertEquals(1.25, hits.getAt(2).field("foo").getValue()); + + rsp = buildRequest("doc['double0'].min()").get(); + assertSearchResponse(rsp); + hits = rsp.getHits(); + assertEquals(3, hits.getTotalHits()); + assertEquals(1.0, hits.getAt(0).field("foo").getValue()); + assertEquals(5.0, hits.getAt(1).field("foo").getValue()); + assertEquals(-1.5, hits.getAt(2).field("foo").getValue()); + + rsp = buildRequest("doc['double0'].max()").get(); + assertSearchResponse(rsp); + hits = rsp.getHits(); + assertEquals(3, hits.getTotalHits()); + assertEquals(5.0, hits.getAt(0).field("foo").getValue()); + assertEquals(5.0, hits.getAt(1).field("foo").getValue()); + assertEquals(5.0, hits.getAt(2).field("foo").getValue()); + + rsp = buildRequest("doc['double0'].sum()/doc['double0'].count()").get(); + assertSearchResponse(rsp); + hits = rsp.getHits(); + 
assertEquals(3, hits.getTotalHits()); + assertEquals(2.5, hits.getAt(0).field("foo").getValue()); + assertEquals(5.0, hits.getAt(1).field("foo").getValue()); + assertEquals(1.5, hits.getAt(2).field("foo").getValue()); + } + public void testInvalidDateMethodCall() throws Exception { ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double", "type=double")); ensureGreen("test"); diff --git a/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index 9f8c9ef6239..61a3e557aa9 100644 --- a/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -32,6 +32,8 @@ import org.elasticsearch.test.ElasticsearchTestCase; import java.io.IOException; import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; public class MultiValueModeTests extends ElasticsearchTestCase { @@ -122,6 +124,9 @@ public class MultiValueModeTests extends ElasticsearchTestCase { private void verify(SortedNumericDocValues values, int maxDoc) { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : MultiValueMode.values()) { + if (MultiValueMode.MEDIAN.equals(mode)) { + continue; + } final NumericDocValues selected = mode.select(values, missingValue); for (int i = 0; i < maxDoc; ++i) { final long actual = selected.get(i); @@ -147,6 +152,9 @@ public class MultiValueModeTests extends ElasticsearchTestCase { private void verify(SortedNumericDocValues values, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : MultiValueMode.values()) { + if (MultiValueMode.MEDIAN.equals(mode)) { + continue; + } final NumericDocValues selected = mode.select(values, missingValue, rootDocs, new BitDocIdSet(innerDocs), maxDoc); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { @@ -239,6 +247,9 @@ public class MultiValueModeTests extends ElasticsearchTestCase { private void verify(SortedNumericDoubleValues values, int maxDoc) { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : MultiValueMode.values()) { + if (MultiValueMode.MEDIAN.equals(mode)) { + continue; + } final NumericDoubleValues selected = mode.select(values, missingValue); for (int i = 0; i < maxDoc; ++i) { final double actual = selected.get(i); @@ -264,6 +275,9 @@ public class MultiValueModeTests extends ElasticsearchTestCase { private void verify(SortedNumericDoubleValues values, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : MultiValueMode.values()) { + if (MultiValueMode.MEDIAN.equals(mode)) { + continue; + } final NumericDoubleValues selected = mode.select(values, missingValue, rootDocs, new BitDocIdSet(innerDocs), maxDoc); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { From df59288b729a0712dc0783eebb92661507bbef91 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Thu, 14 May 2015 09:53:26 -0700 Subject: [PATCH 05/15] Fixed a broken import in MultiValueMode. 
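For reference, the median selection introduced in the previous patch picks the middle of a document's sorted values, averaging the two middle values when the count is even. A standalone sketch of that behaviour (illustrative only, not the actual MultiValueMode code; the input is assumed to be sorted per document, as SortedNumericDoubleValues guarantees):

    // Illustrative only: how the new MEDIAN mode picks a value from a
    // document's sorted multi-values.
    final class MedianPickSketch {
        static double pickMedian(double[] sortedValues, double missingValue) {
            int count = sortedValues.length;
            if (count == 0) {
                return missingValue;            // no values: fall back to the missing value
            }
            if (count % 2 == 0) {
                int half = count / 2;
                // even count: average the two middle values
                return (sortedValues[half - 1] + sortedValues[half]) / 2;
            }
            return sortedValues[count / 2];     // odd count: take the middle value
        }
    }

For example, the test document with double0 values 5.0, 1.0 and 1.5 sorts to [1.0, 1.5, 5.0] and yields 1.5, matching the expected value asserted in the new median test above.
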
--- src/main/java/org/elasticsearch/search/MultiValueMode.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/org/elasticsearch/search/MultiValueMode.java b/src/main/java/org/elasticsearch/search/MultiValueMode.java index 50b9ad6b193..f7defa89263 100644 --- a/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -20,7 +20,6 @@ package org.elasticsearch.search; -import javafx.collections.transformation.SortedList; import org.apache.lucene.index.*; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; From d31ce434526f3477eead2597acb38ed32552ad6a Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 14 May 2015 13:01:41 -0700 Subject: [PATCH 06/15] Mappings: Add back support for enabled/includes/excludes in _source This adds back the ability to disable _source, as well as set includes and excludes. However, it also restricts these settings to not be updateable. enabled was actually already not modifiable, but no conflict was previously given if an attempt was made to change it. This also adds a check that can be made on the source mapper to know if the the source is "complete" and can be used for purposes other than returning in search or get requests. There is one example use here in highlighting, but more need to be added in a follow up issue (eg in the update API). closes #11116 --- .../mapper/internal/SourceFieldMapper.java | 37 ++++-- .../search/highlight/HighlightPhase.java | 4 +- .../source/DefaultSourceMappingTests.java | 124 ++++++++++++++---- .../UpdateMappingIntegrationTests.java | 96 -------------- 4 files changed, 122 insertions(+), 139 deletions(-) diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index be962af9234..5dd55356257 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -54,6 +54,7 @@ import org.elasticsearch.index.mapper.RootMapper; import org.elasticsearch.index.mapper.core.AbstractFieldMapper; import java.io.IOException; +import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -150,7 +151,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro Map.Entry entry = iterator.next(); String fieldName = Strings.toUnderscoreCase(entry.getKey()); Object fieldNode = entry.getValue(); - if (fieldName.equals("enabled") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) { + if (fieldName.equals("enabled")) { builder.enabled(nodeBooleanValue(fieldNode)); iterator.remove(); } else if (fieldName.equals("compress") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) { @@ -172,7 +173,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro } else if ("format".equals(fieldName)) { builder.format(nodeStringValue(fieldNode, null)); iterator.remove(); - } else if (fieldName.equals("includes") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) { + } else if (fieldName.equals("includes")) { List values = (List) fieldNode; String[] includes = new String[values.size()]; for (int i = 0; i < includes.length; i++) { @@ -180,7 +181,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro } builder.includes(includes); iterator.remove(); - } else if (fieldName.equals("excludes") && 
parserContext.indexVersionCreated().before(Version.V_2_0_0)) { + } else if (fieldName.equals("excludes")) { List values = (List) fieldNode; String[] excludes = new String[values.size()]; for (int i = 0; i < excludes.length; i++) { @@ -197,11 +198,14 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro private final boolean enabled; + /** indicates whether the source will always exist and be complete, for use by features like the update API */ + private final boolean complete; + private Boolean compress; private long compressThreshold; - private String[] includes; - private String[] excludes; + private final String[] includes; + private final String[] excludes; private String format; @@ -222,6 +226,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro this.excludes = excludes; this.format = format; this.formatContentType = format == null ? null : XContentType.fromRestContentType(format); + this.complete = enabled && includes == null && excludes == null; } public boolean enabled() { @@ -237,6 +242,10 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro return this.includes != null ? this.includes : Strings.EMPTY_ARRAY; } + public boolean isComplete() { + return complete; + } + @Override public FieldType defaultFieldType() { return Defaults.FIELD_TYPE; @@ -420,19 +429,23 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro @Override public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith; - if (!mergeResult.simulate()) { + if (mergeResult.simulate()) { + if (this.enabled != sourceMergeWith.enabled) { + mergeResult.addConflict("Cannot update enabled setting for [_source]"); + } + if (Arrays.equals(this.includes, sourceMergeWith.includes) == false) { + mergeResult.addConflict("Cannot update includes setting for [_source]"); + } + if (Arrays.equals(this.excludes, sourceMergeWith.excludes) == false) { + mergeResult.addConflict("Cannot update excludes setting for [_source]"); + } + } else { if (sourceMergeWith.compress != null) { this.compress = sourceMergeWith.compress; } if (sourceMergeWith.compressThreshold != -1) { this.compressThreshold = sourceMergeWith.compressThreshold; } - if (sourceMergeWith.includes != null) { - this.includes = sourceMergeWith.includes; - } - if (sourceMergeWith.excludes != null) { - this.excludes = sourceMergeWith.excludes; - } } } } diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java index cd3c12591f7..cb22ab3a0c8 100644 --- a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java +++ b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java @@ -86,8 +86,8 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { if (context.highlight().forceSource(field)) { SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).sourceMapper(); - if (!sourceFieldMapper.enabled()) { - throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight + " but type [" + hitContext.hit().type() + "] has disabled _source"); + if (!sourceFieldMapper.isComplete()) { + throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight + " but type [" + hitContext.hit().type() + "] has incomplete _source"); } } diff --git 
a/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java index 7b84424633c..2516a3b42a2 100644 --- a/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java @@ -35,6 +35,8 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.test.ElasticsearchSingleNodeTest; import org.junit.Test; +import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -42,7 +44,6 @@ import java.util.Map; import static org.hamcrest.Matchers.*; public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { - Settings backcompatSettings = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); public void testNoFormat() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") @@ -80,8 +81,8 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { documentMapper = parser.parse(mapping); doc = documentMapper.parse("type", "1", XContentFactory.smileBuilder().startObject() - .field("field", "value") - .endObject().bytes()); + .field("field", "value") + .endObject().bytes()); assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON)); } @@ -91,6 +92,7 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { .startObject("_source").field("format", "json").field("compress", true).endObject() .endObject().endObject().string(); + Settings backcompatSettings = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build(); DocumentMapperParser parser = createIndex("test", backcompatSettings).mapperService().documentMapperParser(); DocumentMapper documentMapper = parser.parse(mapping); ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject() @@ -111,19 +113,12 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { assertThat(XContentFactory.xContentType(uncompressed), equalTo(XContentType.JSON)); } - public void testIncludesBackcompat() throws Exception { + public void testIncludes() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").field("includes", new String[]{"path1*"}).endObject() .endObject().endObject().string(); - try { - createIndex("testbad").mapperService().documentMapperParser().parse(mapping); - fail("includes should not be allowed"); - } catch (MapperParsingException e) { - assertTrue(e.getMessage().contains("unsupported parameters")); - } - - DocumentMapper documentMapper = createIndex("test", backcompatSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject() .startObject("path1").field("field1", "value1").endObject() @@ -136,19 +131,12 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { assertThat(sourceAsMap.containsKey("path2"), equalTo(false)); } - public void testExcludesBackcompat() throws Exception { + public void testExcludes() throws Exception { String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").field("excludes", new String[]{"path1*"}).endObject() .endObject().endObject().string(); - try { - createIndex("testbad").mapperService().documentMapperParser().parse(mapping); - fail("excludes should not be allowed"); - } catch (MapperParsingException e) { - assertTrue(e.getMessage().contains("unsupported parameters")); - } - - DocumentMapper documentMapper = createIndex("test", backcompatSettings).mapperService().documentMapperParser().parse(mapping); + DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject() .startObject("path1").field("field1", "value1").endObject() @@ -161,12 +149,12 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { assertThat(sourceAsMap.containsKey("path2"), equalTo(true)); } - public void testDefaultMappingAndNoMappingBackcompat() throws Exception { + public void testDefaultMappingAndNoMapping() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", backcompatSettings).mapperService().documentMapperParser(); + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); DocumentMapper mapper = parser.parse("my_type", null, defaultMapping); assertThat(mapper.type(), equalTo("my_type")); assertThat(mapper.sourceMapper().enabled(), equalTo(false)); @@ -189,7 +177,7 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { } } - public void testDefaultMappingAndWithMappingOverrideBackcompat() throws Exception { + public void testDefaultMappingAndWithMappingOverride() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() .endObject().endObject().string(); @@ -198,17 +186,17 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { .startObject("_source").field("enabled", true).endObject() .endObject().endObject().string(); - DocumentMapper mapper = createIndex("test", backcompatSettings).mapperService().documentMapperParser().parse("my_type", mapping, defaultMapping); + DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("my_type", mapping, defaultMapping); assertThat(mapper.type(), equalTo("my_type")); assertThat(mapper.sourceMapper().enabled(), equalTo(true)); } - public void testDefaultMappingAndNoMappingWithMapperServiceBackcompat() throws Exception { + public void testDefaultMappingAndNoMappingWithMapperService() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() .endObject().endObject().string(); - MapperService mapperService = createIndex("test", backcompatSettings).mapperService(); + MapperService mapperService = createIndex("test").mapperService(); mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(defaultMapping), true); DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").v1(); @@ -216,12 +204,12 @@ public class DefaultSourceMappingTests extends 
ElasticsearchSingleNodeTest { assertThat(mapper.sourceMapper().enabled(), equalTo(false)); } - public void testDefaultMappingAndWithMappingOverrideWithMapperServiceBackcompat() throws Exception { + public void testDefaultMappingAndWithMappingOverrideWithMapperService() throws Exception { String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING) .startObject("_source").field("enabled", false).endObject() .endObject().endObject().string(); - MapperService mapperService = createIndex("test", backcompatSettings).mapperService(); + MapperService mapperService = createIndex("test").mapperService(); mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(defaultMapping), true); String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type") @@ -233,4 +221,82 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { assertThat(mapper.type(), equalTo("my_type")); assertThat(mapper.sourceMapper().enabled(), equalTo(true)); } + + void assertConflicts(String mapping1, String mapping2, DocumentMapperParser parser, String... conflicts) throws IOException { + DocumentMapper docMapper = parser.parse(mapping1); + docMapper.refreshSource(); + docMapper = parser.parse(docMapper.mappingSource().string()); + MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true); + + List expectedConflicts = new ArrayList<>(Arrays.asList(conflicts)); + for (String conflict : mergeResult.buildConflicts()) { + assertTrue("found unexpected conflict [" + conflict + "]", expectedConflicts.remove(conflict)); + } + assertTrue("missing conflicts: " + Arrays.toString(expectedConflicts.toArray()), expectedConflicts.isEmpty()); + } + + public void testEnabledNotUpdateable() throws Exception { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + // using default of true + String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_source").field("enabled", false).endObject() + .endObject().endObject().string(); + assertConflicts(mapping1, mapping2, parser, "Cannot update enabled setting for [_source]"); + + // not changing is ok + String mapping3 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_source").field("enabled", true).endObject() + .endObject().endObject().string(); + assertConflicts(mapping1, mapping3, parser); + } + + public void testIncludesNotUpdateable() throws Exception { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_source").array("includes", "foo.*").endObject() + .endObject().endObject().string(); + String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_source").array("includes", "foo.*", "bar.*").endObject() + .endObject().endObject().string(); + assertConflicts(mapping1, mapping2, parser, "Cannot update includes setting for [_source]"); + + // not changing is ok + assertConflicts(mapping1, mapping1, parser); + } + + public void testExcludesNotUpdateable() throws Exception { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") + 
.startObject("_source").array("excludes", "foo.*").endObject() + .endObject().endObject().string(); + String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_source").array("excludes", "foo.*", "bar.*").endObject() + .endObject().endObject().string(); + assertConflicts(mapping1, mapping2, parser, "Cannot update excludes setting for [_source]"); + + // not changing is ok + assertConflicts(mapping1, mapping1, parser); + } + + public void testComplete() throws Exception { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); + assertTrue(parser.parse(mapping).sourceMapper().isComplete()); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_source").field("enabled", false).endObject() + .endObject().endObject().string(); + assertFalse(parser.parse(mapping).sourceMapper().isComplete()); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_source").array("includes", "foo.*").endObject() + .endObject().endObject().string(); + assertFalse(parser.parse(mapping).sourceMapper().isComplete()); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("_source").array("excludes", "foo.*").endObject() + .endObject().endObject().string(); + assertFalse(parser.parse(mapping).sourceMapper().isComplete()); + } } diff --git a/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationTests.java b/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationTests.java index 03c8bbe56e1..dca5183a471 100644 --- a/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationTests.java +++ b/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationTests.java @@ -212,102 +212,6 @@ public class UpdateMappingIntegrationTests extends ElasticsearchIntegrationTest assertThat(putMappingResponse.isAcknowledged(), equalTo(true)); } - - @SuppressWarnings("unchecked") - @Test - public void updateIncludeExcludeBackcompat() throws Exception { - assertAcked(prepareCreate("test").setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) - .addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("normal").field("type", "long").endObject() - .startObject("exclude").field("type", "long").endObject() - .startObject("include").field("type", "long").endObject() - .endObject().endObject().endObject())); - ensureGreen(); // make sure that replicas are initialized so the refresh command will work them too - - logger.info("Index doc"); - index("test", "type", "1", JsonXContent.contentBuilder().startObject() - .field("normal", 1).field("exclude", 1).field("include", 1) - .endObject() - ); - refresh(); // commit it for later testing. 
- - logger.info("Adding exclude settings"); - PutMappingResponse putResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource( - JsonXContent.contentBuilder().startObject().startObject("type") - .startObject("_source") - .startArray("excludes").value("exclude").endArray() - .endObject().endObject() - ).get(); - - assertTrue(putResponse.isAcknowledged()); - - // changed mapping doesn't affect indexed documents (checking backward compatibility) - GetResponse getResponse = client().prepareGet("test", "type", "1").setRealtime(false).get(); - assertThat(getResponse.getSource(), hasKey("normal")); - assertThat(getResponse.getSource(), hasKey("exclude")); - assertThat(getResponse.getSource(), hasKey("include")); - - - logger.info("Index doc again"); - index("test", "type", "1", JsonXContent.contentBuilder().startObject() - .field("normal", 2).field("exclude", 1).field("include", 2) - .endObject() - ); - - // but do affect newly indexed docs - getResponse = get("test", "type", "1"); - assertThat(getResponse.getSource(), hasKey("normal")); - assertThat(getResponse.getSource(), not(hasKey("exclude"))); - assertThat(getResponse.getSource(), hasKey("include")); - - logger.info("Changing mapping to includes"); - putResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource( - JsonXContent.contentBuilder().startObject().startObject("type") - .startObject("_source") - .startArray("excludes").endArray() - .startArray("includes").value("include").endArray() - .endObject().endObject() - ).get(); - assertTrue(putResponse.isAcknowledged()); - - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get(); - MappingMetaData typeMapping = getMappingsResponse.getMappings().get("test").get("type"); - assertThat((Map) typeMapping.getSourceAsMap().get("_source"), hasKey("includes")); - ArrayList includes = (ArrayList) ((Map) typeMapping.getSourceAsMap().get("_source")).get("includes"); - assertThat(includes, contains("include")); - assertThat((Map) typeMapping.getSourceAsMap().get("_source"), hasKey("excludes")); - assertThat((ArrayList) ((Map) typeMapping.getSourceAsMap().get("_source")).get("excludes"), emptyIterable()); - - logger.info("Indexing doc yet again"); - index("test", "type", "1", JsonXContent.contentBuilder().startObject() - .field("normal", 3).field("exclude", 3).field("include", 3) - .endObject() - ); - - getResponse = get("test", "type", "1"); - assertThat(getResponse.getSource(), not(hasKey("normal"))); - assertThat(getResponse.getSource(), not(hasKey("exclude"))); - assertThat(getResponse.getSource(), hasKey("include")); - - logger.info("Adding excludes, but keep includes"); - putResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource( - JsonXContent.contentBuilder().startObject().startObject("type") - .startObject("_source") - .startArray("excludes").value("*.excludes").endArray() - .endObject().endObject() - ).get(); - assertTrue(putResponse.isAcknowledged()); - - getMappingsResponse = client().admin().indices().prepareGetMappings("test").get(); - typeMapping = getMappingsResponse.getMappings().get("test").get("type"); - assertThat((Map) typeMapping.getSourceAsMap().get("_source"), hasKey("includes")); - includes = (ArrayList) ((Map) typeMapping.getSourceAsMap().get("_source")).get("includes"); - assertThat(includes, contains("include")); - assertThat((Map) typeMapping.getSourceAsMap().get("_source"), hasKey("excludes")); - ArrayList excludes = (ArrayList) 
((Map) typeMapping.getSourceAsMap().get("_source")).get("excludes"); - assertThat(excludes, contains("*.excludes")); - } - @SuppressWarnings("unchecked") @Test public void updateDefaultMappingSettings() throws Exception { From af6b69e79106219d7c464cd4b130f89103c3a9e2 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 13 May 2015 20:01:16 -0400 Subject: [PATCH 07/15] Ensure empty completion entries are never indexed closes #10987 --- .../mapper/core/CompletionFieldMapper.java | 6 ++++ .../suggest/ContextSuggestSearchTests.java | 28 +++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index 83b8781af9e..738ea401e7d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -369,6 +369,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper { payload = payload == null ? EMPTY : payload; if (surfaceForm == null) { // no surface form use the input for (String input : inputs) { + if (input.length() == 0) { + continue; + } BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef( input), weight, payload); context.doc().add(getCompletionField(ctx, input, suggestPayload)); @@ -377,6 +380,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper { BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef( surfaceForm), weight, payload); for (String input : inputs) { + if (input.length() == 0) { + continue; + } context.doc().add(getCompletionField(ctx, input, suggestPayload)); } } diff --git a/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchTests.java b/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchTests.java index a1f95a229cd..95bbcbba03f 100644 --- a/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchTests.java +++ b/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearchTests.java @@ -435,6 +435,34 @@ public class ContextSuggestSearchTests extends ElasticsearchIntegrationTest { } + @Test // see issue #10987 + public void testEmptySuggestion() throws Exception { + String mapping = jsonBuilder() + .startObject() + .startObject(TYPE) + .startObject("properties") + .startObject(FIELD) + .field("type", "completion") + .startObject("context") + .startObject("type_context") + .field("path", "_type") + .field("type", "category") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + .string(); + + assertAcked(client().admin().indices().prepareCreate(INDEX).addMapping(TYPE, mapping).get()); + ensureGreen(); + + client().prepareIndex(INDEX, TYPE, "1").setSource(FIELD, "") + .setRefresh(true).get(); + + } + @Test public void testMultiValueField() throws Exception { assertAcked(prepareCreate(INDEX).addMapping(TYPE, createMapping(TYPE, ContextBuilder.reference("st", "category")))); From 7efc43db25495b92833c4bc804f604b82597c157 Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 13 May 2015 15:23:18 -0400 Subject: [PATCH 08/15] Re-structure collate option in PhraseSuggester to only collate on local shard. 
Previously, collate feature would be executed on all shards of an index using the client, this leads to a deadlock when concurrent collate requests are run from the _search API, due to the fact that both the external request and internal collate requests use the same search threadpool. As phrase suggestions are generated from the terms of the local shard, in most cases the generated suggestion, which does not yield a hit for the collate query on the local shard would not yield a hit for collate query on non-local shards. Instead of using the client for collating suggestions, collate query is executed against the ContextIndexSearcher. This PR removes the ability to specify a preference for a collate query, as the collate query is only run on the local shard. closes #9377 --- .../search/suggesters/phrase-suggest.asciidoc | 32 ++--- .../suggest/TransportSuggestAction.java | 9 +- .../search/suggest/SuggestContextParser.java | 3 +- .../search/suggest/SuggestParseElement.java | 7 +- .../search/suggest/SuggestPhase.java | 8 +- .../search/suggest/Suggester.java | 11 +- .../completion/CompletionSuggestParser.java | 3 +- .../completion/CompletionSuggester.java | 5 +- .../phrase/NoisyChannelSpellChecker.java | 4 +- .../suggest/phrase/PhraseSuggestParser.java | 6 +- .../suggest/phrase/PhraseSuggester.java | 125 ++++++------------ .../phrase/PhraseSuggestionContext.java | 23 ++-- .../suggest/term/TermSuggestParser.java | 3 +- .../search/suggest/term/TermSuggester.java | 5 +- .../search/suggest/CustomSuggester.java | 7 +- .../search/suggest/SuggestSearchTests.java | 7 +- 16 files changed, 110 insertions(+), 148 deletions(-) diff --git a/docs/reference/search/suggesters/phrase-suggest.asciidoc b/docs/reference/search/suggesters/phrase-suggest.asciidoc index 311a9a75304..07332568685 100644 --- a/docs/reference/search/suggesters/phrase-suggest.asciidoc +++ b/docs/reference/search/suggesters/phrase-suggest.asciidoc @@ -163,20 +163,18 @@ can contain misspellings (See parameter descriptions below). `collate`:: Checks each suggestion against the specified `query` or `filter` to - prune suggestions for which no matching docs exist in the index. Either - a `query` or a `filter` must be specified, and it is run as a - <>. The current suggestion is - automatically made available as the `{{suggestion}}` variable, which - should be used in your query/filter. You can still specify your own - template `params` -- the `suggestion` value will be added to the - variables you specify. You can specify a `preference` to control - on which shards the query is executed (see <>). - The default value is `_only_local`. Additionally, you can specify - a `prune` to control if all phrase suggestions will be - returned, when set to `true` the suggestions will have an additional - option `collate_match`, which will be `true` if matching documents - for the phrase was found, `false` otherwise. The default value for - `prune` is `false`. + prune suggestions for which no matching docs exist in the index. + The collate query for a suggestion is run only on the local shard from which + the suggestion has been generated from. Either a `query` or a `filter` must + be specified, and it is run as a <>. + The current suggestion is automatically made available as the `{{suggestion}}` + variable, which should be used in your query/filter. You can still specify + your own template `params` -- the `suggestion` value will be added to the + variables you specify. 
Additionally, you can specify a `prune` to control + if all phrase suggestions will be returned, when set to `true` the suggestions + will have an additional option `collate_match`, which will be `true` if + matching documents for the phrase was found, `false` otherwise. + The default value for `prune` is `false`. [source,js] -------------------------------------------------- @@ -199,8 +197,7 @@ curl -XPOST 'localhost:9200/_search' -d { } }, "params": {"field_name" : "title"}, <3> - "preference": "_primary", <4> - "prune": true <5> + "prune": true <4> } } } @@ -212,8 +209,7 @@ curl -XPOST 'localhost:9200/_search' -d { of each suggestion. <3> An additional `field_name` variable has been specified in `params` and is used by the `match` query. -<4> The default `preference` has been changed to `_primary`. -<5> All suggestions will be returned with an extra `collate_match` +<4> All suggestions will be returned with an extra `collate_match` option indicating whether the generated phrase matched any document. diff --git a/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java b/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java index 7d69af266b3..9874783ae79 100644 --- a/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java +++ b/src/main/java/org/elasticsearch/action/suggest/TransportSuggestAction.java @@ -130,27 +130,26 @@ public class TransportSuggestAction extends TransportBroadcastOperationAction 0) { parser = XContentFactory.xContent(suggest).createParser(suggest); if (parser.nextToken() != XContentParser.Token.START_OBJECT) { throw new IllegalArgumentException("suggest content missing"); } - final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(), request.shardId().getIndex(), request.shardId().id()); - final Suggest result = suggestPhase.execute(context, searcher.reader()); + final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(), + indexService.queryParserService(), request.shardId().getIndex(), request.shardId().id()); + final Suggest result = suggestPhase.execute(context, searcher.searcher()); return new ShardSuggestResponse(request.shardId(), result); } return new ShardSuggestResponse(request.shardId(), new Suggest()); } catch (Throwable ex) { throw new ElasticsearchException("failed to execute suggest", ex); } finally { - searcher.close(); if (parser != null) { parser.close(); } diff --git a/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java b/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java index ce1654245ba..98e450d1265 100644 --- a/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java +++ b/src/main/java/org/elasticsearch/search/suggest/SuggestContextParser.java @@ -22,8 +22,9 @@ import java.io.IOException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.IndexQueryParserService; public interface SuggestContextParser { - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService) throws IOException; + public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException; } \ No newline at end of file diff --git a/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java 
b/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java index edfe04108a0..cf85500cb4b 100644 --- a/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java +++ b/src/main/java/org/elasticsearch/search/suggest/SuggestParseElement.java @@ -22,6 +22,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; @@ -44,11 +45,11 @@ public final class SuggestParseElement implements SearchParseElement { @Override public void parse(XContentParser parser, SearchContext context) throws Exception { - SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.shardTarget().index(), context.shardTarget().shardId()); + SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.queryParserService(), context.shardTarget().index(), context.shardTarget().shardId()); context.suggest(suggestionSearchContext); } - public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, String index, int shardId) throws IOException { + public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService, String index, int shardId) throws IOException { SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext(); BytesRef globalText = null; String fieldName = null; @@ -86,7 +87,7 @@ public final class SuggestParseElement implements SearchParseElement { throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported"); } final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser(); - suggestionContext = contextParser.parse(parser, mapperService); + suggestionContext = contextParser.parse(parser, mapperService, queryParserService); } } if (suggestionContext != null) { diff --git a/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java b/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java index 209c7b68ce7..58a4502abf8 100644 --- a/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java +++ b/src/main/java/org/elasticsearch/search/suggest/SuggestPhase.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.suggest; import com.google.common.collect.ImmutableMap; -import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.component.AbstractComponent; @@ -71,10 +71,10 @@ public class SuggestPhase extends AbstractComponent implements SearchPhase { if (suggest == null) { return; } - context.queryResult().suggest(execute(suggest, context.searcher().getIndexReader())); + context.queryResult().suggest(execute(suggest, context.searcher())); } - public Suggest execute(SuggestionSearchContext suggest, IndexReader reader) { + public Suggest execute(SuggestionSearchContext suggest, IndexSearcher searcher) { try { CharsRefBuilder spare = new CharsRefBuilder(); final List>> suggestions = new ArrayList<>(suggest.suggestions().size()); @@ -82,7 +82,7 @@ public class SuggestPhase 
extends AbstractComponent implements SearchPhase { for (Map.Entry entry : suggest.suggestions().entrySet()) { SuggestionSearchContext.SuggestionContext suggestion = entry.getValue(); Suggester suggester = suggestion.getSuggester(); - Suggestion> result = suggester.execute(entry.getKey(), suggestion, reader, spare); + Suggestion> result = suggester.execute(entry.getKey(), suggestion, searcher, spare); if (result != null) { assert entry.getKey().equals(result.name); suggestions.add(result); diff --git a/src/main/java/org/elasticsearch/search/suggest/Suggester.java b/src/main/java/org/elasticsearch/search/suggest/Suggester.java index dcb84ea5467..51f5f21b460 100644 --- a/src/main/java/org/elasticsearch/search/suggest/Suggester.java +++ b/src/main/java/org/elasticsearch/search/suggest/Suggester.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.suggest; -import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.CharsRefBuilder; import java.io.IOException; @@ -27,19 +27,20 @@ import java.io.IOException; public abstract class Suggester { protected abstract Suggest.Suggestion> - innerExecute(String name, T suggestion, IndexReader indexReader, CharsRefBuilder spare) throws IOException; + innerExecute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException; public abstract String[] names(); public abstract SuggestContextParser getContextParser(); public Suggest.Suggestion> - execute(String name, T suggestion, IndexReader indexReader, CharsRefBuilder spare) throws IOException { + execute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { // #3469 We want to ignore empty shards - if (indexReader.numDocs() == 0) { + + if (searcher.getIndexReader().numDocs() == 0) { return null; } - return innerExecute(name, suggestion, indexReader, spare); + return innerExecute(name, suggestion, searcher, spare); } } diff --git a/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java b/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java index 05a14291cad..8ef271fb764 100644 --- a/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java +++ b/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestParser.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.CompletionFieldMapper; +import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestionSearchContext; import org.elasticsearch.search.suggest.context.ContextMapping.ContextQuery; @@ -48,7 +49,7 @@ public class CompletionSuggestParser implements SuggestContextParser { } @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService) throws IOException { + public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException { XContentParser.Token token; String fieldName = null; CompletionSuggestionContext suggestion = new CompletionSuggestionContext(completionSuggester); diff --git a/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java 
b/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 3e3ceba438e..ee1cc70bc45 100644 --- a/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Terms; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.suggest.Lookup; import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.CollectionUtil; @@ -48,11 +49,11 @@ public class CompletionSuggester extends Suggester @Override protected Suggest.Suggestion> innerExecute(String name, - CompletionSuggestionContext suggestionContext, IndexReader indexReader, CharsRefBuilder spare) throws IOException { + CompletionSuggestionContext suggestionContext, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { if (suggestionContext.mapper() == null || !(suggestionContext.mapper() instanceof CompletionFieldMapper)) { throw new ElasticsearchException("Field [" + suggestionContext.getField() + "] is not a completion suggest field"); } - + final IndexReader indexReader = searcher.getIndexReader(); CompletionSuggestion completionSuggestion = new CompletionSuggestion(name, suggestionContext.getSize()); spare.copyUTF8Bytes(suggestionContext.getText()); diff --git a/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java index 4eb09545167..ec9ca6e1da2 100644 --- a/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -60,7 +60,7 @@ public final class NoisyChannelSpellChecker { } public Result getCorrections(TokenStream stream, final CandidateGenerator generator, - float maxErrors, int numCorrections, IndexReader reader, WordScorer wordScorer, BytesRef separator, float confidence, int gramSize) throws IOException { + float maxErrors, int numCorrections, WordScorer wordScorer, float confidence, int gramSize) throws IOException { final List candidateSetsList = new ArrayList<>(); SuggestUtils.analyze(stream, new SuggestUtils.TokenConsumer() { @@ -134,7 +134,7 @@ public final class NoisyChannelSpellChecker { public Result getCorrections(Analyzer analyzer, BytesRef query, CandidateGenerator generator, float maxErrors, int numCorrections, IndexReader reader, String analysisField, WordScorer scorer, float confidence, int gramSize) throws IOException { - return getCorrections(tokenStream(analyzer, query, new CharsRefBuilder(), analysisField), generator, maxErrors, numCorrections, reader, scorer, new BytesRef(" "), confidence, gramSize); + return getCorrections(tokenStream(analyzer, query, new CharsRefBuilder(), analysisField), generator, maxErrors, numCorrections, scorer, confidence, gramSize); } diff --git a/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java b/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java index 7905d538848..453dbdc3eb5 100644 --- a/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java +++ b/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestParser.java @@ -28,6 +28,7 @@ import 
org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; @@ -49,8 +50,9 @@ public final class PhraseSuggestParser implements SuggestContextParser { } @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService) throws IOException { + public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException { PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester); + suggestion.setQueryParserService(queryParserService); XContentParser.Token token; String fieldName = null; boolean gramSizeSet = false; @@ -159,8 +161,6 @@ public final class PhraseSuggestParser implements SuggestContextParser { } else { suggestion.setCollateFilterScript(compiledScript); } - } else if ("preference".equals(fieldName)) { - suggestion.setPreference(parser.text()); } else if ("params".equals(fieldName)) { suggestion.setCollateScriptParams(parser.map()); } else if ("prune".equals(fieldName)) { diff --git a/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java b/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java index bf22dae44db..37ab17fffed 100644 --- a/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java +++ b/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggester.java @@ -23,22 +23,19 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiFields; import org.apache.lucene.index.Terms; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.spell.DirectSpellChecker; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRefBuilder; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.search.MultiSearchRequestBuilder; -import org.elasticsearch.action.search.MultiSearchResponse; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; @@ -58,12 +55,10 @@ import java.util.Map; public final class PhraseSuggester extends Suggester { private final BytesRef SEPARATOR = new BytesRef(" "); private static final String SUGGESTION_TEMPLATE_VAR_NAME = "suggestion"; - private final Client client; private final ScriptService scriptService; @Inject - public PhraseSuggester(Client client, ScriptService scriptService) { - 
this.client = client; + public PhraseSuggester(ScriptService scriptService) { this.scriptService = scriptService; } @@ -76,11 +71,11 @@ public final class PhraseSuggester extends Suggester { * - phonetic filters could be interesting here too for candidate selection */ @Override - public Suggestion> innerExecute(String name, PhraseSuggestionContext suggestion, - IndexReader indexReader, CharsRefBuilder spare) throws IOException { + public Suggestion> innerExecute(String name, PhraseSuggestionContext suggestion, IndexSearcher searcher, + CharsRefBuilder spare) throws IOException { double realWordErrorLikelihood = suggestion.realworldErrorLikelyhood(); final PhraseSuggestion response = new PhraseSuggestion(name, suggestion.getSize()); - + final IndexReader indexReader = searcher.getIndexReader(); List generators = suggestion.generators(); final int numGenerators = generators.size(); final List gens = new ArrayList<>(generators.size()); @@ -103,31 +98,52 @@ public final class PhraseSuggester extends Suggester { WordScorer wordScorer = suggestion.model().newScorer(indexReader, suggestTerms, suggestField, realWordErrorLikelihood, separator); Result checkerResult = checker.getCorrections(stream, new MultiCandidateGeneratorWrapper(suggestion.getShardSize(), gens.toArray(new CandidateGenerator[gens.size()])), suggestion.maxErrors(), - suggestion.getShardSize(), indexReader,wordScorer , separator, suggestion.confidence(), suggestion.gramSize()); + suggestion.getShardSize(), wordScorer, suggestion.confidence(), suggestion.gramSize()); PhraseSuggestion.Entry resultEntry = buildResultEntry(suggestion, spare, checkerResult.cutoffScore); response.addTerm(resultEntry); - BytesRefBuilder byteSpare = new BytesRefBuilder(); - - MultiSearchResponse multiSearchResponse = collate(suggestion, checkerResult, byteSpare, spare); - final boolean collateEnabled = multiSearchResponse != null; - final boolean collatePrune = suggestion.collatePrune(); - + final BytesRefBuilder byteSpare = new BytesRefBuilder(); + final EarlyTerminatingCollector collector = Lucene.createExistsCollector(); + final CompiledScript collateScript; + if (suggestion.getCollateQueryScript() != null) { + collateScript = suggestion.getCollateQueryScript(); + } else if (suggestion.getCollateFilterScript() != null) { + collateScript = suggestion.getCollateFilterScript(); + } else { + collateScript = null; + } + final boolean collatePrune = (collateScript != null) && suggestion.collatePrune(); for (int i = 0; i < checkerResult.corrections.length; i++) { - boolean collateMatch = hasMatchingDocs(multiSearchResponse, i); + Correction correction = checkerResult.corrections[i]; + spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, null, null)); + boolean collateMatch = true; + if (collateScript != null) { + // Checks if the template query collateScript yields any documents + // from the index for a correction, collateMatch is updated + final Map vars = suggestion.getCollateScriptParams(); + vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString()); + final ExecutableScript executable = scriptService.executable(collateScript, vars); + final BytesReference querySource = (BytesReference) executable.run(); + final ParsedQuery parsedQuery; + if (suggestion.getCollateFilterScript() != null) { + parsedQuery = suggestion.getQueryParserService().parse( + QueryBuilders.constantScoreQuery(QueryBuilders.wrapperQuery(querySource))); + } else { + parsedQuery = suggestion.getQueryParserService().parse(querySource); + } + collateMatch = Lucene.exists(searcher, 
parsedQuery.query(), collector); + } if (!collateMatch && !collatePrune) { continue; } - Correction correction = checkerResult.corrections[i]; - spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, null, null)); Text phrase = new StringText(spare.toString()); Text highlighted = null; if (suggestion.getPreTag() != null) { spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, suggestion.getPreTag(), suggestion.getPostTag())); highlighted = new StringText(spare.toString()); } - if (collateEnabled && collatePrune) { + if (collatePrune) { resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch)); } else { resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score))); @@ -144,67 +160,6 @@ public final class PhraseSuggester extends Suggester { return new PhraseSuggestion.Entry(new StringText(spare.toString()), 0, spare.length(), cutoffScore); } - private MultiSearchResponse collate(PhraseSuggestionContext suggestion, Result checkerResult, BytesRefBuilder byteSpare, CharsRefBuilder spare) throws IOException { - CompiledScript collateQueryScript = suggestion.getCollateQueryScript(); - CompiledScript collateFilterScript = suggestion.getCollateFilterScript(); - MultiSearchResponse multiSearchResponse = null; - if (collateQueryScript != null) { - multiSearchResponse = fetchMatchingDocCountResponses(checkerResult.corrections, collateQueryScript, false, suggestion, byteSpare, spare); - } else if (collateFilterScript != null) { - multiSearchResponse = fetchMatchingDocCountResponses(checkerResult.corrections, collateFilterScript, true, suggestion, byteSpare, spare); - } - return multiSearchResponse; - } - - private MultiSearchResponse fetchMatchingDocCountResponses(Correction[] corrections, CompiledScript collateScript, - boolean isFilter, PhraseSuggestionContext suggestions, - BytesRefBuilder byteSpare, CharsRefBuilder spare) throws IOException { - Map vars = suggestions.getCollateScriptParams(); - MultiSearchResponse multiSearchResponse = null; - MultiSearchRequestBuilder multiSearchRequestBuilder = client.prepareMultiSearch(); - boolean requestAdded = false; - SearchRequestBuilder req; - for (Correction correction : corrections) { - spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, null, null)); - vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString()); - ExecutableScript executable = scriptService.executable(collateScript, vars); - BytesReference querySource = (BytesReference) executable.run(); - requestAdded = true; - if (isFilter) { - req = client.prepareSearch() - .setPreference(suggestions.getPreference()) - .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.wrapperQuery(querySource))) - .setSize(0) - .setTerminateAfter(1); - } else { - req = client.prepareSearch() - .setPreference(suggestions.getPreference()) - .setQuery(querySource) - .setSize(0) - .setTerminateAfter(1); - } - multiSearchRequestBuilder.add(req); - } - if (requestAdded) { - multiSearchResponse = multiSearchRequestBuilder.get(); - } - - return multiSearchResponse; - } - - private static boolean hasMatchingDocs(MultiSearchResponse multiSearchResponse, int index) { - if (multiSearchResponse == null) { - return true; - } - MultiSearchResponse.Item item = multiSearchResponse.getResponses()[index]; - if (!item.isFailure()) { - SearchResponse resp = item.getResponse(); - return resp.getHits().totalHits() > 0; - } else { - throw new ElasticsearchException("Collate request failed: " + item.getFailureMessage()); - } - } - 
ScriptService scriptService() { return scriptService; } diff --git a/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java b/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java index 7115b7a629a..b38dd95053a 100644 --- a/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java +++ b/src/main/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionContext.java @@ -25,7 +25,7 @@ import java.util.Map; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.cluster.routing.Preference; +import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.Suggester; @@ -33,7 +33,7 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContex class PhraseSuggestionContext extends SuggestionContext { private final BytesRef SEPARATOR = new BytesRef(" "); - + private IndexQueryParserService queryParserService; private float maxErrors = 0.5f; private BytesRef separator = SEPARATOR; private float realworldErrorLikelihood = 0.95f; @@ -45,7 +45,6 @@ class PhraseSuggestionContext extends SuggestionContext { private BytesRef postTag; private CompiledScript collateQueryScript; private CompiledScript collateFilterScript; - private String preference = Preference.ONLY_LOCAL.type(); private Map collateScriptParams = new HashMap<>(1); private WordScorer.WordScorerFactory scorer; @@ -112,7 +111,15 @@ class PhraseSuggestionContext extends SuggestionContext { public WordScorer.WordScorerFactory model() { return scorer; } - + + public void setQueryParserService(IndexQueryParserService queryParserService) { + this.queryParserService = queryParserService; + } + + public IndexQueryParserService getQueryParserService() { + return queryParserService; + } + static class DirectCandidateGenerator extends DirectSpellcheckerSettings { private Analyzer preFilter; private Analyzer postFilter; @@ -205,14 +212,6 @@ class PhraseSuggestionContext extends SuggestionContext { this.collateFilterScript = collateFilterScript; } - String getPreference() { - return preference; - } - - void setPreference(String preference) { - this.preference = preference; - } - Map getCollateScriptParams() { return collateScriptParams; } diff --git a/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java b/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java index 94ef1f6b393..9e1a0e57190 100644 --- a/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java +++ b/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestParser.java @@ -22,6 +22,7 @@ import java.io.IOException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.search.suggest.DirectSpellcheckerSettings; import org.elasticsearch.search.suggest.SuggestContextParser; import org.elasticsearch.search.suggest.SuggestUtils; @@ -36,7 +37,7 @@ public final class TermSuggestParser implements SuggestContextParser { } @Override - public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService) throws IOException { + public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, 
IndexQueryParserService queryParserService) throws IOException { XContentParser.Token token; String fieldName = null; TermSuggestionContext suggestion = new TermSuggestionContext(suggester); diff --git a/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java b/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java index 64c38b2df81..70dfefe9522 100644 --- a/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java +++ b/src/main/java/org/elasticsearch/search/suggest/term/TermSuggester.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest.term; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.spell.DirectSpellChecker; import org.apache.lucene.search.spell.SuggestWord; import org.apache.lucene.util.BytesRef; @@ -41,9 +42,9 @@ import java.util.List; public final class TermSuggester extends Suggester { @Override - public TermSuggestion innerExecute(String name, TermSuggestionContext suggestion, IndexReader indexReader, CharsRefBuilder spare) throws IOException { + public TermSuggestion innerExecute(String name, TermSuggestionContext suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { DirectSpellChecker directSpellChecker = SuggestUtils.getDirectSpellChecker(suggestion.getDirectSpellCheckerSettings()); - + final IndexReader indexReader = searcher.getIndexReader(); TermSuggestion response = new TermSuggestion( name, suggestion.getSize(), suggestion.getDirectSpellCheckerSettings().sort() ); diff --git a/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java b/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java index 0fe75cd5113..6e57390a165 100644 --- a/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java +++ b/src/test/java/org/elasticsearch/search/suggest/CustomSuggester.java @@ -18,11 +18,12 @@ */ package org.elasticsearch.search.suggest; -import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.IndexQueryParserService; import java.io.IOException; import java.util.Locale; @@ -36,7 +37,7 @@ public class CustomSuggester extends Suggester> innerExecute(String name, CustomSuggestionsContext suggestion, IndexReader indexReader, CharsRefBuilder spare) throws IOException { + public Suggest.Suggestion> innerExecute(String name, CustomSuggestionsContext suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException { // Get the suggestion context String text = suggestion.getText().utf8ToString(); @@ -63,7 +64,7 @@ public class CustomSuggester extends Suggester options = parser.map(); CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options); suggestionContext.setField((String) options.get("field")); diff --git a/src/test/java/org/elasticsearch/search/suggest/SuggestSearchTests.java b/src/test/java/org/elasticsearch/search/suggest/SuggestSearchTests.java index 86d979df31c..19bc28f188d 100644 --- a/src/test/java/org/elasticsearch/search/suggest/SuggestSearchTests.java +++ b/src/test/java/org/elasticsearch/search/suggest/SuggestSearchTests.java @@ -1247,12 +1247,17 @@ public class SuggestSearchTests extends ElasticsearchIntegrationTest 
{ // expected } - // collate request with prune set to true + // collate query request with prune set to true PhraseSuggestionBuilder phraseSuggestWithParamsAndReturn = suggest.collateFilter(null).collateQuery(collateWithParams).collateParams(params).collatePrune(true); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn); assertSuggestionSize(searchSuggest, 0, 10, "title"); assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2); + // collate filter request with prune set to true + phraseSuggestWithParamsAndReturn = suggest.collateFilter(collateWithParams).collateQuery(null).collateParams(params).collatePrune(true); + searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn); + assertSuggestionSize(searchSuggest, 0, 10, "title"); + assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2); } protected Suggest searchSuggest(SuggestionBuilder... suggestion) { From 0e14c6d2568c5e5ab15985fb525fb18c295a87f3 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 14 May 2015 14:36:26 -0700 Subject: [PATCH 09/15] Fix includes/excludes to be handled on merge conflict checking when they are null --- .../index/mapper/internal/SourceFieldMapper.java | 14 +++++++------- .../mapper/source/DefaultSourceMappingTests.java | 8 ++++++++ 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index 5dd55356257..26513ddaeb6 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -222,24 +222,24 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro this.enabled = enabled; this.compress = compress; this.compressThreshold = compressThreshold; - this.includes = includes; - this.excludes = excludes; + this.includes = includes == null ? Strings.EMPTY_ARRAY : includes; + this.excludes = excludes == null ? Strings.EMPTY_ARRAY : excludes; this.format = format; this.formatContentType = format == null ? null : XContentType.fromRestContentType(format); this.complete = enabled && includes == null && excludes == null; } public boolean enabled() { - return this.enabled; + return enabled; } public String[] excludes() { - return this.excludes != null ? this.excludes : Strings.EMPTY_ARRAY; + return excludes; } public String[] includes() { - return this.includes != null ? 
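The two `_source` patches above and below hinge on how `Arrays.equals` treats `null` versus an empty array: a mapping that never mentions includes and one that sets an empty includes list should not be reported as a merge conflict. A small standalone illustration, with names invented for the example:

[source,java]
--------------------------------------------------
import java.util.Arrays;

final class IncludesMergeCheck {

    // Null-safe accessor, mirroring includes()/excludes() in SourceFieldMapper:
    // "unset" and "empty" are made to look the same before comparing.
    static String[] normalized(String[] includes) {
        return includes != null ? includes : new String[0];
    }

    static boolean conflicts(String[] current, String[] mergedIn) {
        return Arrays.equals(normalized(current), normalized(mergedIn)) == false;
    }

    public static void main(String[] args) {
        String[] unset = null;            // mapping never mentioned includes
        String[] empty = new String[0];   // mapping listed an empty includes array

        System.out.println(Arrays.equals(unset, empty));             // false: raw comparison would flag a conflict
        System.out.println(conflicts(unset, empty));                 // false: accessor-based comparison does not
        System.out.println(conflicts(unset, new String[]{"foo.*"})); // true: a real change still conflicts
    }
}
--------------------------------------------------

That is why this follow-up keeps the fields null-based (avoiding the serialization churn mentioned above) but routes the merge comparison through the `includes()`/`excludes()` accessors, which substitute an empty array for `null`.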
this.includes : Strings.EMPTY_ARRAY; + return includes; } public boolean isComplete() { @@ -433,10 +433,10 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro if (this.enabled != sourceMergeWith.enabled) { mergeResult.addConflict("Cannot update enabled setting for [_source]"); } - if (Arrays.equals(this.includes, sourceMergeWith.includes) == false) { + if (Arrays.equals(includes, sourceMergeWith.includes) == false) { mergeResult.addConflict("Cannot update includes setting for [_source]"); } - if (Arrays.equals(this.excludes, sourceMergeWith.excludes) == false) { + if (Arrays.equals(excludes, sourceMergeWith.excludes) == false) { mergeResult.addConflict("Cannot update excludes setting for [_source]"); } } else { diff --git a/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java index 2516a3b42a2..25ab2a5c6c0 100644 --- a/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/source/DefaultSourceMappingTests.java @@ -253,9 +253,13 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { public void testIncludesNotUpdateable() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", "foo.*").endObject() .endObject().endObject().string(); + assertConflicts(defaultMapping, mapping1, parser, "Cannot update includes setting for [_source]"); + assertConflicts(mapping1, defaultMapping, parser, "Cannot update includes setting for [_source]"); + String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", "foo.*", "bar.*").endObject() .endObject().endObject().string(); @@ -267,9 +271,13 @@ public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest { public void testExcludesNotUpdateable() throws Exception { DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("excludes", "foo.*").endObject() .endObject().endObject().string(); + assertConflicts(defaultMapping, mapping1, parser, "Cannot update excludes setting for [_source]"); + assertConflicts(mapping1, defaultMapping, parser, "Cannot update excludes setting for [_source]"); + String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("excludes", "foo.*", "bar.*").endObject() .endObject().endObject().string(); From 7cacfa0769e2a5f52dd70f9fd535081ad6bbc6b4 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 14 May 2015 14:57:09 -0700 Subject: [PATCH 10/15] Change includes/excludes back to null based for now, since it complicates serialization and causes a number of test failures. 
--- .../index/mapper/internal/SourceFieldMapper.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index 26513ddaeb6..45e5f26f568 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -222,8 +222,8 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro this.enabled = enabled; this.compress = compress; this.compressThreshold = compressThreshold; - this.includes = includes == null ? Strings.EMPTY_ARRAY : includes; - this.excludes = excludes == null ? Strings.EMPTY_ARRAY : excludes; + this.includes = includes; + this.excludes = excludes; this.format = format; this.formatContentType = format == null ? null : XContentType.fromRestContentType(format); this.complete = enabled && includes == null && excludes == null; @@ -234,12 +234,12 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro } public String[] excludes() { - return excludes; + return this.excludes != null ? this.excludes : Strings.EMPTY_ARRAY; } public String[] includes() { - return includes; + return this.includes != null ? this.includes : Strings.EMPTY_ARRAY; } public boolean isComplete() { @@ -433,10 +433,10 @@ public class SourceFieldMapper extends AbstractFieldMapper implements Ro if (this.enabled != sourceMergeWith.enabled) { mergeResult.addConflict("Cannot update enabled setting for [_source]"); } - if (Arrays.equals(includes, sourceMergeWith.includes) == false) { + if (Arrays.equals(includes(), sourceMergeWith.includes()) == false) { mergeResult.addConflict("Cannot update includes setting for [_source]"); } - if (Arrays.equals(excludes, sourceMergeWith.excludes) == false) { + if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) { mergeResult.addConflict("Cannot update excludes setting for [_source]"); } } else { From 179dad69b6ee1f6b408a66e83eea4518739c26f3 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Thu, 14 May 2015 16:01:58 -0600 Subject: [PATCH 11/15] [DOCS] Add DNS SRV discovery plugin --- docs/reference/modules/plugins.asciidoc | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/reference/modules/plugins.asciidoc b/docs/reference/modules/plugins.asciidoc index c06fc9c6e57..3a9f867a4dd 100644 --- a/docs/reference/modules/plugins.asciidoc +++ b/docs/reference/modules/plugins.asciidoc @@ -212,6 +212,7 @@ You can disable that check using `plugins.check_lucene: false`. 
.Supported by the community * https://github.com/shikhar/eskka[eskka Discovery Plugin] (by Shikhar Bhushan) +* https://github.com/grantr/elasticsearch-srv-discovery[DNS SRV Discovery Plugin] (by Grant Rodgers) [float] [[river]] From ac4942b5a7f17dc0497291f0df1c9bf4767e197a Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Thu, 14 May 2015 18:16:03 -0400 Subject: [PATCH 12/15] [TEST] improve Phrase Collate filter test --- .../search/suggest/SuggestSearchTests.java | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/test/java/org/elasticsearch/search/suggest/SuggestSearchTests.java b/src/test/java/org/elasticsearch/search/suggest/SuggestSearchTests.java index 19bc28f188d..a362b435af8 100644 --- a/src/test/java/org/elasticsearch/search/suggest/SuggestSearchTests.java +++ b/src/test/java/org/elasticsearch/search/suggest/SuggestSearchTests.java @@ -1253,6 +1253,18 @@ public class SuggestSearchTests extends ElasticsearchIntegrationTest { assertSuggestionSize(searchSuggest, 0, 10, "title"); assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2); + collateWithParams = XContentFactory.jsonBuilder() + .startObject() + .startObject("query") + .startObject("{{query_type}}") + .field("{{query_field}}", "{{suggestion}}") + .endObject() + .endObject() + .endObject().string(); + + params.clear(); + params.put("query_type", "match_phrase"); + params.put("query_field", "title"); // collate filter request with prune set to true phraseSuggestWithParamsAndReturn = suggest.collateFilter(collateWithParams).collateQuery(null).collateParams(params).collatePrune(true); searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn); From 91aeea9c66cffd2bf7392419825a84f45ee80201 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 14 May 2015 15:14:13 -0700 Subject: [PATCH 13/15] Make highlighter check source enabled again instead of source complete...for now. 
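In practice the change means `force_source` highlighting should only be refused when `_source` is disabled outright, not when it is merely filtered (and therefore "incomplete"). A rough sketch of that distinction follows; the `SourceSettings` holder is hypothetical and only the shape of the check mirrors the diff below.

[source,java]
--------------------------------------------------
// Hypothetical stand-in for the relevant flags on SourceFieldMapper.
final class SourceSettings {
    final boolean enabled;   // _source stored at all
    final boolean complete;  // enabled and no includes/excludes configured

    SourceSettings(boolean enabled, boolean complete) {
        this.enabled = enabled;
        this.complete = complete;
    }
}

final class ForceSourceCheck {

    // Previously the guard tested "complete"; now it tests "enabled", so a
    // filtered-but-present _source can still be highlighted with force_source.
    static void requireSourceForHighlighting(SourceSettings source, String type) {
        if (!source.enabled) {
            throw new IllegalArgumentException(
                    "source is forced but type [" + type + "] has disabled _source");
        }
    }

    public static void main(String[] args) {
        requireSourceForHighlighting(new SourceSettings(true, false), "type1");      // filtered source: accepted
        try {
            requireSourceForHighlighting(new SourceSettings(false, false), "type1"); // disabled source: rejected
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage());
        }
    }
}
--------------------------------------------------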
--- .../org/elasticsearch/search/highlight/HighlightPhase.java | 4 ++-- .../search/highlight/HighlighterSearchTests.java | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java index cb22ab3a0c8..cd3c12591f7 100644 --- a/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java +++ b/src/main/java/org/elasticsearch/search/highlight/HighlightPhase.java @@ -86,8 +86,8 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { if (context.highlight().forceSource(field)) { SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).sourceMapper(); - if (!sourceFieldMapper.isComplete()) { - throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight + " but type [" + hitContext.hit().type() + "] has incomplete _source"); + if (!sourceFieldMapper.enabled()) { + throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight + " but type [" + hitContext.hit().type() + "] has disabled _source"); } } diff --git a/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java b/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java index 7758c3fca13..4691cc9f76d 100644 --- a/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java +++ b/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchTests.java @@ -562,10 +562,9 @@ public class HighlighterSearchTests extends ElasticsearchIntegrationTest { } @Test - public void testForceSourceWithSourceDisabledBackcompat() throws Exception { + public void testForceSourceWithSourceDisabled() throws Exception { assertAcked(prepareCreate("test") - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id) .addMapping("type1", jsonBuilder().startObject().startObject("type1") .startObject("_source").field("enabled", false).endObject() .startObject("properties") From 8831ae6e5c7721949d0c25bfc786dc343250920f Mon Sep 17 00:00:00 2001 From: Nicholas Canzoneri Date: Tue, 12 May 2015 10:33:12 -0400 Subject: [PATCH 14/15] Add index name to log statements when settings update fails When an index setting is invalid and fails to be set, a WARN statement is logged but it doesn't contain the index name, making tracking down and fixing the problem more difficult. This commit adds the index name to the log statement. 
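To make the improvement concrete, here is a tiny sketch of the message shape before and after, using a trivial `{}` substitution helper. The helper, the index name and the sample value are invented; the format strings are the ones the diff below switches between.

[source,java]
--------------------------------------------------
final class LogMessageShape {

    // Minimal stand-in for "{}" placeholder substitution in log messages.
    static String format(String template, Object... args) {
        String out = template;
        for (Object arg : args) {
            out = out.replaceFirst("\\{\\}", String.valueOf(arg));
        }
        return out;
    }

    public static void main(String[] args) {
        String setting = "index.auto_expand_replicas";

        // Before: no way to tell which index rejected the setting.
        System.out.println(format(
                "failed to set [{}], minimum value is not a number [{}]",
                setting, "foo"));

        // After: the index name is part of the warning.
        System.out.println(format(
                "failed to set [{}] for index [{}], minimum value is not a number [{}]",
                setting, "my_index", "foo"));
    }
}
--------------------------------------------------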
--- .../metadata/MetaDataUpdateSettingsService.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 8b4e334bade..dd98081e021 100644 --- a/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -85,16 +85,16 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements final int dash = autoExpandReplicas.indexOf('-'); if (-1 == dash) { - logger.warn("Unexpected value [{}] for setting [{}]; it should be dash delimited", - autoExpandReplicas, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS); + logger.warn("failed to set [{}] for index [{}], it should be dash delimited [{}]", + IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.index(), autoExpandReplicas); continue; } final String sMin = autoExpandReplicas.substring(0, dash); try { min = Integer.parseInt(sMin); } catch (NumberFormatException e) { - logger.warn("failed to set [{}], minimum value is not a number [{}]", - e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, sMin); + logger.warn("failed to set [{}] for index [{}], minimum value is not a number [{}]", + e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.index(), sMin); continue; } String sMax = autoExpandReplicas.substring(dash + 1); @@ -104,8 +104,8 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements try { max = Integer.parseInt(sMax); } catch (NumberFormatException e) { - logger.warn("failed to set [{}], maximum value is neither [{}] nor a number [{}]", - e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, ALL_NODES_VALUE, sMax); + logger.warn("failed to set [{}] for index [{}], maximum value is neither [{}] nor a number [{}]", + e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.index(), ALL_NODES_VALUE, sMax); continue; } } From f05808d59e344ccee07ace5f26dece190c6710ec Mon Sep 17 00:00:00 2001 From: Alexander Reelsen Date: Fri, 15 May 2015 08:40:40 +0200 Subject: [PATCH 15/15] HttpServer: Support relative plugin paths in configuration When specifying relative paths on startup, handling plugin paths failed due to recently added security fix. This fix ensures normalization of the plugin path as well. 
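A short standalone sketch of why the added `normalize()` matters: when the configured site directory itself contains `.` or `..` segments, as a relative plugin path does, a `startsWith` containment check against the un-normalized base path rejects perfectly valid files, while the normalized check still blocks real traversal. The paths here are invented for the example.

[source,java]
--------------------------------------------------
import java.nio.file.Path;
import java.nio.file.Paths;

final class PluginPathContainment {

    // Containment check guarding against path traversal: resolve the requested
    // file, then make sure it still lives under the site root.
    static boolean servedFromSiteRoot(Path siteRoot, String requestPath, boolean normalizeRoot) {
        Path base = normalizeRoot
                ? siteRoot.toAbsolutePath().normalize()
                : siteRoot.toAbsolutePath();
        Path file = siteRoot.resolve(requestPath).toAbsolutePath().normalize();
        return file.startsWith(base);
    }

    public static void main(String[] args) {
        // A relative, dot-containing root such as "./plugins/dummy/_site" tripped the old check.
        Path relativeRoot = Paths.get(".", "plugins", "dummy", "_site");

        System.out.println(servedFromSiteRoot(relativeRoot, "index.html", false));      // false: wrongly rejected
        System.out.println(servedFromSiteRoot(relativeRoot, "index.html", true));       // true: normalized root matches
        System.out.println(servedFromSiteRoot(relativeRoot, "../../etc/passwd", true)); // false: traversal still blocked
    }
}
--------------------------------------------------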
In addition a new matcher has been added to easily check for a status code of an HTTP response likes this assertThat(response, hasStatus(OK)); Closes #10958 --- .../org/elasticsearch/http/HttpServer.java | 2 +- .../plugins/ResponseHeaderPluginTests.java | 13 ++- .../SitePluginRelativePathConfigTests.java | 90 +++++++++++++++++++ .../plugins/SitePluginTests.java | 23 +++-- .../hamcrest/ElasticsearchAssertions.java | 5 ++ .../test/hamcrest/ElasticsearchMatchers.java | 27 ++++++ 6 files changed, 139 insertions(+), 21 deletions(-) create mode 100644 src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigTests.java diff --git a/src/main/java/org/elasticsearch/http/HttpServer.java b/src/main/java/org/elasticsearch/http/HttpServer.java index a40fc96a29d..5199ea5e422 100644 --- a/src/main/java/org/elasticsearch/http/HttpServer.java +++ b/src/main/java/org/elasticsearch/http/HttpServer.java @@ -183,7 +183,7 @@ public class HttpServer extends AbstractLifecycleComponent { Path file = siteFile.resolve(sitePath); // return not found instead of forbidden to prevent malicious requests to find out if files exist or dont exist - if (!Files.exists(file) || Files.isHidden(file) || !file.toAbsolutePath().normalize().startsWith(siteFile.toAbsolutePath())) { + if (!Files.exists(file) || Files.isHidden(file) || !file.toAbsolutePath().normalize().startsWith(siteFile.toAbsolutePath().normalize())) { channel.sendResponse(new BytesRestResponse(NOT_FOUND)); return; } diff --git a/src/test/java/org/elasticsearch/plugins/ResponseHeaderPluginTests.java b/src/test/java/org/elasticsearch/plugins/ResponseHeaderPluginTests.java index 852c4471ab8..148f81391de 100644 --- a/src/test/java/org/elasticsearch/plugins/ResponseHeaderPluginTests.java +++ b/src/test/java/org/elasticsearch/plugins/ResponseHeaderPluginTests.java @@ -18,19 +18,18 @@ */ package org.elasticsearch.plugins; -import org.apache.http.impl.client.HttpClients; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.plugins.responseheader.TestResponseHeaderPlugin; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; -import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; import org.elasticsearch.test.rest.client.http.HttpResponse; -import org.elasticsearch.plugins.responseheader.TestResponseHeaderPlugin; import org.junit.Test; +import static org.elasticsearch.rest.RestStatus.OK; +import static org.elasticsearch.rest.RestStatus.UNAUTHORIZED; import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus; import static org.hamcrest.Matchers.equalTo; /** @@ -52,11 +51,11 @@ public class ResponseHeaderPluginTests extends ElasticsearchIntegrationTest { public void testThatSettingHeadersWorks() throws Exception { ensureGreen(); HttpResponse response = httpClient().method("GET").path("/_protected").execute(); - assertThat(response.getStatusCode(), equalTo(RestStatus.UNAUTHORIZED.getStatus())); + assertThat(response, hasStatus(UNAUTHORIZED)); assertThat(response.getHeaders().get("Secret"), equalTo("required")); HttpResponse authResponse = httpClient().method("GET").path("/_protected").addHeader("Secret", "password").execute(); - assertThat(authResponse.getStatusCode(), equalTo(RestStatus.OK.getStatus())); + 
assertThat(authResponse, hasStatus(OK)); assertThat(authResponse.getHeaders().get("Secret"), equalTo("granted")); } diff --git a/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigTests.java b/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigTests.java new file mode 100644 index 00000000000..83755d7980c --- /dev/null +++ b/src/test/java/org/elasticsearch/plugins/SitePluginRelativePathConfigTests.java @@ -0,0 +1,90 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.plugins; + +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.test.ElasticsearchIntegrationTest; +import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; +import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; +import org.elasticsearch.test.rest.client.http.HttpResponse; +import org.junit.Test; + +import java.nio.file.Path; + +import static org.apache.lucene.util.Constants.WINDOWS; +import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; +import static org.elasticsearch.rest.RestStatus.OK; +import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope.SUITE; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus; + +@ClusterScope(scope = SUITE, numDataNodes = 1) +public class SitePluginRelativePathConfigTests extends ElasticsearchIntegrationTest { + + private final Path root = PathUtils.get(".").toAbsolutePath().getRoot(); + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + String cwdToRoot = getRelativePath(PathUtils.get(".").toAbsolutePath()); + Path pluginDir = PathUtils.get(cwdToRoot, relativizeToRootIfNecessary(getDataPath("/org/elasticsearch/plugins")).toString()); + + Path tempDir = createTempDir(); + boolean useRelativeInMiddleOfPath = randomBoolean(); + if (useRelativeInMiddleOfPath) { + pluginDir = PathUtils.get(tempDir.toString(), getRelativePath(tempDir), pluginDir.toString()); + } + + return settingsBuilder() + .put(super.nodeSettings(nodeOrdinal)) + .put("path.plugins", pluginDir) + .put("force.http.enabled", true) + .build(); + } + + @Test + public void testThatRelativePathsDontAffectPlugins() throws Exception { + HttpResponse response = httpClient().method("GET").path("/_plugin/dummy/").execute(); + assertThat(response, hasStatus(OK)); + } + + private Path relativizeToRootIfNecessary(Path path) { + if (WINDOWS) { + return root.relativize(path); + } + return path; + } + + private String getRelativePath(Path path) { + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < 
path.getNameCount(); i++) { + sb.append(".."); + sb.append(path.getFileSystem().getSeparator()); + } + + return sb.toString(); + } + + public HttpRequestBuilder httpClient() { + CloseableHttpClient httpClient = HttpClients.createDefault(); + return new HttpRequestBuilder(httpClient).httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class)); + } +} diff --git a/src/test/java/org/elasticsearch/plugins/SitePluginTests.java b/src/test/java/org/elasticsearch/plugins/SitePluginTests.java index 8106c6f60dd..8df880a3aa3 100644 --- a/src/test/java/org/elasticsearch/plugins/SitePluginTests.java +++ b/src/test/java/org/elasticsearch/plugins/SitePluginTests.java @@ -21,27 +21,24 @@ package org.elasticsearch.plugins; import org.apache.http.client.config.RequestConfig; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope; import org.elasticsearch.test.rest.client.http.HttpRequestBuilder; import org.elasticsearch.test.rest.client.http.HttpResponse; import org.junit.Test; -import java.net.URISyntaxException; import java.nio.file.Path; -import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.Locale; import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder; +import static org.elasticsearch.rest.RestStatus.*; import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; /** * We want to test site plugins @@ -70,12 +67,12 @@ public class SitePluginTests extends ElasticsearchIntegrationTest { public void testRedirectSitePlugin() throws Exception { // We use an HTTP Client to test redirection HttpResponse response = httpClient().method("GET").path("/_plugin/dummy").execute(); - assertThat(response.getStatusCode(), equalTo(RestStatus.MOVED_PERMANENTLY.getStatus())); + assertThat(response, hasStatus(MOVED_PERMANENTLY)); assertThat(response.getBody(), containsString("/_plugin/dummy/")); // We test the real URL response = httpClient().method("GET").path("/_plugin/dummy/").execute(); - assertThat(response.getStatusCode(), equalTo(RestStatus.OK.getStatus())); + assertThat(response, hasStatus(OK)); assertThat(response.getBody(), containsString("Dummy Site Plugin")); } @@ -85,7 +82,7 @@ public class SitePluginTests extends ElasticsearchIntegrationTest { @Test public void testAnyPage() throws Exception { HttpResponse response = httpClient().path("/_plugin/dummy/index.html").execute(); - assertThat(response.getStatusCode(), equalTo(RestStatus.OK.getStatus())); + assertThat(response, hasStatus(OK)); assertThat(response.getBody(), containsString("Dummy Site Plugin")); } @@ -108,12 +105,12 @@ public class SitePluginTests extends ElasticsearchIntegrationTest { for (String uri : notFoundUris) { HttpResponse response = httpClient().path(uri).execute(); String message = String.format(Locale.ROOT, "URI [%s] expected to be not found", uri); - assertThat(message, response.getStatusCode(), equalTo(RestStatus.NOT_FOUND.getStatus())); + assertThat(message, response, hasStatus(NOT_FOUND)); } // 
using relative path inside of the plugin should work
         HttpResponse response = httpClient().path("/_plugin/dummy/dir1/../dir1/../index.html").execute();
-        assertThat(response.getStatusCode(), equalTo(RestStatus.OK.getStatus()));
+        assertThat(response, hasStatus(OK));
         assertThat(response.getBody(), containsString("Dummy Site Plugin"));
     }
 
@@ -124,14 +121,14 @@ public class SitePluginTests extends ElasticsearchIntegrationTest {
     @Test
     public void testWelcomePageInSubDirs() throws Exception {
         HttpResponse response = httpClient().path("/_plugin/subdir/dir/").execute();
-        assertThat(response.getStatusCode(), equalTo(RestStatus.OK.getStatus()));
+        assertThat(response, hasStatus(OK));
         assertThat(response.getBody(), containsString("Dummy Site Plugin (subdir)"));
 
         response = httpClient().path("/_plugin/subdir/dir_without_index/").execute();
-        assertThat(response.getStatusCode(), equalTo(RestStatus.FORBIDDEN.getStatus()));
+        assertThat(response, hasStatus(FORBIDDEN));
 
         response = httpClient().path("/_plugin/subdir/dir_without_index/page.html").execute();
-        assertThat(response.getStatusCode(), equalTo(RestStatus.OK.getStatus()));
+        assertThat(response, hasStatus(OK));
         assertThat(response.getBody(), containsString("Dummy Site Plugin (page)"));
     }
 }
diff --git a/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java
index ab59da837e4..201bc879c67 100644
--- a/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java
+++ b/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java
@@ -66,6 +66,7 @@ import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.test.VersionUtils;
+import org.elasticsearch.test.rest.client.http.HttpResponse;
 import org.hamcrest.Matcher;
 import org.hamcrest.Matchers;
 import org.junit.Assert;
@@ -490,6 +491,10 @@ public class ElasticsearchAssertions {
         return new ElasticsearchMatchers.SearchHitHasScoreMatcher(score);
     }
 
+    public static Matcher<HttpResponse> hasStatus(RestStatus restStatus) {
+        return new ElasticsearchMatchers.HttpResponseHasStatusMatcher(restStatus);
+    }
+
     public static <T extends Query> T assertBooleanSubQuery(Query query, Class<T> subqueryType, int i) {
         assertThat(query, instanceOf(BooleanQuery.class));
         BooleanQuery q = (BooleanQuery) query;
diff --git a/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java b/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java
index f49cc3bd39e..1853d291c6d 100644
--- a/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java
+++ b/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchMatchers.java
@@ -18,8 +18,11 @@
  */
 package org.elasticsearch.test.hamcrest;
 
+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.test.rest.client.http.HttpResponse;
 import org.hamcrest.Description;
+import org.hamcrest.Matcher;
 import org.hamcrest.TypeSafeMatcher;
 
 public class ElasticsearchMatchers {
@@ -115,4 +118,28 @@ public class ElasticsearchMatchers {
             description.appendText("searchHit score should be ").appendValue(score);
         }
     }
+
+    public static class HttpResponseHasStatusMatcher extends TypeSafeMatcher<HttpResponse> {
+
+        private RestStatus restStatus;
+
+        public HttpResponseHasStatusMatcher(RestStatus restStatus) {
+            this.restStatus = restStatus;
+        }
+
+        @Override
+        protected boolean matchesSafely(HttpResponse response) {
+            return response.getStatusCode() == restStatus.getStatus();
+        }
+
+        @Override
+        public void describeMismatchSafely(final HttpResponse response, final Description mismatchDescription) {
+            mismatchDescription.appendText(" was ").appendValue(response.getStatusCode());
+        }
+
+        @Override
+        public void describeTo(final Description description) {
+            description.appendText("HTTP response status code should be ").appendValue(restStatus.getStatus());
+        }
+    }
 }
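
The hunks above replace raw status-code comparisons with a single hamcrest matcher, so a failing
assertion now reports both the expected and the actual HTTP status. A minimal usage sketch (not
part of this patch) is shown below; it is assumed to live inside a test class that, like
SitePluginTests above, has access to the httpClient() helper, and the test method name and
request paths are illustrative only:

    import static org.elasticsearch.rest.RestStatus.NOT_FOUND;
    import static org.elasticsearch.rest.RestStatus.OK;
    import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus;
    import static org.hamcrest.MatcherAssert.assertThat;

    import org.elasticsearch.test.rest.client.http.HttpResponse;
    import org.junit.Test;

    // Sketch only: exercises the new matcher for both the match and the mismatch case.
    @Test
    public void testStatusViaMatcher() throws Exception {
        // Passes when the response's numeric status code equals RestStatus.OK (200).
        HttpResponse response = httpClient().method("GET").path("/_plugin/dummy/").execute();
        assertThat(response, hasStatus(OK));

        // Hypothetical missing page: on a mismatch, HttpResponseHasStatusMatcher reports the
        // expected code ("HTTP response status code should be ...") and the actual one (" was ...").
        response = httpClient().method("GET").path("/_plugin/dummy/no_such_page.html").execute();
        assertThat(response, hasStatus(NOT_FOUND));
    }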