Merge branch 'master' into feature/query-refactoring

Christoph Büscher 2015-05-15 10:56:22 +02:00
commit d7884b662c
45 changed files with 764 additions and 348 deletions

Binary file not shown.

@@ -68,3 +68,6 @@ JAVA_OPTS="$JAVA_OPTS -XX:+DisableExplicitGC"
 # Ensure UTF-8 encoding by default (e.g. filenames)
 JAVA_OPTS="$JAVA_OPTS -Dfile.encoding=UTF-8"
+
+# Use our provided JNA always versus the system one
+JAVA_OPTS="$JAVA_OPTS -Djna.nosys=true"

@@ -212,6 +212,7 @@ You can disable that check using `plugins.check_lucene: false`.
 .Supported by the community
 * https://github.com/shikhar/eskka[eskka Discovery Plugin] (by Shikhar Bhushan)
+* https://github.com/grantr/elasticsearch-srv-discovery[DNS SRV Discovery Plugin] (by Grant Rodgers)
 
 [float]
 [[river]]

@@ -50,6 +50,8 @@ to. The `mode` option can have the following values:
     number based array fields.
 `avg`:: Use the average of all values as sort value. Only applicable
     for number based array fields.
+`median`:: Use the median of all values as sort value. Only applicable
+    for number based array fields.
 
 ===== Sort mode example usage

@@ -163,20 +163,18 @@ can contain misspellings (See parameter descriptions below).
 `collate`::
     Checks each suggestion against the specified `query` or `filter` to
-    prune suggestions for which no matching docs exist in the index. Either
-    a `query` or a `filter` must be specified, and it is run as a
-    <<query-dsl-template-query,`template` query>>. The current suggestion is
-    automatically made available as the `{{suggestion}}` variable, which
-    should be used in your query/filter. You can still specify your own
-    template `params` -- the `suggestion` value will be added to the
-    variables you specify. You can specify a `preference` to control
-    on which shards the query is executed (see <<search-request-preference>>).
-    The default value is `_only_local`. Additionally, you can specify
-    a `prune` to control if all phrase suggestions will be
-    returned, when set to `true` the suggestions will have an additional
-    option `collate_match`, which will be `true` if matching documents
-    for the phrase was found, `false` otherwise. The default value for
-    `prune` is `false`.
+    prune suggestions for which no matching docs exist in the index.
+    The collate query for a suggestion is run only on the local shard from which
+    the suggestion has been generated from. Either a `query` or a `filter` must
+    be specified, and it is run as a <<query-dsl-template-query,`template` query>>.
+    The current suggestion is automatically made available as the `{{suggestion}}`
+    variable, which should be used in your query/filter. You can still specify
+    your own template `params` -- the `suggestion` value will be added to the
+    variables you specify. Additionally, you can specify a `prune` to control
+    if all phrase suggestions will be returned, when set to `true` the suggestions
+    will have an additional option `collate_match`, which will be `true` if
+    matching documents for the phrase was found, `false` otherwise.
+    The default value for `prune` is `false`.
 
 [source,js]
 --------------------------------------------------
@@ -199,8 +197,7 @@ curl -XPOST 'localhost:9200/_search' -d {
           }
         },
         "params": {"field_name" : "title"}, <3>
-        "preference": "_primary", <4>
-        "prune": true <5>
+        "prune": true <4>
       }
     }
 }
@@ -212,8 +209,7 @@ curl -XPOST 'localhost:9200/_search' -d {
 of each suggestion.
 <3> An additional `field_name` variable has been specified in
     `params` and is used by the `match` query.
-<4> The default `preference` has been changed to `_primary`.
-<5> All suggestions will be returned with an extra `collate_match`
+<4> All suggestions will be returned with an extra `collate_match`
     option indicating whether the generated phrase matched any
     document.

@@ -469,6 +469,7 @@
                             <argument>-XX:+HeapDumpOnOutOfMemoryError</argument>
                             <argument>-XX:+DisableExplicitGC</argument>
                             <argument>-Dfile.encoding=UTF-8</argument>
+                            <argument>-Djna.nosys=true</argument>
                             <argument>-Delasticsearch</argument>
                         </arguments>
                     </configuration>
@@ -616,6 +617,8 @@
                         <tests.timeoutSuite>${tests.timeoutSuite}</tests.timeoutSuite>
                         <tests.showSuccess>${tests.showSuccess}</tests.showSuccess>
                         <tests.integration>${tests.integration}</tests.integration>
+                        <tests.thirdparty>${tests.thirdparty}</tests.thirdparty>
+                        <tests.config>${tests.config}</tests.config>
                         <tests.client.ratio>${tests.client.ratio}</tests.client.ratio>
                         <tests.enable_mock_modules>${tests.enable_mock_modules}</tests.enable_mock_modules>
                         <tests.assertion.disabled>${tests.assertion.disabled}</tests.assertion.disabled>

@@ -130,27 +130,26 @@ public class TransportSuggestAction extends TransportBroadcastOperationAction<Su
     protected ShardSuggestResponse shardOperation(ShardSuggestRequest request) {
         IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
         IndexShard indexShard = indexService.shardSafe(request.shardId().id());
-        final Engine.Searcher searcher = indexShard.acquireSearcher("suggest");
         ShardSuggestService shardSuggestService = indexShard.shardSuggestService();
         shardSuggestService.preSuggest();
         long startTime = System.nanoTime();
         XContentParser parser = null;
-        try {
+        try (Engine.Searcher searcher = indexShard.acquireSearcher("suggest")) {
             BytesReference suggest = request.suggest();
             if (suggest != null && suggest.length() > 0) {
                 parser = XContentFactory.xContent(suggest).createParser(suggest);
                 if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
                     throw new IllegalArgumentException("suggest content missing");
                 }
-                final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(), request.shardId().getIndex(), request.shardId().id());
-                final Suggest result = suggestPhase.execute(context, searcher.reader());
+                final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(),
+                        indexService.queryParserService(), request.shardId().getIndex(), request.shardId().id());
+                final Suggest result = suggestPhase.execute(context, searcher.searcher());
                 return new ShardSuggestResponse(request.shardId(), result);
             }
             return new ShardSuggestResponse(request.shardId(), new Suggest());
         } catch (Throwable ex) {
             throw new ElasticsearchException("failed to execute suggest", ex);
         } finally {
-            searcher.close();
             if (parser != null) {
                 parser.close();
             }
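
For background on the change above: try-with-resources guarantees the searcher is released even when parsing or execution throws, replacing the manual `finally { searcher.close(); }`. A minimal sketch of the pattern, using a hypothetical `DemoSearcher` stand-in rather than the real Elasticsearch classes:

// Hypothetical stand-in type; any AutoCloseable behaves the same way.
final class DemoSearcher implements AutoCloseable {
    int docCount() { return 0; }                  // placeholder for real work
    @Override public void close() { /* release underlying resources */ }
}

class TryWithResourcesDemo {
    static int run() {
        try (DemoSearcher searcher = new DemoSearcher()) { // acquired here
            return searcher.docCount();                    // may throw
        }                                                  // close() always runs
    }
}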

@@ -85,16 +85,16 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
                     final int dash = autoExpandReplicas.indexOf('-');
                     if (-1 == dash) {
-                        logger.warn("Unexpected value [{}] for setting [{}]; it should be dash delimited",
-                                autoExpandReplicas, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS);
+                        logger.warn("failed to set [{}] for index [{}], it should be dash delimited [{}]",
+                                IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.index(), autoExpandReplicas);
                         continue;
                     }
                     final String sMin = autoExpandReplicas.substring(0, dash);
                     try {
                         min = Integer.parseInt(sMin);
                     } catch (NumberFormatException e) {
-                        logger.warn("failed to set [{}], minimum value is not a number [{}]",
-                                e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, sMin);
+                        logger.warn("failed to set [{}] for index [{}], minimum value is not a number [{}]",
+                                e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.index(), sMin);
                         continue;
                     }
                     String sMax = autoExpandReplicas.substring(dash + 1);
@@ -104,8 +104,8 @@ public class MetaDataUpdateSettingsService extends AbstractComponent implements
                         try {
                             max = Integer.parseInt(sMax);
                         } catch (NumberFormatException e) {
-                            logger.warn("failed to set [{}], maximum value is neither [{}] nor a number [{}]",
-                                    e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, ALL_NODES_VALUE, sMax);
+                            logger.warn("failed to set [{}] for index [{}], maximum value is neither [{}] nor a number [{}]",
+                                    e, IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, indexMetaData.index(), ALL_NODES_VALUE, sMax);
                             continue;
                         }
                     }
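
For context, `index.auto_expand_replicas` accepts a dash-delimited `min-max` range such as `0-5` or `0-all`; the logging changes above add the index name to each failure message. A simplified sketch of that parsing contract (not the actual Elasticsearch implementation):

class AutoExpandSketch {
    // Simplified sketch: parse "min-max" where max may be the literal "all".
    static int[] parseAutoExpandReplicas(String value) {
        final int dash = value.indexOf('-');
        if (dash == -1) {
            throw new IllegalArgumentException("should be dash delimited: " + value);
        }
        final int min = Integer.parseInt(value.substring(0, dash));
        final String sMax = value.substring(dash + 1);
        // "all" expands to every available data node; -1 is just a marker here
        final int max = "all".equals(sMax) ? -1 : Integer.parseInt(sMax);
        return new int[] { min, max };
    }
}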

@@ -183,7 +183,7 @@ public class HttpServer extends AbstractLifecycleComponent<HttpServer> {
         Path file = siteFile.resolve(sitePath);
         // return not found instead of forbidden to prevent malicious requests to find out if files exist or dont exist
-        if (!Files.exists(file) || Files.isHidden(file) || !file.toAbsolutePath().normalize().startsWith(siteFile.toAbsolutePath())) {
+        if (!Files.exists(file) || Files.isHidden(file) || !file.toAbsolutePath().normalize().startsWith(siteFile.toAbsolutePath().normalize())) {
             channel.sendResponse(new BytesRestResponse(NOT_FOUND));
             return;
         }
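
The one-line fix above normalizes the site root as well as the requested path before the `startsWith` containment check. If only one side is normalized, a root containing `.` or `..` segments never prefix-matches and the check silently misbehaves. A small illustration with made-up paths (not the actual `HttpServer` code):

import java.nio.file.Path;
import java.nio.file.Paths;

class PathContainmentDemo {
    public static void main(String[] args) {
        Path siteRoot = Paths.get("/plugins/demo/_site/.");   // unnormalized root
        Path requested = siteRoot.resolve("index.html");

        // Comparing a normalized path against an unnormalized root fails:
        boolean oneSided = requested.normalize().startsWith(siteRoot);              // false
        // Normalizing both sides gives the intended containment check:
        boolean bothSides = requested.normalize().startsWith(siteRoot.normalize()); // true
        System.out.println(oneSided + " " + bothSides);
    }
}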

@@ -369,6 +369,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper<String> {
         payload = payload == null ? EMPTY : payload;
         if (surfaceForm == null) { // no surface form use the input
             for (String input : inputs) {
+                if (input.length() == 0) {
+                    continue;
+                }
                 BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef(
                         input), weight, payload);
                 context.doc().add(getCompletionField(ctx, input, suggestPayload));
@@ -377,6 +380,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper<String> {
             BytesRef suggestPayload = analyzingSuggestLookupProvider.buildPayload(new BytesRef(
                     surfaceForm), weight, payload);
             for (String input : inputs) {
+                if (input.length() == 0) {
+                    continue;
+                }
                 context.doc().add(getCompletionField(ctx, input, suggestPayload));
             }
         }

@@ -54,6 +54,7 @@ import org.elasticsearch.index.mapper.RootMapper;
 import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -150,7 +151,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements Ro
                 Map.Entry<String, Object> entry = iterator.next();
                 String fieldName = Strings.toUnderscoreCase(entry.getKey());
                 Object fieldNode = entry.getValue();
-                if (fieldName.equals("enabled") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
+                if (fieldName.equals("enabled")) {
                     builder.enabled(nodeBooleanValue(fieldNode));
                     iterator.remove();
                 } else if (fieldName.equals("compress") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
@@ -172,7 +173,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements Ro
                 } else if ("format".equals(fieldName)) {
                     builder.format(nodeStringValue(fieldNode, null));
                     iterator.remove();
-                } else if (fieldName.equals("includes") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
+                } else if (fieldName.equals("includes")) {
                     List<Object> values = (List<Object>) fieldNode;
                     String[] includes = new String[values.size()];
                     for (int i = 0; i < includes.length; i++) {
@@ -180,7 +181,7 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements Ro
                     }
                     builder.includes(includes);
                     iterator.remove();
-                } else if (fieldName.equals("excludes") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
+                } else if (fieldName.equals("excludes")) {
                     List<Object> values = (List<Object>) fieldNode;
                     String[] excludes = new String[values.size()];
                     for (int i = 0; i < excludes.length; i++) {
@@ -197,11 +198,14 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements Ro
     private final boolean enabled;
 
+    /** indicates whether the source will always exist and be complete, for use by features like the update API */
+    private final boolean complete;
+
     private Boolean compress;
     private long compressThreshold;
 
-    private String[] includes;
-    private String[] excludes;
+    private final String[] includes;
+    private final String[] excludes;
 
     private String format;
@@ -222,10 +226,11 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements Ro
         this.excludes = excludes;
         this.format = format;
         this.formatContentType = format == null ? null : XContentType.fromRestContentType(format);
+        this.complete = enabled && includes == null && excludes == null;
     }
 
     public boolean enabled() {
-        return this.enabled;
+        return enabled;
     }
 
     public String[] excludes() {
@@ -237,6 +242,10 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements Ro
         return this.includes != null ? this.includes : Strings.EMPTY_ARRAY;
     }
 
+    public boolean isComplete() {
+        return complete;
+    }
+
     @Override
     public FieldType defaultFieldType() {
         return Defaults.FIELD_TYPE;
@@ -420,19 +429,23 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements Ro
     @Override
     public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
         SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith;
-        if (!mergeResult.simulate()) {
+        if (mergeResult.simulate()) {
+            if (this.enabled != sourceMergeWith.enabled) {
+                mergeResult.addConflict("Cannot update enabled setting for [_source]");
+            }
+            if (Arrays.equals(includes(), sourceMergeWith.includes()) == false) {
+                mergeResult.addConflict("Cannot update includes setting for [_source]");
+            }
+            if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) {
+                mergeResult.addConflict("Cannot update excludes setting for [_source]");
+            }
+        } else {
             if (sourceMergeWith.compress != null) {
                 this.compress = sourceMergeWith.compress;
             }
             if (sourceMergeWith.compressThreshold != -1) {
                 this.compressThreshold = sourceMergeWith.compressThreshold;
             }
-            if (sourceMergeWith.includes != null) {
-                this.includes = sourceMergeWith.includes;
-            }
-            if (sourceMergeWith.excludes != null) {
-                this.excludes = sourceMergeWith.excludes;
-            }
         }
     }
 }
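
The merge rewrite above moves `includes`/`excludes` from silently-updatable state to simulate-time conflicts. The general shape of that two-pass pattern, sketched with hypothetical types (the real `MergeResult` API differs):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Hypothetical two-pass merge: the simulate pass only records conflicts,
// the apply pass mutates nothing that was declared immutable.
class MergeResultSketch {
    final boolean simulate;
    final List<String> conflicts = new ArrayList<>();
    MergeResultSketch(boolean simulate) { this.simulate = simulate; }
    void addConflict(String msg) { conflicts.add(msg); }
}

class SourceMergeSketch {
    boolean enabled = true;
    String[] includes = {"user.*"};

    void merge(SourceMergeSketch other, MergeResultSketch result) {
        if (result.simulate) {
            if (enabled != other.enabled) {
                result.addConflict("Cannot update enabled setting for [_source]");
            }
            if (!Arrays.equals(includes, other.includes)) {
                result.addConflict("Cannot update includes setting for [_source]");
            }
        }
        // else: apply only the settings that remain mutable (compress, threshold)
    }
}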

@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script.expression;
+
+import org.apache.lucene.queries.function.ValueSource;
+import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
+import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
+import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
+
+/**
+ * FunctionValues to get the count of the number of values in a field for a document.
+ */
+public class CountMethodFunctionValues extends DoubleDocValues {
+    SortedNumericDoubleValues values;
+
+    CountMethodFunctionValues(ValueSource parent, AtomicNumericFieldData fieldData) {
+        super(parent);
+        values = fieldData.getDoubleValues();
+    }
+
+    @Override
+    public double doubleVal(int doc) {
+        values.setDocument(doc);
+        return values.count();
+    }
+}

@@ -0,0 +1,73 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script.expression;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.queries.function.FunctionValues;
+import org.apache.lucene.queries.function.ValueSource;
+import org.elasticsearch.index.fielddata.AtomicFieldData;
+import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
+import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.search.MultiValueMode;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A ValueSource to create FunctionValues to get the count of the number of values in a field for a document.
+ */
+public class CountMethodValueSource extends ValueSource {
+    protected IndexFieldData<?> fieldData;
+
+    protected CountMethodValueSource(IndexFieldData<?> fieldData) {
+        Objects.requireNonNull(fieldData);
+        this.fieldData = fieldData;
+    }
+
+    @Override
+    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
+        AtomicFieldData leafData = fieldData.load(leaf);
+        assert(leafData instanceof AtomicNumericFieldData);
+        return new CountMethodFunctionValues(this, (AtomicNumericFieldData)leafData);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        // cast to this class, not FieldDataValueSource; this class extends
+        // ValueSource directly, so the original cast would always throw
+        CountMethodValueSource that = (CountMethodValueSource) o;
+        return fieldData.equals(that.fieldData);
+    }
+
+    @Override
+    public int hashCode() {
+        return fieldData.hashCode();
+    }
+
+    @Override
+    public String description() {
+        return "count: field(" + fieldData.getFieldNames().toString() + ")";
+    }
+}

@@ -25,13 +25,14 @@ import java.util.TimeZone;
 import org.apache.lucene.queries.function.ValueSource;
 import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
+import org.elasticsearch.search.MultiValueMode;
 
 class DateMethodFunctionValues extends FieldDataFunctionValues {
     private final int calendarType;
     private final Calendar calendar;
 
-    DateMethodFunctionValues(ValueSource parent, AtomicNumericFieldData data, int calendarType) {
-        super(parent, data);
+    DateMethodFunctionValues(ValueSource parent, MultiValueMode multiValueMode, AtomicNumericFieldData data, int calendarType) {
+        super(parent, multiValueMode, data);
 
         this.calendarType = calendarType;
         calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);

@@ -29,14 +29,15 @@ import org.apache.lucene.queries.function.FunctionValues;
 import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.search.MultiValueMode;
 
 class DateMethodValueSource extends FieldDataValueSource {
 
     protected final String methodName;
     protected final int calendarType;
 
-    DateMethodValueSource(IndexFieldData<?> indexFieldData, String methodName, int calendarType) {
-        super(indexFieldData);
+    DateMethodValueSource(IndexFieldData<?> indexFieldData, MultiValueMode multiValueMode, String methodName, int calendarType) {
+        super(indexFieldData, multiValueMode);
 
         Objects.requireNonNull(methodName);
@@ -44,6 +45,19 @@ class DateMethodValueSource extends FieldDataValueSource {
         this.calendarType = calendarType;
     }
 
+    @Override
+    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
+        AtomicFieldData leafData = fieldData.load(leaf);
+        assert(leafData instanceof AtomicNumericFieldData);
+        return new DateMethodFunctionValues(this, multiValueMode, (AtomicNumericFieldData)leafData, calendarType);
+    }
+
+    @Override
+    public String description() {
+        return methodName + ": field(" + fieldData.getFieldNames().toString() + ")";
+    }
+
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
@@ -64,17 +78,4 @@ class DateMethodValueSource extends FieldDataValueSource {
         result = 31 * result + calendarType;
         return result;
     }
-
-    @Override
-    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
-        AtomicFieldData leafData = fieldData.load(leaf);
-        assert(leafData instanceof AtomicNumericFieldData);
-        return new DateMethodFunctionValues(this, (AtomicNumericFieldData)leafData, calendarType);
-    }
-
-    @Override
-    public String description() {
-        return methodName + ": field(" + fieldData.getFieldNames().toString() + ")";
-    }
 }

@@ -39,6 +39,7 @@ import org.elasticsearch.script.CompiledScript;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.ScriptEngineService;
 import org.elasticsearch.script.SearchScript;
+import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.search.lookup.SearchLookup;
 
 import java.text.ParseException;
@@ -60,6 +61,13 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
     protected static final String GET_MINUTES_METHOD = "getMinutes";
     protected static final String GET_SECONDS_METHOD = "getSeconds";
 
+    protected static final String MINIMUM_METHOD = "min";
+    protected static final String MAXIMUM_METHOD = "max";
+    protected static final String AVERAGE_METHOD = "avg";
+    protected static final String MEDIAN_METHOD = "median";
+    protected static final String SUM_METHOD = "sum";
+    protected static final String COUNT_METHOD = "count";
+
     @Inject
     public ExpressionScriptEngineService(Settings settings) {
         super(settings);
@@ -156,7 +164,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
                         IndexFieldData<?> fieldData = lookup.doc().fieldDataService().getForField((NumberFieldMapper)field);
                         if (methodname == null) {
-                            bindings.add(variable, new FieldDataValueSource(fieldData));
+                            bindings.add(variable, new FieldDataValueSource(fieldData, MultiValueMode.MIN));
                         } else {
                             bindings.add(variable, getMethodValueSource(field, fieldData, fieldname, methodname));
                         }
@@ -180,6 +188,18 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
                 return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.MINUTE);
             case GET_SECONDS_METHOD:
                 return getDateMethodValueSource(field, fieldData, fieldName, methodName, Calendar.SECOND);
+            case MINIMUM_METHOD:
+                return new FieldDataValueSource(fieldData, MultiValueMode.MIN);
+            case MAXIMUM_METHOD:
+                return new FieldDataValueSource(fieldData, MultiValueMode.MAX);
+            case AVERAGE_METHOD:
+                return new FieldDataValueSource(fieldData, MultiValueMode.AVG);
+            case MEDIAN_METHOD:
+                return new FieldDataValueSource(fieldData, MultiValueMode.MEDIAN);
+            case SUM_METHOD:
+                return new FieldDataValueSource(fieldData, MultiValueMode.SUM);
+            case COUNT_METHOD:
+                return new CountMethodValueSource(fieldData);
             default:
                 throw new IllegalArgumentException("Member method [" + methodName + "] does not exist.");
         }
@@ -190,7 +210,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
             throw new IllegalArgumentException("Member method [" + methodName + "] can only be used with a date field type, not the field [" + fieldName + "].");
         }
-        return new DateMethodValueSource(fieldData, methodName, calendarType);
+        return new DateMethodValueSource(fieldData, MultiValueMode.MIN, methodName, calendarType);
     }
 
     @Override

@@ -31,9 +31,9 @@ import org.elasticsearch.search.MultiValueMode;
 class FieldDataFunctionValues extends DoubleDocValues {
     NumericDoubleValues dataAccessor;
 
-    FieldDataFunctionValues(ValueSource parent, AtomicNumericFieldData d) {
+    FieldDataFunctionValues(ValueSource parent, MultiValueMode m, AtomicNumericFieldData d) {
         super(parent);
-        dataAccessor = MultiValueMode.MIN.select(d.getDoubleValues(), 0d);
+        dataAccessor = m.select(d.getDoubleValues(), 0d);
     }
 
     @Override

@@ -25,6 +25,7 @@ import org.apache.lucene.queries.function.ValueSource;
 import org.elasticsearch.index.fielddata.AtomicFieldData;
 import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData;
+import org.elasticsearch.search.MultiValueMode;
 
 import java.io.IOException;
 import java.util.Map;
@@ -36,18 +37,14 @@ import java.util.Objects;
 class FieldDataValueSource extends ValueSource {
 
     protected IndexFieldData<?> fieldData;
+    protected MultiValueMode multiValueMode;
 
-    protected FieldDataValueSource(IndexFieldData<?> d) {
+    protected FieldDataValueSource(IndexFieldData<?> d, MultiValueMode m) {
         Objects.requireNonNull(d);
+        Objects.requireNonNull(m);
         fieldData = d;
-    }
-
-    @Override
-    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
-        AtomicFieldData leafData = fieldData.load(leaf);
-        assert(leafData instanceof AtomicNumericFieldData);
-        return new FieldDataFunctionValues(this, (AtomicNumericFieldData)leafData);
+        multiValueMode = m;
     }
 
     @Override
@@ -57,12 +54,23 @@ class FieldDataValueSource extends ValueSource {
 
         FieldDataValueSource that = (FieldDataValueSource) o;
 
-        return fieldData.equals(that.fieldData);
+        if (!fieldData.equals(that.fieldData)) return false;
+        return multiValueMode == that.multiValueMode;
     }
 
     @Override
     public int hashCode() {
-        return fieldData.hashCode();
+        int result = fieldData.hashCode();
+        result = 31 * result + multiValueMode.hashCode();
+        return result;
+    }
+
+    @Override
+    public FunctionValues getValues(Map context, LeafReaderContext leaf) throws IOException {
+        AtomicFieldData leafData = fieldData.load(leaf);
+        assert(leafData instanceof AtomicNumericFieldData);
+        return new FieldDataFunctionValues(this, multiValueMode, (AtomicNumericFieldData)leafData);
     }
 
     @Override

@@ -105,6 +105,46 @@ public enum MultiValueMode {
         }
     },
 
+    /**
+     * Median of the values.
+     *
+     * Note that apply/reduce do not work with MED since median cannot be derived from
+     * an accumulator algorithm without using internal memory.
+     */
+    MEDIAN {
+        @Override
+        protected long pick(SortedNumericDocValues values, long missingValue) {
+            int count = values.count();
+            if (count > 0) {
+                if (count % 2 == 0) {
+                    count /= 2;
+                    return (values.valueAt(count - 1) + values.valueAt(count)) / 2;
+                } else {
+                    count /= 2;
+                    return values.valueAt(count);
+                }
+            } else {
+                return missingValue;
+            }
+        }
+
+        @Override
+        protected double pick(SortedNumericDoubleValues values, double missingValue) {
+            int count = values.count();
+            if (count > 0) {
+                if (count % 2 == 0) {
+                    count /= 2;
+                    return (values.valueAt(count - 1) + values.valueAt(count)) / 2;
+                } else {
+                    count /= 2;
+                    return values.valueAt(count);
+                }
+            } else {
+                return missingValue;
+            }
+        }
+    },
+
     /**
      * Pick the lowest value.
      */
@@ -288,7 +328,9 @@ public enum MultiValueMode {
      * @param b another argument
      * @return the result of the function.
      */
-    public abstract double apply(double a, double b);
+    public double apply(double a, double b) {
+        throw new UnsupportedOperationException();
+    }
 
     /**
      * Applies the sort mode and returns the result. This method is meant to be
@@ -302,7 +344,9 @@ public enum MultiValueMode {
      * @param b another argument
      * @return the result of the function.
      */
-    public abstract long apply(long a, long b);
+    public long apply(long a, long b) {
+        throw new UnsupportedOperationException();
+    }
 
     public int applyOrd(int ord1, int ord2) {
         throw new UnsupportedOperationException();
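
Since doc values arrive sorted, the new MEDIAN mode can pick by index: the middle value for an odd count, the mean of the two middle values for an even count (truncating in the `long` variant). A quick standalone check of that logic:

class MedianPickDemo {
    // Mirrors MultiValueMode.MEDIAN.pick over already-sorted values.
    static long median(long[] sorted) {
        int count = sorted.length;
        if (count % 2 == 0) {
            return (sorted[count / 2 - 1] + sorted[count / 2]) / 2; // integer division
        }
        return sorted[count / 2];
    }

    public static void main(String[] args) {
        System.out.println(median(new long[] {1, 3, 9}));    // 3  (middle value)
        System.out.println(median(new long[] {2, 4, 6, 8})); // 5  ((4 + 6) / 2)
        System.out.println(median(new long[] {1, 2}));       // 1  (1.5 truncated)
    }
}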

@@ -22,8 +22,9 @@ import java.io.IOException;
 
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.query.IndexQueryParserService;
 
 public interface SuggestContextParser {
-    public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService) throws IOException;
+    public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException;
 }

@@ -22,6 +22,7 @@ import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext;
@@ -44,11 +45,11 @@ public final class SuggestParseElement implements SearchParseElement {
 
     @Override
     public void parse(XContentParser parser, SearchContext context) throws Exception {
-        SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.shardTarget().index(), context.shardTarget().shardId());
+        SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.queryParserService(), context.shardTarget().index(), context.shardTarget().shardId());
         context.suggest(suggestionSearchContext);
     }
 
-    public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, String index, int shardId) throws IOException {
+    public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService, String index, int shardId) throws IOException {
         SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext();
         BytesRef globalText = null;
         String fieldName = null;
@@ -86,7 +87,7 @@ public final class SuggestParseElement implements SearchParseElement {
                         throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported");
                     }
                     final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser();
-                    suggestionContext = contextParser.parse(parser, mapperService);
+                    suggestionContext = contextParser.parse(parser, mapperService, queryParserService);
                 }
             }
             if (suggestionContext != null) {

@@ -19,7 +19,7 @@
 package org.elasticsearch.search.suggest;
 
 import com.google.common.collect.ImmutableMap;
-import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.component.AbstractComponent;
@@ -71,10 +71,10 @@ public class SuggestPhase extends AbstractComponent implements SearchPhase {
         if (suggest == null) {
             return;
         }
-        context.queryResult().suggest(execute(suggest, context.searcher().getIndexReader()));
+        context.queryResult().suggest(execute(suggest, context.searcher()));
     }
 
-    public Suggest execute(SuggestionSearchContext suggest, IndexReader reader) {
+    public Suggest execute(SuggestionSearchContext suggest, IndexSearcher searcher) {
         try {
             CharsRefBuilder spare = new CharsRefBuilder();
             final List<Suggestion<? extends Entry<? extends Option>>> suggestions = new ArrayList<>(suggest.suggestions().size());
@@ -82,7 +82,7 @@ public class SuggestPhase extends AbstractComponent implements SearchPhase {
             for (Map.Entry<String, SuggestionSearchContext.SuggestionContext> entry : suggest.suggestions().entrySet()) {
                 SuggestionSearchContext.SuggestionContext suggestion = entry.getValue();
                 Suggester<SuggestionContext> suggester = suggestion.getSuggester();
-                Suggestion<? extends Entry<? extends Option>> result = suggester.execute(entry.getKey(), suggestion, reader, spare);
+                Suggestion<? extends Entry<? extends Option>> result = suggester.execute(entry.getKey(), suggestion, searcher, spare);
                 if (result != null) {
                     assert entry.getKey().equals(result.name);
                     suggestions.add(result);

@@ -19,7 +19,7 @@
 package org.elasticsearch.search.suggest;
 
-import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.util.CharsRefBuilder;
 
 import java.io.IOException;
@@ -27,19 +27,20 @@ import java.io.IOException;
 public abstract class Suggester<T extends SuggestionSearchContext.SuggestionContext> {
 
     protected abstract Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>
-        innerExecute(String name, T suggestion, IndexReader indexReader, CharsRefBuilder spare) throws IOException;
+        innerExecute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException;
 
     public abstract String[] names();
 
     public abstract SuggestContextParser getContextParser();
 
     public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>
-        execute(String name, T suggestion, IndexReader indexReader, CharsRefBuilder spare) throws IOException {
+        execute(String name, T suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException {
         // #3469 We want to ignore empty shards
-        if (indexReader.numDocs() == 0) {
+        if (searcher.getIndexReader().numDocs() == 0) {
             return null;
         }
-        return innerExecute(name, suggestion, indexReader, spare);
+        return innerExecute(name, suggestion, searcher, spare);
     }
 }

@@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.core.CompletionFieldMapper;
+import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.search.suggest.SuggestContextParser;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;
 import org.elasticsearch.search.suggest.context.ContextMapping.ContextQuery;
@@ -48,7 +49,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
     }
 
     @Override
-    public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService) throws IOException {
+    public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException {
         XContentParser.Token token;
         String fieldName = null;
         CompletionSuggestionContext suggestion = new CompletionSuggestionContext(completionSuggester);

@@ -23,6 +23,7 @@ import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Terms;
+import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.suggest.Lookup;
 import org.apache.lucene.util.CharsRefBuilder;
 import org.apache.lucene.util.CollectionUtil;
@@ -48,11 +49,11 @@ public class CompletionSuggester extends Suggester<CompletionSuggestionContext>
 
     @Override
     protected Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(String name,
-            CompletionSuggestionContext suggestionContext, IndexReader indexReader, CharsRefBuilder spare) throws IOException {
+            CompletionSuggestionContext suggestionContext, IndexSearcher searcher, CharsRefBuilder spare) throws IOException {
         if (suggestionContext.mapper() == null || !(suggestionContext.mapper() instanceof CompletionFieldMapper)) {
             throw new ElasticsearchException("Field [" + suggestionContext.getField() + "] is not a completion suggest field");
         }
+        final IndexReader indexReader = searcher.getIndexReader();
         CompletionSuggestion completionSuggestion = new CompletionSuggestion(name, suggestionContext.getSize());
         spare.copyUTF8Bytes(suggestionContext.getText());

@@ -60,7 +60,7 @@ public final class NoisyChannelSpellChecker {
     }
 
     public Result getCorrections(TokenStream stream, final CandidateGenerator generator,
-            float maxErrors, int numCorrections, IndexReader reader, WordScorer wordScorer, BytesRef separator, float confidence, int gramSize) throws IOException {
+            float maxErrors, int numCorrections, WordScorer wordScorer, float confidence, int gramSize) throws IOException {
 
         final List<CandidateSet> candidateSetsList = new ArrayList<>();
         SuggestUtils.analyze(stream, new SuggestUtils.TokenConsumer() {
@@ -134,7 +134,7 @@ public final class NoisyChannelSpellChecker {
 
     public Result getCorrections(Analyzer analyzer, BytesRef query, CandidateGenerator generator,
             float maxErrors, int numCorrections, IndexReader reader, String analysisField, WordScorer scorer, float confidence, int gramSize) throws IOException {
-        return getCorrections(tokenStream(analyzer, query, new CharsRefBuilder(), analysisField), generator, maxErrors, numCorrections, reader, scorer, new BytesRef(" "), confidence, gramSize);
+        return getCorrections(tokenStream(analyzer, query, new CharsRefBuilder(), analysisField), generator, maxErrors, numCorrections, scorer, confidence, gramSize);
     }

@@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.index.analysis.ShingleTokenFilterFactory;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.script.CompiledScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptContext;
@@ -49,8 +50,9 @@ public final class PhraseSuggestParser implements SuggestContextParser {
     }
 
     @Override
-    public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService) throws IOException {
+    public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException {
         PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester);
+        suggestion.setQueryParserService(queryParserService);
         XContentParser.Token token;
         String fieldName = null;
         boolean gramSizeSet = false;
@@ -159,8 +161,6 @@ public final class PhraseSuggestParser implements SuggestContextParser {
                     } else {
                         suggestion.setCollateFilterScript(compiledScript);
                     }
-                } else if ("preference".equals(fieldName)) {
-                    suggestion.setPreference(parser.text());
                 } else if ("params".equals(fieldName)) {
                     suggestion.setCollateScriptParams(parser.map());
                 } else if ("prune".equals(fieldName)) {

@@ -23,22 +23,19 @@ import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.MultiFields;
 import org.apache.lucene.index.Terms;
+import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.spell.DirectSpellChecker;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.CharsRefBuilder;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.search.MultiSearchRequestBuilder;
-import org.elasticsearch.action.search.MultiSearchResponse;
-import org.elasticsearch.action.search.SearchRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.client.Client;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector;
 import org.elasticsearch.common.text.StringText;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.script.CompiledScript;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.ScriptService;
@@ -58,12 +55,10 @@ import java.util.Map;
 public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
     private final BytesRef SEPARATOR = new BytesRef(" ");
     private static final String SUGGESTION_TEMPLATE_VAR_NAME = "suggestion";
-    private final Client client;
     private final ScriptService scriptService;
 
     @Inject
-    public PhraseSuggester(Client client, ScriptService scriptService) {
-        this.client = client;
+    public PhraseSuggester(ScriptService scriptService) {
         this.scriptService = scriptService;
     }
 
@@ -76,11 +71,11 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
      * - phonetic filters could be interesting here too for candidate selection
      */
     @Override
-    public Suggestion<? extends Entry<? extends Option>> innerExecute(String name, PhraseSuggestionContext suggestion,
-            IndexReader indexReader, CharsRefBuilder spare) throws IOException {
+    public Suggestion<? extends Entry<? extends Option>> innerExecute(String name, PhraseSuggestionContext suggestion, IndexSearcher searcher,
+            CharsRefBuilder spare) throws IOException {
         double realWordErrorLikelihood = suggestion.realworldErrorLikelyhood();
         final PhraseSuggestion response = new PhraseSuggestion(name, suggestion.getSize());
+        final IndexReader indexReader = searcher.getIndexReader();
         List<PhraseSuggestionContext.DirectCandidateGenerator> generators = suggestion.generators();
         final int numGenerators = generators.size();
         final List<CandidateGenerator> gens = new ArrayList<>(generators.size());
@@ -103,31 +98,52 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
             WordScorer wordScorer = suggestion.model().newScorer(indexReader, suggestTerms, suggestField, realWordErrorLikelihood, separator);
             Result checkerResult = checker.getCorrections(stream, new MultiCandidateGeneratorWrapper(suggestion.getShardSize(),
                     gens.toArray(new CandidateGenerator[gens.size()])), suggestion.maxErrors(),
-                    suggestion.getShardSize(), indexReader, wordScorer, separator, suggestion.confidence(), suggestion.gramSize());
+                    suggestion.getShardSize(), wordScorer, suggestion.confidence(), suggestion.gramSize());
 
             PhraseSuggestion.Entry resultEntry = buildResultEntry(suggestion, spare, checkerResult.cutoffScore);
             response.addTerm(resultEntry);
 
-            BytesRefBuilder byteSpare = new BytesRefBuilder();
-            MultiSearchResponse multiSearchResponse = collate(suggestion, checkerResult, byteSpare, spare);
-            final boolean collateEnabled = multiSearchResponse != null;
-            final boolean collatePrune = suggestion.collatePrune();
+            final BytesRefBuilder byteSpare = new BytesRefBuilder();
+            final EarlyTerminatingCollector collector = Lucene.createExistsCollector();
+            final CompiledScript collateScript;
+            if (suggestion.getCollateQueryScript() != null) {
+                collateScript = suggestion.getCollateQueryScript();
+            } else if (suggestion.getCollateFilterScript() != null) {
+                collateScript = suggestion.getCollateFilterScript();
+            } else {
+                collateScript = null;
+            }
+            final boolean collatePrune = (collateScript != null) && suggestion.collatePrune();
             for (int i = 0; i < checkerResult.corrections.length; i++) {
-                boolean collateMatch = hasMatchingDocs(multiSearchResponse, i);
+                Correction correction = checkerResult.corrections[i];
+                spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, null, null));
+                boolean collateMatch = true;
+                if (collateScript != null) {
+                    // Checks if the template query collateScript yields any documents
+                    // from the index for a correction, collateMatch is updated
+                    final Map<String, Object> vars = suggestion.getCollateScriptParams();
+                    vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString());
+                    final ExecutableScript executable = scriptService.executable(collateScript, vars);
+                    final BytesReference querySource = (BytesReference) executable.run();
+                    final ParsedQuery parsedQuery;
+                    if (suggestion.getCollateFilterScript() != null) {
+                        parsedQuery = suggestion.getQueryParserService().parse(
+                                QueryBuilders.constantScoreQuery(QueryBuilders.wrapperQuery(querySource)));
+                    } else {
+                        parsedQuery = suggestion.getQueryParserService().parse(querySource);
+                    }
+                    collateMatch = Lucene.exists(searcher, parsedQuery.query(), collector);
+                }
                 if (!collateMatch && !collatePrune) {
                     continue;
                 }
-                Correction correction = checkerResult.corrections[i];
-                spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, null, null));
                 Text phrase = new StringText(spare.toString());
                 Text highlighted = null;
                 if (suggestion.getPreTag() != null) {
                     spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, suggestion.getPreTag(), suggestion.getPostTag()));
                     highlighted = new StringText(spare.toString());
                 }
-                if (collateEnabled && collatePrune) {
+                if (collatePrune) {
                     resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score), collateMatch));
                 } else {
                     resultEntry.addOption(new Suggestion.Entry.Option(phrase, highlighted, (float) (correction.score)));
@@ -144,67 +160,6 @@ public final class PhraseSuggester extends Suggester<PhraseSuggestionContext> {
         return new PhraseSuggestion.Entry(new StringText(spare.toString()), 0, spare.length(), cutoffScore);
     }
 
-    private MultiSearchResponse collate(PhraseSuggestionContext suggestion, Result checkerResult, BytesRefBuilder byteSpare, CharsRefBuilder spare) throws IOException {
-        CompiledScript collateQueryScript = suggestion.getCollateQueryScript();
-        CompiledScript collateFilterScript = suggestion.getCollateFilterScript();
-        MultiSearchResponse multiSearchResponse = null;
-        if (collateQueryScript != null) {
-            multiSearchResponse = fetchMatchingDocCountResponses(checkerResult.corrections, collateQueryScript, false, suggestion, byteSpare, spare);
-        } else if (collateFilterScript != null) {
-            multiSearchResponse = fetchMatchingDocCountResponses(checkerResult.corrections, collateFilterScript, true, suggestion, byteSpare, spare);
-        }
-        return multiSearchResponse;
-    }
-
-    private MultiSearchResponse fetchMatchingDocCountResponses(Correction[] corrections, CompiledScript collateScript,
-                                                               boolean isFilter, PhraseSuggestionContext suggestions,
-                                                               BytesRefBuilder byteSpare, CharsRefBuilder spare) throws IOException {
-        Map<String, Object> vars = suggestions.getCollateScriptParams();
-        MultiSearchResponse multiSearchResponse = null;
-        MultiSearchRequestBuilder multiSearchRequestBuilder = client.prepareMultiSearch();
-        boolean requestAdded = false;
-        SearchRequestBuilder req;
-        for (Correction correction : corrections) {
-            spare.copyUTF8Bytes(correction.join(SEPARATOR, byteSpare, null, null));
-            vars.put(SUGGESTION_TEMPLATE_VAR_NAME, spare.toString());
ExecutableScript executable = scriptService.executable(collateScript, vars);
BytesReference querySource = (BytesReference) executable.run();
requestAdded = true;
if (isFilter) {
req = client.prepareSearch()
.setPreference(suggestions.getPreference())
.setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.wrapperQuery(querySource)))
.setSize(0)
.setTerminateAfter(1);
} else {
req = client.prepareSearch()
.setPreference(suggestions.getPreference())
.setQuery(querySource)
.setSize(0)
.setTerminateAfter(1);
}
multiSearchRequestBuilder.add(req);
}
if (requestAdded) {
multiSearchResponse = multiSearchRequestBuilder.get();
}
return multiSearchResponse;
}
private static boolean hasMatchingDocs(MultiSearchResponse multiSearchResponse, int index) {
if (multiSearchResponse == null) {
return true;
}
MultiSearchResponse.Item item = multiSearchResponse.getResponses()[index];
if (!item.isFailure()) {
SearchResponse resp = item.getResponse();
return resp.getHits().totalHits() > 0;
} else {
throw new ElasticsearchException("Collate request failed: " + item.getFailureMessage());
}
}
    ScriptService scriptService() {
        return scriptService;
    }
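With this refactoring the collate check no longer fans out a MultiSearch request per suggest phase: each correction is rendered through the template script and checked with Lucene.exists() against the local shard's IndexSearcher, which is also why the per-request preference (previously _only_local) disappears from PhraseSuggestionContext below. A minimal usage sketch, assuming an index with a "title" field (the field name and template body are illustrative; the builder methods are the ones exercised in SuggestSearchTests further down):

    // Sketch only: "title" and the match_phrase template are assumptions.
    String collateTemplate = XContentFactory.jsonBuilder()
            .startObject()
                .startObject("query")
                    .startObject("match_phrase")
                        .field("title", "{{suggestion}}")   // bound to each correction
                    .endObject()
                .endObject()
            .endObject().string();

    PhraseSuggestionBuilder suggest = new PhraseSuggestionBuilder("did_you_mean")
            .field("title")
            .collateQuery(collateTemplate)   // run as a template query on the local shard
            .collatePrune(true);             // keep all options, flag each with collate_match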

View File

@@ -25,7 +25,7 @@ import java.util.Map;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.Suggester;
@@ -33,7 +33,7 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContex
class PhraseSuggestionContext extends SuggestionContext {
    private final BytesRef SEPARATOR = new BytesRef(" ");
    private IndexQueryParserService queryParserService;
    private float maxErrors = 0.5f;
    private BytesRef separator = SEPARATOR;
    private float realworldErrorLikelihood = 0.95f;
@@ -45,7 +45,6 @@ class PhraseSuggestionContext extends SuggestionContext {
    private BytesRef postTag;
    private CompiledScript collateQueryScript;
    private CompiledScript collateFilterScript;
    private Map<String, Object> collateScriptParams = new HashMap<>(1);

    private WordScorer.WordScorerFactory scorer;
@@ -112,7 +111,15 @@ class PhraseSuggestionContext extends SuggestionContext {
    public WordScorer.WordScorerFactory model() {
        return scorer;
    }
public void setQueryParserService(IndexQueryParserService queryParserService) {
this.queryParserService = queryParserService;
}
public IndexQueryParserService getQueryParserService() {
return queryParserService;
}
    static class DirectCandidateGenerator extends DirectSpellcheckerSettings {
        private Analyzer preFilter;
        private Analyzer postFilter;
@@ -205,14 +212,6 @@ class PhraseSuggestionContext extends SuggestionContext {
        this.collateFilterScript = collateFilterScript;
    }
String getPreference() {
return preference;
}
void setPreference(String preference) {
this.preference = preference;
}
    Map<String, Object> getCollateScriptParams() {
        return collateScriptParams;
    }

View File

@@ -22,6 +22,7 @@ import java.io.IOException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.search.suggest.DirectSpellcheckerSettings;
import org.elasticsearch.search.suggest.SuggestContextParser;
import org.elasticsearch.search.suggest.SuggestUtils;
@@ -36,7 +37,7 @@ public final class TermSuggestParser implements SuggestContextParser {
    }

    @Override
    public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException {
        XContentParser.Token token;
        String fieldName = null;
        TermSuggestionContext suggestion = new TermSuggestionContext(suggester);

View File

@@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest.term;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.spell.DirectSpellChecker;
import org.apache.lucene.search.spell.SuggestWord;
import org.apache.lucene.util.BytesRef;
@@ -41,9 +42,9 @@ import java.util.List;
public final class TermSuggester extends Suggester<TermSuggestionContext> {

    @Override
    public TermSuggestion innerExecute(String name, TermSuggestionContext suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException {
        DirectSpellChecker directSpellChecker = SuggestUtils.getDirectSpellChecker(suggestion.getDirectSpellCheckerSettings());
        final IndexReader indexReader = searcher.getIndexReader();
        TermSuggestion response = new TermSuggestion(
                name, suggestion.getSize(), suggestion.getDirectSpellCheckerSettings().sort()
        );

View File

@@ -35,6 +35,8 @@ import org.elasticsearch.index.IndexService;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -42,7 +44,6 @@
import static org.hamcrest.Matchers.*;

public class DefaultSourceMappingTests extends ElasticsearchSingleNodeTest {

    public void testNoFormat() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
@@ -80,8 +81,8 @@
        documentMapper = parser.parse(mapping);
        doc = documentMapper.parse("type", "1", XContentFactory.smileBuilder().startObject()
                .field("field", "value")
                .endObject().bytes());
        assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.JSON));
    }
@@ -91,6 +92,7 @@
            .startObject("_source").field("format", "json").field("compress", true).endObject()
            .endObject().endObject().string();

        Settings backcompatSettings = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id).build();
        DocumentMapperParser parser = createIndex("test", backcompatSettings).mapperService().documentMapperParser();
        DocumentMapper documentMapper = parser.parse(mapping);
        ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
@@ -111,19 +113,12 @@
        assertThat(XContentFactory.xContentType(uncompressed), equalTo(XContentType.JSON));
    }

    public void testIncludes() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_source").field("includes", new String[]{"path1*"}).endObject()
                .endObject().endObject().string();

        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
                .startObject("path1").field("field1", "value1").endObject()
@@ -136,19 +131,12 @@
        assertThat(sourceAsMap.containsKey("path2"), equalTo(false));
    }

    public void testExcludes() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_source").field("excludes", new String[]{"path1*"}).endObject()
                .endObject().endObject().string();

        DocumentMapper documentMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = documentMapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
                .startObject("path1").field("field1", "value1").endObject()
@@ -161,12 +149,12 @@
        assertThat(sourceAsMap.containsKey("path2"), equalTo(true));
    }

    public void testDefaultMappingAndNoMapping() throws Exception {
        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING)
                .startObject("_source").field("enabled", false).endObject()
                .endObject().endObject().string();

        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
        DocumentMapper mapper = parser.parse("my_type", null, defaultMapping);
        assertThat(mapper.type(), equalTo("my_type"));
        assertThat(mapper.sourceMapper().enabled(), equalTo(false));
@@ -189,7 +177,7 @@
        }
    }

    public void testDefaultMappingAndWithMappingOverride() throws Exception {
        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING)
                .startObject("_source").field("enabled", false).endObject()
                .endObject().endObject().string();
@@ -198,17 +186,17 @@
                .startObject("_source").field("enabled", true).endObject()
                .endObject().endObject().string();

        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("my_type", mapping, defaultMapping);
        assertThat(mapper.type(), equalTo("my_type"));
        assertThat(mapper.sourceMapper().enabled(), equalTo(true));
    }

    public void testDefaultMappingAndNoMappingWithMapperService() throws Exception {
        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING)
                .startObject("_source").field("enabled", false).endObject()
                .endObject().endObject().string();

        MapperService mapperService = createIndex("test").mapperService();
        mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(defaultMapping), true);
        DocumentMapper mapper = mapperService.documentMapperWithAutoCreate("my_type").v1();
@@ -216,12 +204,12 @@
        assertThat(mapper.sourceMapper().enabled(), equalTo(false));
    }

    public void testDefaultMappingAndWithMappingOverrideWithMapperService() throws Exception {
        String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject(MapperService.DEFAULT_MAPPING)
                .startObject("_source").field("enabled", false).endObject()
                .endObject().endObject().string();

        MapperService mapperService = createIndex("test").mapperService();
        mapperService.merge(MapperService.DEFAULT_MAPPING, new CompressedString(defaultMapping), true);

        String mapping = XContentFactory.jsonBuilder().startObject().startObject("my_type")
@@ -233,4 +221,90 @@
        assertThat(mapper.type(), equalTo("my_type"));
        assertThat(mapper.sourceMapper().enabled(), equalTo(true));
    }
void assertConflicts(String mapping1, String mapping2, DocumentMapperParser parser, String... conflicts) throws IOException {
DocumentMapper docMapper = parser.parse(mapping1);
docMapper.refreshSource();
docMapper = parser.parse(docMapper.mappingSource().string());
MergeResult mergeResult = docMapper.merge(parser.parse(mapping2).mapping(), true);
List<String> expectedConflicts = new ArrayList<>(Arrays.asList(conflicts));
for (String conflict : mergeResult.buildConflicts()) {
assertTrue("found unexpected conflict [" + conflict + "]", expectedConflicts.remove(conflict));
}
assertTrue("missing conflicts: " + Arrays.toString(expectedConflicts.toArray()), expectedConflicts.isEmpty());
}
public void testEnabledNotUpdateable() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
// using default of true
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").field("enabled", false).endObject()
.endObject().endObject().string();
assertConflicts(mapping1, mapping2, parser, "Cannot update enabled setting for [_source]");
// not changing is ok
String mapping3 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").field("enabled", true).endObject()
.endObject().endObject().string();
assertConflicts(mapping1, mapping3, parser);
}
public void testIncludesNotUpdateable() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").array("includes", "foo.*").endObject()
.endObject().endObject().string();
assertConflicts(defaultMapping, mapping1, parser, "Cannot update includes setting for [_source]");
assertConflicts(mapping1, defaultMapping, parser, "Cannot update includes setting for [_source]");
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").array("includes", "foo.*", "bar.*").endObject()
.endObject().endObject().string();
assertConflicts(mapping1, mapping2, parser, "Cannot update includes setting for [_source]");
// not changing is ok
assertConflicts(mapping1, mapping1, parser);
}
public void testExcludesNotUpdateable() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String defaultMapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").array("excludes", "foo.*").endObject()
.endObject().endObject().string();
assertConflicts(defaultMapping, mapping1, parser, "Cannot update excludes setting for [_source]");
assertConflicts(mapping1, defaultMapping, parser, "Cannot update excludes setting for [_source]");
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").array("excludes", "foo.*", "bar.*").endObject()
.endObject().endObject().string();
assertConflicts(mapping1, mapping2, parser, "Cannot update excludes setting for [_source]");
// not changing is ok
assertConflicts(mapping1, mapping1, parser);
}
public void testComplete() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();
assertTrue(parser.parse(mapping).sourceMapper().isComplete());
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").field("enabled", false).endObject()
.endObject().endObject().string();
assertFalse(parser.parse(mapping).sourceMapper().isComplete());
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").array("includes", "foo.*").endObject()
.endObject().endObject().string();
assertFalse(parser.parse(mapping).sourceMapper().isComplete());
mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_source").array("excludes", "foo.*").endObject()
.endObject().endObject().string();
assertFalse(parser.parse(mapping).sourceMapper().isComplete());
}
}

View File

@@ -212,102 +212,6 @@ public class UpdateMappingIntegrationTests extends ElasticsearchIntegrationTest
        assertThat(putMappingResponse.isAcknowledged(), equalTo(true));
    }
@SuppressWarnings("unchecked")
@Test
public void updateIncludeExcludeBackcompat() throws Exception {
assertAcked(prepareCreate("test").setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_1_4_2.id)
.addMapping("type", jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("normal").field("type", "long").endObject()
.startObject("exclude").field("type", "long").endObject()
.startObject("include").field("type", "long").endObject()
.endObject().endObject().endObject()));
ensureGreen(); // make sure that replicas are initialized so the refresh command will work on them too
logger.info("Index doc");
index("test", "type", "1", JsonXContent.contentBuilder().startObject()
.field("normal", 1).field("exclude", 1).field("include", 1)
.endObject()
);
refresh(); // commit it for later testing.
logger.info("Adding exclude settings");
PutMappingResponse putResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource(
JsonXContent.contentBuilder().startObject().startObject("type")
.startObject("_source")
.startArray("excludes").value("exclude").endArray()
.endObject().endObject()
).get();
assertTrue(putResponse.isAcknowledged());
// changed mapping doesn't affect indexed documents (checking backward compatibility)
GetResponse getResponse = client().prepareGet("test", "type", "1").setRealtime(false).get();
assertThat(getResponse.getSource(), hasKey("normal"));
assertThat(getResponse.getSource(), hasKey("exclude"));
assertThat(getResponse.getSource(), hasKey("include"));
logger.info("Index doc again");
index("test", "type", "1", JsonXContent.contentBuilder().startObject()
.field("normal", 2).field("exclude", 1).field("include", 2)
.endObject()
);
// but do affect newly indexed docs
getResponse = get("test", "type", "1");
assertThat(getResponse.getSource(), hasKey("normal"));
assertThat(getResponse.getSource(), not(hasKey("exclude")));
assertThat(getResponse.getSource(), hasKey("include"));
logger.info("Changing mapping to includes");
putResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource(
JsonXContent.contentBuilder().startObject().startObject("type")
.startObject("_source")
.startArray("excludes").endArray()
.startArray("includes").value("include").endArray()
.endObject().endObject()
).get();
assertTrue(putResponse.isAcknowledged());
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
MappingMetaData typeMapping = getMappingsResponse.getMappings().get("test").get("type");
assertThat((Map<String, Object>) typeMapping.getSourceAsMap().get("_source"), hasKey("includes"));
ArrayList<String> includes = (ArrayList<String>) ((Map<String, Object>) typeMapping.getSourceAsMap().get("_source")).get("includes");
assertThat(includes, contains("include"));
assertThat((Map<String, Object>) typeMapping.getSourceAsMap().get("_source"), hasKey("excludes"));
assertThat((ArrayList<String>) ((Map<String, Object>) typeMapping.getSourceAsMap().get("_source")).get("excludes"), emptyIterable());
logger.info("Indexing doc yet again");
index("test", "type", "1", JsonXContent.contentBuilder().startObject()
.field("normal", 3).field("exclude", 3).field("include", 3)
.endObject()
);
getResponse = get("test", "type", "1");
assertThat(getResponse.getSource(), not(hasKey("normal")));
assertThat(getResponse.getSource(), not(hasKey("exclude")));
assertThat(getResponse.getSource(), hasKey("include"));
logger.info("Adding excludes, but keep includes");
putResponse = client().admin().indices().preparePutMapping("test").setType("type").setSource(
JsonXContent.contentBuilder().startObject().startObject("type")
.startObject("_source")
.startArray("excludes").value("*.excludes").endArray()
.endObject().endObject()
).get();
assertTrue(putResponse.isAcknowledged());
getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
typeMapping = getMappingsResponse.getMappings().get("test").get("type");
assertThat((Map<String, Object>) typeMapping.getSourceAsMap().get("_source"), hasKey("includes"));
includes = (ArrayList<String>) ((Map<String, Object>) typeMapping.getSourceAsMap().get("_source")).get("includes");
assertThat(includes, contains("include"));
assertThat((Map<String, Object>) typeMapping.getSourceAsMap().get("_source"), hasKey("excludes"));
ArrayList<String> excludes = (ArrayList<String>) ((Map<String, Object>) typeMapping.getSourceAsMap().get("_source")).get("excludes");
assertThat(excludes, contains("*.excludes"));
}
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@Test @Test
public void updateDefaultMappingSettings() throws Exception { public void updateDefaultMappingSettings() throws Exception {

View File

@@ -18,19 +18,18 @@
 */
package org.elasticsearch.plugins;

import org.apache.http.impl.client.HttpClients;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.responseheader.TestResponseHeaderPlugin;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.elasticsearch.test.rest.client.http.HttpResponse;
import org.junit.Test;

import static org.elasticsearch.rest.RestStatus.OK;
import static org.elasticsearch.rest.RestStatus.UNAUTHORIZED;
import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus;
import static org.hamcrest.Matchers.equalTo;
/**
@@ -52,11 +51,11 @@ public class ResponseHeaderPluginTests extends ElasticsearchIntegrationTest {
    public void testThatSettingHeadersWorks() throws Exception {
        ensureGreen();
        HttpResponse response = httpClient().method("GET").path("/_protected").execute();
        assertThat(response, hasStatus(UNAUTHORIZED));
        assertThat(response.getHeaders().get("Secret"), equalTo("required"));

        HttpResponse authResponse = httpClient().method("GET").path("/_protected").addHeader("Secret", "password").execute();
        assertThat(authResponse, hasStatus(OK));
        assertThat(authResponse.getHeaders().get("Secret"), equalTo("granted"));
    }

View File

@@ -0,0 +1,90 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.plugins;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
import org.elasticsearch.test.rest.client.http.HttpResponse;
import org.junit.Test;
import java.nio.file.Path;
import static org.apache.lucene.util.Constants.WINDOWS;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.rest.RestStatus.OK;
import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope.SUITE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus;
@ClusterScope(scope = SUITE, numDataNodes = 1)
public class SitePluginRelativePathConfigTests extends ElasticsearchIntegrationTest {
private final Path root = PathUtils.get(".").toAbsolutePath().getRoot();
@Override
protected Settings nodeSettings(int nodeOrdinal) {
String cwdToRoot = getRelativePath(PathUtils.get(".").toAbsolutePath());
Path pluginDir = PathUtils.get(cwdToRoot, relativizeToRootIfNecessary(getDataPath("/org/elasticsearch/plugins")).toString());
Path tempDir = createTempDir();
boolean useRelativeInMiddleOfPath = randomBoolean();
if (useRelativeInMiddleOfPath) {
pluginDir = PathUtils.get(tempDir.toString(), getRelativePath(tempDir), pluginDir.toString());
}
return settingsBuilder()
.put(super.nodeSettings(nodeOrdinal))
.put("path.plugins", pluginDir)
.put("force.http.enabled", true)
.build();
}
@Test
public void testThatRelativePathsDontAffectPlugins() throws Exception {
HttpResponse response = httpClient().method("GET").path("/_plugin/dummy/").execute();
assertThat(response, hasStatus(OK));
}
private Path relativizeToRootIfNecessary(Path path) {
if (WINDOWS) {
return root.relativize(path);
}
return path;
}
    private String getRelativePath(Path path) {
        // one ".." per name element walks from the given path back up to the filesystem root
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < path.getNameCount(); i++) {
            sb.append("..");
            sb.append(path.getFileSystem().getSeparator());
        }
        return sb.toString();
    }
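    For intuition, resolving the produced string against the path it was derived from normalizes back to the filesystem root (the path below is hypothetical):

        Path example = PathUtils.get("/opt/es/plugins");   // three name elements
        String rel = getRelativePath(example);             // "../../../" on a Unix file system
        assert example.resolve(rel).normalize().equals(example.getRoot());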
public HttpRequestBuilder httpClient() {
CloseableHttpClient httpClient = HttpClients.createDefault();
return new HttpRequestBuilder(httpClient).httpTransport(internalCluster().getDataNodeInstance(HttpServerTransport.class));
}
}

View File

@@ -21,27 +21,24 @@ package org.elasticsearch.plugins;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest.ClusterScope;
import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
import org.elasticsearch.test.rest.client.http.HttpResponse;
import org.junit.Test;

import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.rest.RestStatus.*;
import static org.elasticsearch.test.ElasticsearchIntegrationTest.Scope;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus;
import static org.hamcrest.Matchers.containsString;
/**
 * We want to test site plugins
@@ -70,12 +67,12 @@
    public void testRedirectSitePlugin() throws Exception {
        // We use an HTTP Client to test redirection
        HttpResponse response = httpClient().method("GET").path("/_plugin/dummy").execute();
        assertThat(response, hasStatus(MOVED_PERMANENTLY));
        assertThat(response.getBody(), containsString("/_plugin/dummy/"));

        // We test the real URL
        response = httpClient().method("GET").path("/_plugin/dummy/").execute();
        assertThat(response, hasStatus(OK));
        assertThat(response.getBody(), containsString("<title>Dummy Site Plugin</title>"));
    }
@@ -85,7 +82,7 @@
    @Test
    public void testAnyPage() throws Exception {
        HttpResponse response = httpClient().path("/_plugin/dummy/index.html").execute();
        assertThat(response, hasStatus(OK));
        assertThat(response.getBody(), containsString("<title>Dummy Site Plugin</title>"));
    }
@@ -108,12 +105,12 @@
        for (String uri : notFoundUris) {
            HttpResponse response = httpClient().path(uri).execute();
            String message = String.format(Locale.ROOT, "URI [%s] expected to be not found", uri);
            assertThat(message, response, hasStatus(NOT_FOUND));
        }

        // using relative path inside of the plugin should work
        HttpResponse response = httpClient().path("/_plugin/dummy/dir1/../dir1/../index.html").execute();
        assertThat(response, hasStatus(OK));
        assertThat(response.getBody(), containsString("<title>Dummy Site Plugin</title>"));
    }
@@ -124,14 +121,14 @@
    @Test
    public void testWelcomePageInSubDirs() throws Exception {
        HttpResponse response = httpClient().path("/_plugin/subdir/dir/").execute();
        assertThat(response, hasStatus(OK));
        assertThat(response.getBody(), containsString("<title>Dummy Site Plugin (subdir)</title>"));

        response = httpClient().path("/_plugin/subdir/dir_without_index/").execute();
        assertThat(response, hasStatus(FORBIDDEN));

        response = httpClient().path("/_plugin/subdir/dir_without_index/page.html").execute();
        assertThat(response, hasStatus(OK));
        assertThat(response.getBody(), containsString("<title>Dummy Site Plugin (page)</title>"));
    }
}

View File

@@ -125,6 +125,72 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
        assertEquals(1983.0, hits.getAt(1).field("foo").getValue());
    }
public void testMultiValueMethods() throws Exception {
ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double"));
ensureGreen("test");
indexRandom(true,
client().prepareIndex("test", "doc", "1").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double1", "1.2", "double1", "2.4"),
client().prepareIndex("test", "doc", "2").setSource("double0", "5.0", "double1", "3.0"),
client().prepareIndex("test", "doc", "3").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double0", "-1.5", "double1", "4.0"));
SearchResponse rsp = buildRequest("doc['double0'].count() + doc['double1'].count()").get();
assertSearchResponse(rsp);
SearchHits hits = rsp.getHits();
assertEquals(3, hits.getTotalHits());
assertEquals(5.0, hits.getAt(0).field("foo").getValue());
assertEquals(2.0, hits.getAt(1).field("foo").getValue());
assertEquals(5.0, hits.getAt(2).field("foo").getValue());
rsp = buildRequest("doc['double0'].sum()").get();
assertSearchResponse(rsp);
hits = rsp.getHits();
assertEquals(3, hits.getTotalHits());
assertEquals(7.5, hits.getAt(0).field("foo").getValue());
assertEquals(5.0, hits.getAt(1).field("foo").getValue());
assertEquals(6.0, hits.getAt(2).field("foo").getValue());
rsp = buildRequest("doc['double0'].avg() + doc['double1'].avg()").get();
assertSearchResponse(rsp);
hits = rsp.getHits();
assertEquals(3, hits.getTotalHits());
assertEquals(4.3, hits.getAt(0).field("foo").getValue());
assertEquals(8.0, hits.getAt(1).field("foo").getValue());
assertEquals(5.5, hits.getAt(2).field("foo").getValue());
rsp = buildRequest("doc['double0'].median()").get();
assertSearchResponse(rsp);
hits = rsp.getHits();
assertEquals(3, hits.getTotalHits());
assertEquals(1.5, hits.getAt(0).field("foo").getValue());
assertEquals(5.0, hits.getAt(1).field("foo").getValue());
assertEquals(1.25, hits.getAt(2).field("foo").getValue());
rsp = buildRequest("doc['double0'].min()").get();
assertSearchResponse(rsp);
hits = rsp.getHits();
assertEquals(3, hits.getTotalHits());
assertEquals(1.0, hits.getAt(0).field("foo").getValue());
assertEquals(5.0, hits.getAt(1).field("foo").getValue());
assertEquals(-1.5, hits.getAt(2).field("foo").getValue());
rsp = buildRequest("doc['double0'].max()").get();
assertSearchResponse(rsp);
hits = rsp.getHits();
assertEquals(3, hits.getTotalHits());
assertEquals(5.0, hits.getAt(0).field("foo").getValue());
assertEquals(5.0, hits.getAt(1).field("foo").getValue());
assertEquals(5.0, hits.getAt(2).field("foo").getValue());
rsp = buildRequest("doc['double0'].sum()/doc['double0'].count()").get();
assertSearchResponse(rsp);
hits = rsp.getHits();
assertEquals(3, hits.getTotalHits());
assertEquals(2.5, hits.getAt(0).field("foo").getValue());
assertEquals(5.0, hits.getAt(1).field("foo").getValue());
assertEquals(1.5, hits.getAt(2).field("foo").getValue());
}
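Two of the expectations above are worth checking by hand; a tiny standalone sketch (the values are copied from the documents indexed at the start of the test, the class itself is not part of the suite):

    public class ExpressionExpectations {
        public static void main(String[] args) {
            // doc 1: avg(double0 = [5.0, 1.0, 1.5]) + avg(double1 = [1.2, 2.4])
            System.out.println(7.5 / 3 + 3.6 / 2);   // 2.5 + 1.8 = 4.3
            // doc 3: median(double0 sorted = [-1.5, 1.0, 1.5, 5.0]) averages the middle pair
            System.out.println((1.0 + 1.5) / 2);     // 1.25
        }
    }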
    public void testInvalidDateMethodCall() throws Exception {
        ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double", "type=double"));
        ensureGreen("test");

View File

@@ -32,6 +32,8 @@ import org.elasticsearch.test.ElasticsearchTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class MultiValueModeTests extends ElasticsearchTestCase {
@@ -122,6 +124,9 @@
    private void verify(SortedNumericDocValues values, int maxDoc) {
        for (long missingValue : new long[] { 0, randomLong() }) {
            for (MultiValueMode mode : MultiValueMode.values()) {
                if (MultiValueMode.MEDIAN.equals(mode)) {
                    continue;
                }
                final NumericDocValues selected = mode.select(values, missingValue);
                for (int i = 0; i < maxDoc; ++i) {
                    final long actual = selected.get(i);
@@ -147,6 +152,9 @@
    private void verify(SortedNumericDocValues values, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
        for (long missingValue : new long[] { 0, randomLong() }) {
            for (MultiValueMode mode : MultiValueMode.values()) {
                if (MultiValueMode.MEDIAN.equals(mode)) {
                    continue;
                }
                final NumericDocValues selected = mode.select(values, missingValue, rootDocs, new BitDocIdSet(innerDocs), maxDoc);
                int prevRoot = -1;
                for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) {
@@ -239,6 +247,9 @@
    private void verify(SortedNumericDoubleValues values, int maxDoc) {
        for (long missingValue : new long[] { 0, randomLong() }) {
            for (MultiValueMode mode : MultiValueMode.values()) {
                if (MultiValueMode.MEDIAN.equals(mode)) {
                    continue;
                }
                final NumericDoubleValues selected = mode.select(values, missingValue);
                for (int i = 0; i < maxDoc; ++i) {
                    final double actual = selected.get(i);
@@ -264,6 +275,9 @@
    private void verify(SortedNumericDoubleValues values, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
        for (long missingValue : new long[] { 0, randomLong() }) {
            for (MultiValueMode mode : MultiValueMode.values()) {
                if (MultiValueMode.MEDIAN.equals(mode)) {
                    continue;
                }
                final NumericDoubleValues selected = mode.select(values, missingValue, rootDocs, new BitDocIdSet(innerDocs), maxDoc);
                int prevRoot = -1;
                for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) {
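The MEDIAN mode is excluded from the generic verification loops above. Its selection semantics amount to the following sketch (assuming the Lucene 5.x SortedNumericDocValues API these tests already use; this is not the MultiValueMode source):

    import org.apache.lucene.index.SortedNumericDocValues;

    final class MedianSelectSketch {
        static long median(SortedNumericDocValues values, int doc) {
            values.setDocument(doc);
            final int count = values.count();        // per-document values come back sorted
            if (count == 0) {
                return 0;                            // caller substitutes the missing value
            }
            if (count % 2 == 1) {
                return values.valueAt(count / 2);    // odd count: middle value
            }
            // even count: average of the two middle values
            return (values.valueAt(count / 2 - 1) + values.valueAt(count / 2)) / 2;
        }
    }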

View File

@@ -562,10 +562,9 @@
    }

    @Test
    public void testForceSourceWithSourceDisabled() throws Exception {
        assertAcked(prepareCreate("test")
                .addMapping("type1", jsonBuilder().startObject().startObject("type1")
                        .startObject("_source").field("enabled", false).endObject()
                        .startObject("properties")

View File

@@ -435,6 +435,34 @@
    }
@Test // see issue #10987
public void testEmptySuggestion() throws Exception {
String mapping = jsonBuilder()
.startObject()
.startObject(TYPE)
.startObject("properties")
.startObject(FIELD)
.field("type", "completion")
.startObject("context")
.startObject("type_context")
.field("path", "_type")
.field("type", "category")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.string();
assertAcked(client().admin().indices().prepareCreate(INDEX).addMapping(TYPE, mapping).get());
ensureGreen();
client().prepareIndex(INDEX, TYPE, "1").setSource(FIELD, "")
.setRefresh(true).get();
}
    @Test
    public void testMultiValueField() throws Exception {
        assertAcked(prepareCreate(INDEX).addMapping(TYPE, createMapping(TYPE, ContextBuilder.reference("st", "category"))));

View File

@@ -18,11 +18,12 @@
 */
package org.elasticsearch.search.suggest;

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.common.text.StringText;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;

import java.io.IOException;
import java.util.Locale;
@@ -36,7 +37,7 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestions

    // This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123
    @Override
    public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(String name, CustomSuggestionsContext suggestion, IndexSearcher searcher, CharsRefBuilder spare) throws IOException {

        // Get the suggestion context
        String text = suggestion.getText().utf8ToString();
@@ -63,7 +64,7 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestions
    public SuggestContextParser getContextParser() {
        return new SuggestContextParser() {
            @Override
            public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException {
                Map<String, Object> options = parser.map();
                CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options);
                suggestionContext.setField((String) options.get("field"));

View File

@@ -1247,12 +1247,29 @@ public class SuggestSearchTests extends ElasticsearchIntegrationTest {
            // expected
        }

        // collate query request with prune set to true
        PhraseSuggestionBuilder phraseSuggestWithParamsAndReturn = suggest.collateFilter(null).collateQuery(collateWithParams).collateParams(params).collatePrune(true);
        searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn);
        assertSuggestionSize(searchSuggest, 0, 10, "title");
        assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2);
collateWithParams = XContentFactory.jsonBuilder()
.startObject()
.startObject("query")
.startObject("{{query_type}}")
.field("{{query_field}}", "{{suggestion}}")
.endObject()
.endObject()
.endObject().string();
params.clear();
params.put("query_type", "match_phrase");
params.put("query_field", "title");
// collate filter request with prune set to true
phraseSuggestWithParamsAndReturn = suggest.collateFilter(collateWithParams).collateQuery(null).collateParams(params).collatePrune(true);
searchSuggest = searchSuggest("united states house of representatives elections in washington 2006", phraseSuggestWithParamsAndReturn);
assertSuggestionSize(searchSuggest, 0, 10, "title");
assertSuggestionPhraseCollateMatchExists(searchSuggest, "title", 2);
} }
protected Suggest searchSuggest(SuggestionBuilder<?>... suggestion) { protected Suggest searchSuggest(SuggestionBuilder<?>... suggestion) {
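For orientation: the Mustache template built in this test is rendered once per candidate suggestion, with `{{suggestion}}` bound automatically by the phrase suggester and the remaining variables taken from `params`. A sketch of the moving parts — the rendered query in the comment is an approximation, not output from this commit:

[source,java]
--------------------------------------------------
Map<String, Object> params = new HashMap<>();
params.put("query_type", "match_phrase");
params.put("query_field", "title");
// Per candidate, the collate template then renders to approximately:
//   { "query": { "match_phrase": { "title": "<candidate suggestion>" } } }
// With collatePrune(true), candidates that match nothing on the local shard are
// kept in the response but flagged with collate_match = false instead of dropped.
--------------------------------------------------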

View File

@@ -243,6 +243,26 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase
     public @interface Integration {
     }

+    /**
+     * Property that controls whether ThirdParty Integration tests are run (not the default).
+     */
+    public static final String SYSPROP_THIRDPARTY = "tests.thirdparty";
+
+    /**
+     * Annotation for third-party integration tests.
+     * <p>
+     * These are tests that require a third-party service in order to run. They
+     * may require the user to manually configure an external process (such as rabbitmq),
+     * or may additionally require some external configuration (e.g. AWS credentials)
+     * via the {@code tests.config} system property.
+     */
+    @Inherited
+    @Retention(RetentionPolicy.RUNTIME)
+    @Target(ElementType.TYPE)
+    @TestGroup(enabled = false, sysProperty = ElasticsearchIntegrationTest.SYSPROP_THIRDPARTY)
+    public @interface ThirdParty {
+    }
+
     /** node names of the corresponding clusters will start with these prefixes */
     public static final String SUITE_CLUSTER_NODE_PREFIX = "node_s";
     public static final String TEST_CLUSTER_NODE_PREFIX = "node_t";
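A hypothetical consumer of the new test group (the class name and external service below are illustrative, not part of this commit) — the suite is skipped unless the property is set on the command line:

[source,java]
--------------------------------------------------
// Enabled via: mvn test -Dtests.thirdparty=true -Dtests.config=/path/to/credentials.yml
@ElasticsearchIntegrationTest.ThirdParty
public class CloudSnapshotRestoreTests extends ElasticsearchIntegrationTest {
    // tests that talk to the external third-party service go here
}
--------------------------------------------------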

View File

@@ -66,6 +66,7 @@ import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.test.VersionUtils;
+import org.elasticsearch.test.rest.client.http.HttpResponse;
 import org.hamcrest.Matcher;
 import org.hamcrest.Matchers;
 import org.junit.Assert;
@@ -490,6 +491,10 @@ public class ElasticsearchAssertions {
         return new ElasticsearchMatchers.SearchHitHasScoreMatcher(score);
     }

+    public static Matcher<HttpResponse> hasStatus(RestStatus restStatus) {
+        return new ElasticsearchMatchers.HttpResponseHasStatusMatcher(restStatus);
+    }
+
     public static <T extends Query> T assertBooleanSubQuery(Query query, Class<T> subqueryType, int i) {
         assertThat(query, instanceOf(BooleanQuery.class));
         BooleanQuery q = (BooleanQuery) query;
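With the new factory method, a REST-level assertion reads as follows. The request itself is illustrative and assumes the test framework's existing `httpClient()` helper, which returns an `HttpRequestBuilder`:

[source,java]
--------------------------------------------------
HttpResponse response = httpClient().method("HEAD").path("/no_such_index").execute();
assertThat(response, hasStatus(RestStatus.NOT_FOUND));
--------------------------------------------------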

View File

@@ -18,8 +18,11 @@
  */
 package org.elasticsearch.test.hamcrest;

+import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.test.rest.client.http.HttpResponse;
 import org.hamcrest.Description;
+import org.hamcrest.Matcher;
 import org.hamcrest.TypeSafeMatcher;

 public class ElasticsearchMatchers {
@@ -115,4 +118,28 @@ public class ElasticsearchMatchers {
             description.appendText("searchHit score should be ").appendValue(score);
         }
     }
+
+    public static class HttpResponseHasStatusMatcher extends TypeSafeMatcher<HttpResponse> {
+
+        private RestStatus restStatus;
+
+        public HttpResponseHasStatusMatcher(RestStatus restStatus) {
+            this.restStatus = restStatus;
+        }
+
+        @Override
+        protected boolean matchesSafely(HttpResponse response) {
+            return response.getStatusCode() == restStatus.getStatus();
+        }
+
+        @Override
+        public void describeMismatchSafely(final HttpResponse response, final Description mismatchDescription) {
+            mismatchDescription.appendText(" was ").appendValue(response.getStatusCode());
+        }
+
+        @Override
+        public void describeTo(final Description description) {
+            description.appendText("HTTP response status code should be ").appendValue(restStatus.getStatus());
+        }
+    }
 }
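On an assertion failure, hamcrest stitches `describeTo` and `describeMismatchSafely` together, so a mismatch would surface along these lines (status codes illustrative):

    Expected: HTTP response status code should be <200>
         but:  was <503>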