Merge branch 'master' into feature/query-refactoring

commit 7162c45ca7
Author: javanna
Committed by: Luca Cavanna
Date: 2015-06-19 10:23:48 +02:00
50 changed files with 638 additions and 301 deletions
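The dominant change in this merge is mechanical: direct reads of the protected fieldType field give way to the fieldType() accessor throughout the mapper classes. A minimal sketch of why, using simplified stand-ins for the real Elasticsearch classes (not code from this commit): subclasses override the accessor with a covariant return type, so base-class code that goes through fieldType() automatically sees the narrowed subclass view.

    // Hedged illustration only; these are stand-ins, not the real API.
    class MappedFieldType { }
    class BooleanFieldType extends MappedFieldType {
        Boolean nullValue() { return null; } // subtype-specific state
    }

    abstract class AbstractFieldMapper {
        protected MappedFieldType fieldType;
        // Shared code now calls this accessor instead of reading the field...
        public MappedFieldType fieldType() { return fieldType; }
    }

    class BooleanFieldMapper extends AbstractFieldMapper {
        // ...because subclasses narrow the return type, as the
        // BooleanFieldMapper hunk below does via super.fieldType().
        @Override
        public BooleanFieldType fieldType() { return (BooleanFieldType) super.fieldType(); }
    }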

View File

@@ -25,7 +25,6 @@ import org.elasticsearch.cluster.routing.SimpleHashFunction;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import com.google.common.collect.ImmutableSet;
@@ -181,7 +180,7 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
"index.translog.flush_threshold_period",
"index.translog.interval",
"index.translog.sync_interval",
UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT);
UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING);
/**
* Elasticsearch 2.0 requires units on byte/memory and time settings; this method adds the default unit to any such settings that are

View File

@@ -146,6 +146,7 @@ public class RoutingService extends AbstractLifecycleComponent<RoutingService> i
@Override
protected void doRun() throws Exception {
routingTableDirty = true;
registeredNextDelaySetting = Long.MAX_VALUE;
reroute();
}
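A hedged reading of the added line: Long.MAX_VALUE appears to serve as the "no delay registered" sentinel, so resetting it before reroute() lets the next unassigned-shard event schedule a fresh delayed reroute. A sketch of that sentinel idiom, with invented names:

    // Sketch only; the class and member names are invented for illustration.
    class DelayedRerouteScheduler {
        private volatile long registeredNextDelay = Long.MAX_VALUE; // MAX_VALUE == nothing scheduled

        void maybeSchedule(long nextDelayMillis) {
            if (nextDelayMillis < registeredNextDelay) { // only a sooner deadline re-schedules
                registeredNextDelay = nextDelayMillis;
                // ...arrange for run() to fire after nextDelayMillis
            }
        }

        void run() {
            registeredNextDelay = Long.MAX_VALUE; // reset first, as the hunk above does
            // reroute();
        }
    }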

View File

@@ -41,8 +41,8 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime");
public static final String DELAYED_NODE_LEFT_TIMEOUT = "index.unassigned.node_left.delayed_timeout";
public static final TimeValue DEFAULT_DELAYED_NODE_LEFT_TIMEOUT = TimeValue.timeValueMillis(0);
public static final String INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING = "index.unassigned.node_left.delayed_timeout";
private static final TimeValue DEFAULT_DELAYED_NODE_LEFT_TIMEOUT = TimeValue.timeValueMillis(0);
/**
* Reason why the shard is in unassigned state.
@@ -155,7 +155,7 @@ public class UnassignedInfo implements ToXContent, Writeable<UnassignedInfo> {
if (reason != Reason.NODE_LEFT) {
return 0;
}
TimeValue delayTimeout = indexSettings.getAsTime(DELAYED_NODE_LEFT_TIMEOUT, settings.getAsTime(DELAYED_NODE_LEFT_TIMEOUT, DEFAULT_DELAYED_NODE_LEFT_TIMEOUT));
TimeValue delayTimeout = indexSettings.getAsTime(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, settings.getAsTime(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, DEFAULT_DELAYED_NODE_LEFT_TIMEOUT));
return Math.max(0l, delayTimeout.millis());
}
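Worth noting: only the Java identifiers change here (DELAYED_NODE_LEFT_TIMEOUT becomes INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, and the default constant goes private); the setting key is still "index.unassigned.node_left.delayed_timeout", so existing per-index configuration is unaffected. An annotated restatement of the changed lookup, added for clarity rather than new behavior:

    // 1) the per-index setting wins; 2) the node-level setting is the fallback;
    // 3) the 0ms default means delayed allocation is off unless configured.
    TimeValue delayTimeout = indexSettings.getAsTime(
            INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING,
            settings.getAsTime(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING,
                    DEFAULT_DELAYED_NODE_LEFT_TIMEOUT));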

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.index.mapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import org.apache.lucene.document.Field;

View File

@@ -304,12 +304,12 @@ public abstract class AbstractFieldMapper implements FieldMapper {
this.fieldType = fieldType.clone();
if (fieldType.indexAnalyzer() == null && fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE) {
this.fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
this.fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
this.fieldType().setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
this.fieldType().setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
}
this.fieldType.setHasDocValues(docValues == null ? defaultDocValues() : docValues);
this.fieldType.setFieldDataType(fieldDataType);
this.fieldType.freeze();
this.fieldType().setHasDocValues(docValues == null ? defaultDocValues() : docValues);
this.fieldType().setFieldDataType(fieldDataType);
this.fieldType().freeze();
this.multiFields = multiFields;
this.copyTo = copyTo;
@@ -319,14 +319,14 @@ public abstract class AbstractFieldMapper implements FieldMapper {
if (indexCreatedBefore2x) {
return false;
} else {
return fieldType.tokenized() == false && fieldType.indexOptions() != IndexOptions.NONE;
return fieldType().tokenized() == false && fieldType().indexOptions() != IndexOptions.NONE;
}
}
@Override
public String name() {
// TODO: cleanup names so Mapper knows about paths, so that it is always clear whether we are using short or full name
return fieldType.names().shortName();
return fieldType().names().shortName();
}
public abstract MappedFieldType defaultFieldType();
@@ -350,12 +350,12 @@ public abstract class AbstractFieldMapper implements FieldMapper {
parseCreateField(context, fields);
for (Field field : fields) {
if (!customBoost()) {
field.setBoost(fieldType.boost());
field.setBoost(fieldType().boost());
}
context.doc().add(field);
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + fieldType.names().fullName() + "]", e);
throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e);
}
multiFields.parse(this, context);
return null;
@@ -387,94 +387,94 @@ public abstract class AbstractFieldMapper implements FieldMapper {
if (mergeWith instanceof AbstractFieldMapper) {
mergedType = ((AbstractFieldMapper) mergeWith).contentType();
}
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] of different type, current_type [" + contentType() + "], merged_type [" + mergedType + "]");
// different types, return
return;
}
AbstractFieldMapper fieldMergeWith = (AbstractFieldMapper) mergeWith;
boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
boolean mergeWithIndexed = fieldMergeWith.fieldType().indexOptions() != IndexOptions.NONE;
if (indexed != mergeWithIndexed || this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different index values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different index values");
}
if (this.fieldType().stored() != fieldMergeWith.fieldType().stored()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different store values");
}
if (!this.fieldType().hasDocValues() && fieldMergeWith.fieldType().hasDocValues()) {
// don't add conflict if this mapper has doc values while the mapper to merge doesn't since doc values are implicitly set
// when the doc_values field data format is configured
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different " + TypeParsers.DOC_VALUES + " values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different " + TypeParsers.DOC_VALUES + " values");
}
if (this.fieldType().omitNorms() && !fieldMergeWith.fieldType.omitNorms()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] cannot enable norms (`norms.enabled`)");
if (this.fieldType().omitNorms() && !fieldMergeWith.fieldType().omitNorms()) {
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] cannot enable norms (`norms.enabled`)");
}
if (this.fieldType().tokenized() != fieldMergeWith.fieldType().tokenized()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tokenize values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different tokenize values");
}
if (this.fieldType().storeTermVectors() != fieldMergeWith.fieldType().storeTermVectors()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different store_term_vector values");
}
if (this.fieldType().storeTermVectorOffsets() != fieldMergeWith.fieldType().storeTermVectorOffsets()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_offsets values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different store_term_vector_offsets values");
}
if (this.fieldType().storeTermVectorPositions() != fieldMergeWith.fieldType().storeTermVectorPositions()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_positions values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different store_term_vector_positions values");
}
if (this.fieldType().storeTermVectorPayloads() != fieldMergeWith.fieldType().storeTermVectorPayloads()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different store_term_vector_payloads values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different store_term_vector_payloads values");
}
// null and "default"-named index analyzers both mean the default is used
if (this.fieldType.indexAnalyzer() == null || "default".equals(this.fieldType.indexAnalyzer().name())) {
if (fieldMergeWith.fieldType.indexAnalyzer() != null && "default".equals(fieldMergeWith.fieldType.indexAnalyzer().name()) == false) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer");
if (this.fieldType().indexAnalyzer() == null || "default".equals(this.fieldType().indexAnalyzer().name())) {
if (fieldMergeWith.fieldType().indexAnalyzer() != null && "default".equals(fieldMergeWith.fieldType().indexAnalyzer().name()) == false) {
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different analyzer");
}
} else if (fieldMergeWith.fieldType.indexAnalyzer() == null || "default".equals(fieldMergeWith.fieldType.indexAnalyzer().name())) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer");
} else if (this.fieldType.indexAnalyzer().name().equals(fieldMergeWith.fieldType.indexAnalyzer().name()) == false) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different analyzer");
} else if (fieldMergeWith.fieldType().indexAnalyzer() == null || "default".equals(fieldMergeWith.fieldType().indexAnalyzer().name())) {
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different analyzer");
} else if (this.fieldType().indexAnalyzer().name().equals(fieldMergeWith.fieldType().indexAnalyzer().name()) == false) {
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different analyzer");
}
if (!this.fieldType().names().equals(fieldMergeWith.fieldType().names())) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different index_name");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different index_name");
}
if (this.fieldType.similarity() == null) {
if (fieldMergeWith.fieldType.similarity() != null) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity");
if (this.fieldType().similarity() == null) {
if (fieldMergeWith.fieldType().similarity() != null) {
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different similarity");
}
} else if (fieldMergeWith.fieldType().similarity() == null) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different similarity");
} else if (!this.fieldType().similarity().equals(fieldMergeWith.fieldType().similarity())) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different similarity");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different similarity");
}
multiFields.merge(mergeWith, mergeResult);
if (!mergeResult.simulate()) {
// apply changeable values
this.fieldType = this.fieldType.clone();
this.fieldType.setOmitNorms(fieldMergeWith.fieldType.omitNorms());
this.fieldType.setBoost(fieldMergeWith.fieldType.boost());
this.fieldType.setNormsLoading(fieldMergeWith.fieldType.normsLoading());
if (fieldMergeWith.fieldType.searchAnalyzer() != null) {
this.fieldType.setSearchAnalyzer(fieldMergeWith.fieldType.searchAnalyzer());
this.fieldType = this.fieldType().clone();
this.fieldType().setOmitNorms(fieldMergeWith.fieldType().omitNorms());
this.fieldType().setBoost(fieldMergeWith.fieldType().boost());
this.fieldType().setNormsLoading(fieldMergeWith.fieldType().normsLoading());
if (fieldMergeWith.fieldType().searchAnalyzer() != null) {
this.fieldType().setSearchAnalyzer(fieldMergeWith.fieldType().searchAnalyzer());
}
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
this.fieldType.setFieldDataType(new FieldDataType(defaultFieldDataType().getType(),
this.fieldType().setFieldDataType(new FieldDataType(defaultFieldDataType().getType(),
Settings.builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
));
}
}
this.fieldType.freeze();
this.fieldType().freeze();
this.copyTo = fieldMergeWith.copyTo;
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(fieldType.names().shortName());
builder.startObject(fieldType().names().shortName());
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
doXContentBody(builder, includeDefaults, params);
return builder.endObject();
@@ -483,40 +483,40 @@ public abstract class AbstractFieldMapper implements FieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
builder.field("type", contentType());
if (indexCreatedBefore2x && (includeDefaults || !fieldType.names().shortName().equals(fieldType.names().originalIndexName()))) {
builder.field("index_name", fieldType.names().originalIndexName());
if (indexCreatedBefore2x && (includeDefaults || !fieldType().names().shortName().equals(fieldType().names().originalIndexName()))) {
builder.field("index_name", fieldType().names().originalIndexName());
}
if (includeDefaults || fieldType.boost() != 1.0f) {
builder.field("boost", fieldType.boost());
if (includeDefaults || fieldType().boost() != 1.0f) {
builder.field("boost", fieldType().boost());
}
FieldType defaultFieldType = defaultFieldType();
boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
boolean defaultIndexed = defaultFieldType.indexOptions() != IndexOptions.NONE;
if (includeDefaults || indexed != defaultIndexed ||
fieldType.tokenized() != defaultFieldType.tokenized()) {
builder.field("index", indexTokenizeOptionToString(indexed, fieldType.tokenized()));
fieldType().tokenized() != defaultFieldType.tokenized()) {
builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized()));
}
if (includeDefaults || fieldType.stored() != defaultFieldType.stored()) {
builder.field("store", fieldType.stored());
if (includeDefaults || fieldType().stored() != defaultFieldType.stored()) {
builder.field("store", fieldType().stored());
}
doXContentDocValues(builder, includeDefaults);
if (includeDefaults || fieldType.storeTermVectors() != defaultFieldType.storeTermVectors()) {
builder.field("term_vector", termVectorOptionsToString(fieldType));
if (includeDefaults || fieldType().storeTermVectors() != defaultFieldType.storeTermVectors()) {
builder.field("term_vector", termVectorOptionsToString(fieldType()));
}
if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms() || fieldType.normsLoading() != null) {
if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms() || fieldType().normsLoading() != null) {
builder.startObject("norms");
if (includeDefaults || fieldType.omitNorms() != defaultFieldType.omitNorms()) {
builder.field("enabled", !fieldType.omitNorms());
if (includeDefaults || fieldType().omitNorms() != defaultFieldType.omitNorms()) {
builder.field("enabled", !fieldType().omitNorms());
}
if (fieldType.normsLoading() != null) {
builder.field(MappedFieldType.Loading.KEY, fieldType.normsLoading());
if (fieldType().normsLoading() != null) {
builder.field(MappedFieldType.Loading.KEY, fieldType().normsLoading());
}
builder.endObject();
}
if (indexed && (includeDefaults || fieldType.indexOptions() != defaultFieldType.indexOptions())) {
builder.field("index_options", indexOptionToString(fieldType.indexOptions()));
if (indexed && (includeDefaults || fieldType().indexOptions() != defaultFieldType.indexOptions())) {
builder.field("index_options", indexOptionToString(fieldType().indexOptions()));
}
doXContentAnalyzers(builder, includeDefaults);
@@ -532,7 +532,7 @@ public abstract class AbstractFieldMapper implements FieldMapper {
orderedFielddataSettings.putAll(customFieldDataSettings.getAsMap());
builder.field("fielddata", orderedFielddataSettings);
} else if (includeDefaults) {
orderedFielddataSettings.putAll(fieldType.fieldDataType().getSettings().getAsMap());
orderedFielddataSettings.putAll(fieldType().fieldDataType().getSettings().getAsMap());
builder.field("fielddata", orderedFielddataSettings);
}
multiFields.toXContent(builder, params);
@@ -543,14 +543,14 @@ public abstract class AbstractFieldMapper implements FieldMapper {
}
protected void doXContentAnalyzers(XContentBuilder builder, boolean includeDefaults) throws IOException {
if (fieldType.indexAnalyzer() == null) {
if (fieldType().indexAnalyzer() == null) {
if (includeDefaults) {
builder.field("analyzer", "default");
}
} else if (includeDefaults || fieldType.indexAnalyzer().name().startsWith("_") == false && fieldType.indexAnalyzer().name().equals("default") == false) {
builder.field("analyzer", fieldType.indexAnalyzer().name());
if (fieldType.searchAnalyzer().name().equals(fieldType.indexAnalyzer().name()) == false) {
builder.field("search_analyzer", fieldType.searchAnalyzer().name());
} else if (includeDefaults || fieldType().indexAnalyzer().name().startsWith("_") == false && fieldType().indexAnalyzer().name().equals("default") == false) {
builder.field("analyzer", fieldType().indexAnalyzer().name());
if (fieldType().searchAnalyzer().name().equals(fieldType().indexAnalyzer().name()) == false) {
builder.field("search_analyzer", fieldType().searchAnalyzer().name());
}
}
}
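The merge() body above, like those in the mappers that follow, leans on one idiom: clone the frozen field type, mutate the fresh copy, then freeze it again before publishing. A hedged sketch of that copy-on-write idiom with a simplified stand-in type (not the real MappedFieldType):

    class SketchFieldType implements Cloneable {
        private boolean frozen;
        private float boost = 1.0f;

        void setBoost(float boost) {
            if (frozen) throw new IllegalStateException("field type is frozen");
            this.boost = boost;
        }

        void freeze() { frozen = true; }

        @Override
        public SketchFieldType clone() {
            try {
                SketchFieldType copy = (SketchFieldType) super.clone();
                copy.frozen = false; // clones start out mutable
                return copy;
            } catch (CloneNotSupportedException e) {
                throw new AssertionError(e);
            }
        }
    }

    // Usage mirroring the merge() paths above:
    //   this.fieldType = this.fieldType().clone(); // fresh mutable copy
    //   this.fieldType().setBoost(...);            // apply changeable values
    //   this.fieldType().freeze();                 // publish as immutable again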

View File

@@ -198,7 +198,7 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
@Override
public BooleanFieldType fieldType() {
return (BooleanFieldType)fieldType;
return (BooleanFieldType) super.fieldType();
}
@Override
@@ -233,9 +233,9 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
if (value == null) {
return;
}
fields.add(new Field(fieldType.names().indexName(), value ? "T" : "F", fieldType));
fields.add(new Field(fieldType().names().indexName(), value ? "T" : "F", fieldType()));
if (fieldType().hasDocValues()) {
fields.add(new SortedNumericDocValuesField(fieldType.names().indexName(), value ? 1 : 0));
fields.add(new SortedNumericDocValuesField(fieldType().names().indexName(), value ? 1 : 0));
}
}
@@ -247,9 +247,9 @@ public class BooleanFieldMapper extends AbstractFieldMapper {
}
if (!mergeResult.simulate()) {
this.fieldType = this.fieldType.clone();
this.fieldType.setNullValue(((BooleanFieldMapper) mergeWith).fieldType().nullValue());
this.fieldType.freeze();
this.fieldType = fieldType().clone();
fieldType().setNullValue(((BooleanFieldMapper) mergeWith).fieldType().nullValue());
fieldType().freeze();
}
}

View File

@@ -23,7 +23,6 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;

View File

@@ -297,7 +297,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
@Override
public CompletionFieldType fieldType() {
return (CompletionFieldType)fieldType;
return (CompletionFieldType) super.fieldType();
}
@Override
@@ -448,7 +448,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
+ "] at position " + i + " is a reserved character");
}
}
return new SuggestField(fieldType.names().indexName(), ctx, input, this.fieldType, payload, fieldType().analyzingSuggestLookupProvider);
return new SuggestField(fieldType().names().indexName(), ctx, input, fieldType(), payload, fieldType().analyzingSuggestLookupProvider);
}
public static int correctSubStringLen(String input, int len) {
@@ -487,9 +487,9 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
builder.startObject(fieldType().names().shortName())
.field(Fields.TYPE, CONTENT_TYPE);
builder.field(Fields.ANALYZER, fieldType.indexAnalyzer().name());
if (fieldType.indexAnalyzer().name().equals(fieldType.searchAnalyzer().name()) == false) {
builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), fieldType.searchAnalyzer().name());
builder.field(Fields.ANALYZER, fieldType().indexAnalyzer().name());
if (fieldType().indexAnalyzer().name().equals(fieldType().searchAnalyzer().name()) == false) {
builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), fieldType().searchAnalyzer().name());
}
builder.field(Fields.PAYLOADS, fieldType().analyzingSuggestLookupProvider.hasPayloads());
builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), fieldType().analyzingSuggestLookupProvider.getPreserveSep());
@@ -536,16 +536,16 @@ public class CompletionFieldMapper extends AbstractFieldMapper {
super.merge(mergeWith, mergeResult);
CompletionFieldMapper fieldMergeWith = (CompletionFieldMapper) mergeWith;
if (fieldType().analyzingSuggestLookupProvider.hasPayloads() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.hasPayloads()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different payload values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different payload values");
}
if (fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_position_increments' values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different 'preserve_position_increments' values");
}
if (fieldType().analyzingSuggestLookupProvider.getPreserveSep() != fieldMergeWith.fieldType().analyzingSuggestLookupProvider.getPreserveSep()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'preserve_separators' values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different 'preserve_separators' values");
}
if(!ContextMapping.mappingsAreEqual(fieldType().getContextMapping(), fieldMergeWith.fieldType().getContextMapping())) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different 'context_mapping' values");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different 'context_mapping' values");
}
if (!mergeResult.simulate()) {
this.maxInputLength = fieldMergeWith.maxInputLength;

View File

@@ -23,7 +23,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;

View File

@@ -23,10 +23,8 @@ import com.carrotsearch.hppc.FloatArrayList;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;

View File

@@ -24,7 +24,6 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;

View File

@@ -24,7 +24,6 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;

View File

@@ -188,7 +188,7 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
private ThreadLocal<NumericTokenStream> tokenStream = new ThreadLocal<NumericTokenStream>() {
@Override
protected NumericTokenStream initialValue() {
return new NumericTokenStream(fieldType.numericPrecisionStep());
return new NumericTokenStream(fieldType().numericPrecisionStep());
}
};
@@ -319,9 +319,9 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
}
if (!mergeResult.simulate()) {
NumberFieldMapper nfmMergeWith = (NumberFieldMapper) mergeWith;
this.fieldType = this.fieldType.clone();
this.fieldType.setNumericPrecisionStep(nfmMergeWith.fieldType.numericPrecisionStep());
this.fieldType.freeze();
this.fieldType = this.fieldType().clone();
this.fieldType().setNumericPrecisionStep(nfmMergeWith.fieldType().numericPrecisionStep());
this.fieldType().freeze();
this.includeInAll = nfmMergeWith.includeInAll;
if (nfmMergeWith.ignoreMalformed.explicit()) {
this.ignoreMalformed = nfmMergeWith.ignoreMalformed;
@@ -337,13 +337,13 @@ public abstract class NumberFieldMapper extends AbstractFieldMapper implements A
}
protected NumericTokenStream popCachedStream() {
if (fieldType.numericPrecisionStep() == 4) {
if (fieldType().numericPrecisionStep() == 4) {
return tokenStream4.get();
} else if (fieldType.numericPrecisionStep() == 8) {
} else if (fieldType().numericPrecisionStep() == 8) {
return tokenStream8.get();
} else if (fieldType.numericPrecisionStep() == 16) {
} else if (fieldType().numericPrecisionStep() == 16) {
return tokenStream16.get();
} else if (fieldType.numericPrecisionStep() == Integer.MAX_VALUE) {
} else if (fieldType().numericPrecisionStep() == Integer.MAX_VALUE) {
return tokenStreamMax.get();
}
return tokenStream.get();
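popCachedStream() hands back reusable NumericTokenStream instances keyed by the common precision steps, one cached per thread. A hedged sketch of the same idea; ThreadLocal.withInitial is a Java 8 shorthand for the anonymous subclasses in the hunk above, and Lucene's NumericTokenStream is assumed on the classpath (the file already imports it). The real code also caches step 16 and a per-field default.

    import org.apache.lucene.analysis.NumericTokenStream;

    // Per-thread stream caching: reuse avoids allocating a fresh token
    // stream for every document parsed.
    final class NumericStreamCache {
        private final ThreadLocal<NumericTokenStream> stream4 =
                ThreadLocal.withInitial(() -> new NumericTokenStream(4));
        private final ThreadLocal<NumericTokenStream> stream8 =
                ThreadLocal.withInitial(() -> new NumericTokenStream(8));
        private final ThreadLocal<NumericTokenStream> streamMax =
                ThreadLocal.withInitial(() -> new NumericTokenStream(Integer.MAX_VALUE));

        NumericTokenStream forPrecisionStep(int step) {
            switch (step) {
                case 4: return stream4.get();
                case 8: return stream8.get();
                case Integer.MAX_VALUE: return streamMax.get();
                default: return new NumericTokenStream(step); // uncached fallback
            }
        }
    }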

View File

@@ -24,7 +24,6 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;

View File

@@ -276,7 +276,7 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().nullValueAsString(), fieldType.boost());
ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().nullValueAsString(), fieldType().boost());
if (valueAndBoost.value() == null) {
return;
}
@@ -284,19 +284,19 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
return;
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType.names().fullName(), valueAndBoost.value(), valueAndBoost.boost());
context.allEntries().addText(fieldType().names().fullName(), valueAndBoost.value(), valueAndBoost.boost());
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
Field field = new Field(fieldType.names().indexName(), valueAndBoost.value(), fieldType);
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().names().indexName(), valueAndBoost.value(), fieldType());
field.setBoost(valueAndBoost.boost());
fields.add(field);
}
if (fieldType().hasDocValues()) {
fields.add(new SortedSetDocValuesField(fieldType.names().indexName(), new BytesRef(valueAndBoost.value())));
fields.add(new SortedSetDocValuesField(fieldType().names().indexName(), new BytesRef(valueAndBoost.value())));
}
if (fields.isEmpty()) {
context.ignoredValue(fieldType.names().indexName(), valueAndBoost.value());
context.ignoredValue(fieldType().names().indexName(), valueAndBoost.value());
}
}
@@ -353,9 +353,9 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
if (!mergeResult.simulate()) {
this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll;
this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove;
this.fieldType = this.fieldType.clone();
this.fieldType.setNullValue(((StringFieldMapper) mergeWith).fieldType().nullValue());
this.fieldType.freeze();
this.fieldType = this.fieldType().clone();
this.fieldType().setNullValue(((StringFieldMapper) mergeWith).fieldType().nullValue());
this.fieldType().freeze();
}
}
@@ -375,8 +375,8 @@ public class StringFieldMapper extends AbstractFieldMapper implements AllFieldMa
if (includeDefaults || positionOffsetGap != Defaults.POSITION_OFFSET_GAP) {
builder.field("position_offset_gap", positionOffsetGap);
}
NamedAnalyzer searchQuoteAnalyzer = fieldType.searchQuoteAnalyzer();
if (searchQuoteAnalyzer != null && !searchQuoteAnalyzer.name().equals(fieldType.searchAnalyzer().name())) {
NamedAnalyzer searchQuoteAnalyzer = fieldType().searchQuoteAnalyzer();
if (searchQuoteAnalyzer != null && !searchQuoteAnalyzer.name().equals(fieldType().searchAnalyzer().name())) {
builder.field("search_quote_analyzer", searchQuoteAnalyzer.name());
} else if (includeDefaults) {
if (searchQuoteAnalyzer == null) {

View File

@@ -36,7 +36,6 @@ import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.StringFieldMapper.ValueAndBoost;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.geo;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.google.common.base.Objects;
import com.google.common.collect.Iterators;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
@@ -568,7 +567,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
@Override
public GeoPointFieldType fieldType() {
return (GeoPointFieldType)fieldType;
return (GeoPointFieldType) super.fieldType();
}
@Override
@@ -679,8 +678,8 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
}
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
Field field = new Field(fieldType.names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType);
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
context.doc().add(field);
}
if (fieldType().isGeohashEnabled()) {
@@ -728,32 +727,32 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implements ArrayVal
GeoPointFieldMapper fieldMergeWith = (GeoPointFieldMapper) mergeWith;
if (this.fieldType().isLatLonEnabled() != fieldMergeWith.fieldType().isLatLonEnabled()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different lat_lon");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different lat_lon");
}
if (this.fieldType().isGeohashEnabled() != fieldMergeWith.fieldType().isGeohashEnabled()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different geohash");
}
if (this.fieldType().geohashPrecision() != fieldMergeWith.fieldType().geohashPrecision()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash_precision");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different geohash_precision");
}
if (this.fieldType().isGeohashPrefixEnabled() != fieldMergeWith.fieldType().isGeohashPrefixEnabled()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different geohash_prefix");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different geohash_prefix");
}
if (this.fieldType().normalizeLat() != fieldMergeWith.fieldType().normalizeLat()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different normalize_lat");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different normalize_lat");
}
if (this.fieldType().normalizeLon() != fieldMergeWith.fieldType().normalizeLon()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different normalize_lon");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different normalize_lon");
}
if (fieldType().isLatLonEnabled() &&
this.fieldType().latFieldType().numericPrecisionStep() != fieldMergeWith.fieldType().latFieldType().numericPrecisionStep()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different precision_step");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different precision_step");
}
if (this.fieldType().validateLat() != fieldMergeWith.fieldType().validateLat()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different validate_lat");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different validate_lat");
}
if (this.fieldType().validateLon() != fieldMergeWith.fieldType().validateLon()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different validate_lon");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different validate_lon");
}
}

View File

@@ -38,7 +38,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@@ -340,7 +339,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
@Override
public GeoShapeFieldType fieldType() {
return (GeoShapeFieldType)fieldType;
return (GeoShapeFieldType) super.fieldType();
}
@Override
@@ -370,12 +369,12 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
}
for (Field field : fields) {
if (!customBoost()) {
field.setBoost(fieldType.boost());
field.setBoost(fieldType().boost());
}
context.doc().add(field);
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + fieldType.names().fullName() + "]", e);
throw new MapperParsingException("failed to parse [" + fieldType().names().fullName() + "]", e);
}
return null;
}
@@ -384,28 +383,28 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different field type");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different field type");
return;
}
final GeoShapeFieldMapper fieldMergeWith = (GeoShapeFieldMapper) mergeWith;
// prevent user from changing strategies
if (fieldType().strategyName().equals(fieldMergeWith.fieldType().strategyName()) == false) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different strategy");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different strategy");
}
// prevent user from changing trees (changes encoding)
if (fieldType().tree().equals(fieldMergeWith.fieldType().tree()) == false) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different tree");
}
// TODO we should allow this, but at the moment levels is used to build bookkeeping variables
// in lucene's SpatialPrefixTree implementations, need a patch to correct that first
if (fieldType().treeLevels() != fieldMergeWith.fieldType().treeLevels()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different tree_levels");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different tree_levels");
}
if (fieldType().precisionInMeters() != fieldMergeWith.fieldType().precisionInMeters()) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] has different precision");
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different precision");
}
// bail if there were merge conflicts
@@ -414,12 +413,12 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper {
}
// change distance error percent
this.fieldType = this.fieldType.clone();
this.fieldType = fieldType().clone();
this.fieldType().setDistanceErrorPct(fieldMergeWith.fieldType().distanceErrorPct());
// change orientation - this is allowed because existing dateline spanning shapes
// have already been unwound and segmented
this.fieldType().setOrientation(fieldMergeWith.fieldType().orientation());
this.fieldType.freeze();
fieldType().freeze();
}
@Override

View File

@@ -239,11 +239,11 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
// reset the entries
context.allEntries().reset();
Analyzer analyzer = findAnalyzer(context);
fields.add(new AllField(fieldType.names().indexName(), context.allEntries(), analyzer, fieldType));
fields.add(new AllField(fieldType().names().indexName(), context.allEntries(), analyzer, fieldType()));
}
private Analyzer findAnalyzer(ParseContext context) {
Analyzer analyzer = fieldType.indexAnalyzer();
Analyzer analyzer = fieldType().indexAnalyzer();
if (analyzer == null) {
analyzer = context.docMapper().mappers().indexAnalyzer();
if (analyzer == null) {
@@ -285,23 +285,23 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
if (includeDefaults || enabledState != Defaults.ENABLED) {
builder.field("enabled", enabledState.enabled);
}
if (includeDefaults || fieldType.stored() != Defaults.FIELD_TYPE.stored()) {
builder.field("store", fieldType.stored());
if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) {
builder.field("store", fieldType().stored());
}
if (includeDefaults || fieldType.storeTermVectors() != Defaults.FIELD_TYPE.storeTermVectors()) {
builder.field("store_term_vectors", fieldType.storeTermVectors());
if (includeDefaults || fieldType().storeTermVectors() != Defaults.FIELD_TYPE.storeTermVectors()) {
builder.field("store_term_vectors", fieldType().storeTermVectors());
}
if (includeDefaults || fieldType.storeTermVectorOffsets() != Defaults.FIELD_TYPE.storeTermVectorOffsets()) {
builder.field("store_term_vector_offsets", fieldType.storeTermVectorOffsets());
if (includeDefaults || fieldType().storeTermVectorOffsets() != Defaults.FIELD_TYPE.storeTermVectorOffsets()) {
builder.field("store_term_vector_offsets", fieldType().storeTermVectorOffsets());
}
if (includeDefaults || fieldType.storeTermVectorPositions() != Defaults.FIELD_TYPE.storeTermVectorPositions()) {
builder.field("store_term_vector_positions", fieldType.storeTermVectorPositions());
if (includeDefaults || fieldType().storeTermVectorPositions() != Defaults.FIELD_TYPE.storeTermVectorPositions()) {
builder.field("store_term_vector_positions", fieldType().storeTermVectorPositions());
}
if (includeDefaults || fieldType.storeTermVectorPayloads() != Defaults.FIELD_TYPE.storeTermVectorPayloads()) {
builder.field("store_term_vector_payloads", fieldType.storeTermVectorPayloads());
if (includeDefaults || fieldType().storeTermVectorPayloads() != Defaults.FIELD_TYPE.storeTermVectorPayloads()) {
builder.field("store_term_vector_payloads", fieldType().storeTermVectorPayloads());
}
if (includeDefaults || fieldType.omitNorms() != Defaults.FIELD_TYPE.omitNorms()) {
builder.field("omit_norms", fieldType.omitNorms());
if (includeDefaults || fieldType().omitNorms() != Defaults.FIELD_TYPE.omitNorms()) {
builder.field("omit_norms", fieldType().omitNorms());
}
doXContentAnalyzers(builder, includeDefaults);
@@ -315,14 +315,14 @@ public class AllFieldMapper extends AbstractFieldMapper implements RootMapper {
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
}
}
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
if (((AllFieldMapper)mergeWith).enabled() != this.enabled() && ((AllFieldMapper)mergeWith).enabledState != Defaults.ENABLED) {
mergeResult.addConflict("mapper [" + fieldType.names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] enabled is " + this.enabled() + " now encountering "+ ((AllFieldMapper)mergeWith).enabled());
}
super.merge(mergeWith, mergeResult);
}

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal;
import com.google.common.collect.UnmodifiableIterator;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
@@ -39,7 +38,6 @@ import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.search.highlight.HighlightBuilder;
import java.io.IOException;
import java.util.ArrayList;
@@ -201,15 +199,15 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
this.defaultFieldType = Defaults.FIELD_TYPE;
this.pre13Index = Version.indexCreated(indexSettings).before(Version.V_1_3_0);
if (this.pre13Index) {
this.fieldType = this.fieldType.clone();
this.fieldType = fieldType().clone();
fieldType().setEnabled(false);
this.fieldType.freeze();
fieldType().freeze();
}
}
@Override
public FieldNamesFieldType fieldType() {
return (FieldNamesFieldType)fieldType;
return (FieldNamesFieldType) super.fieldType();
}
@Override
@@ -281,8 +279,8 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implements RootMa
}
for (String path : paths) {
for (String fieldName : extractFieldNames(path)) {
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
document.add(new Field(fieldType().names().indexName(), fieldName, fieldType));
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
document.add(new Field(fieldType().names().indexName(), fieldName, fieldType()));
}
}
}

View File

@@ -31,7 +31,6 @@ import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
@@ -288,11 +287,11 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
context.id(id);
} // else we are in the pre/post parse phase
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
fields.add(new Field(fieldType.names().indexName(), context.id(), fieldType));
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
fields.add(new Field(fieldType().names().indexName(), context.id(), fieldType()));
}
if (fieldType().hasDocValues()) {
fields.add(new BinaryDocValuesField(fieldType.names().indexName(), new BytesRef(context.id())));
fields.add(new BinaryDocValuesField(fieldType().names().indexName(), new BytesRef(context.id())));
}
}
@@ -309,18 +308,18 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
// if all are defaults, no sense to write it at all
if (!includeDefaults && fieldType.stored() == Defaults.FIELD_TYPE.stored()
&& fieldType.indexOptions() == Defaults.FIELD_TYPE.indexOptions()
if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored()
&& fieldType().indexOptions() == Defaults.FIELD_TYPE.indexOptions()
&& path == Defaults.PATH
&& customFieldDataSettings == null) {
return builder;
}
builder.startObject(CONTENT_TYPE);
if (includeDefaults || fieldType.stored() != Defaults.FIELD_TYPE.stored()) {
builder.field("store", fieldType.stored());
if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) {
builder.field("store", fieldType().stored());
}
if (includeDefaults || fieldType.indexOptions() != Defaults.FIELD_TYPE.indexOptions()) {
builder.field("index", indexTokenizeOptionToString(fieldType.indexOptions() != IndexOptions.NONE, fieldType.tokenized()));
if (includeDefaults || fieldType().indexOptions() != Defaults.FIELD_TYPE.indexOptions()) {
builder.field("index", indexTokenizeOptionToString(fieldType().indexOptions() != IndexOptions.NONE, fieldType().tokenized()));
}
if (includeDefaults || path != Defaults.PATH) {
builder.field("path", path);
@@ -329,7 +328,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements RootMapper {
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
}
builder.endObject();
return builder;

View File

@@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.search.highlight.HighlightBuilder;
import java.io.IOException;
import java.util.Iterator;
@@ -170,7 +169,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
}
public String value(Document document) {
Field field = (Field) document.getField(fieldType.names().indexName());
Field field = (Field) document.getField(fieldType().names().indexName());
return field == null ? null : (String)fieldType().value(field);
}
@@ -194,7 +193,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
if (!enabledState.enabled) {
return;
}
fields.add(new Field(fieldType.names().indexName(), context.index(), fieldType));
fields.add(new Field(fieldType().names().indexName(), context.index(), fieldType()));
}
@Override
@@ -222,7 +221,7 @@ public class IndexFieldMapper extends AbstractFieldMapper implements RootMapper
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
}
}
builder.endObject();

View File

@@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper.internal;
import com.google.common.base.Objects;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.queries.TermsQuery;
@@ -237,9 +236,9 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
public ParentFieldMapper(Settings indexSettings) {
this(Defaults.FIELD_TYPE.clone(), null, null, indexSettings);
this.fieldType = this.fieldType.clone();
this.fieldType.setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.LAZY_VALUE)));
this.fieldType.freeze();
this.fieldType = this.fieldType().clone();
this.fieldType().setFieldDataType(new FieldDataType("_parent", settingsBuilder().put(MappedFieldType.Loading.KEY, MappedFieldType.Loading.LAZY_VALUE)));
this.fieldType().freeze();
}
public String type() {
@@ -268,7 +267,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
boolean parent = context.docMapper().isParent(context.type());
if (parent && fieldType.hasDocValues()) {
if (parent && fieldType().hasDocValues()) {
fields.add(createJoinField(context.type(), context.id()));
}
@@ -280,8 +279,8 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
// we are in the parsing of _parent phase
String parentId = context.parser().text();
context.sourceToParse().parent(parentId);
fields.add(new Field(fieldType.names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
if (fieldType.hasDocValues()) {
fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType()));
if (fieldType().hasDocValues()) {
fields.add(createJoinField(type, parentId));
}
} else {
@@ -294,8 +293,8 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
throw new MapperParsingException("No parent id provided, not within the document, and not externally");
}
// we did not add it in the parsing phase, add it now
fields.add(new Field(fieldType.names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType));
if (fieldType.hasDocValues()) {
fields.add(new Field(fieldType().names().indexName(), Uid.createUid(context.stringBuilder(), type, parentId), fieldType()));
if (fieldType().hasDocValues()) {
fields.add(createJoinField(type, parentId));
}
} else if (parentId != null && !parsedParentId.equals(Uid.createUid(context.stringBuilder(), type, parentId))) {
@@ -332,7 +331,7 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
}
builder.endObject();
return builder;
@@ -347,16 +346,16 @@ public class ParentFieldMapper extends AbstractFieldMapper implements RootMapper
if (!mergeResult.simulate()) {
ParentFieldMapper fieldMergeWith = (ParentFieldMapper) mergeWith;
this.fieldType = this.fieldType.clone();
this.fieldType = this.fieldType().clone();
if (fieldMergeWith.customFieldDataSettings != null) {
if (!Objects.equal(fieldMergeWith.customFieldDataSettings, this.customFieldDataSettings)) {
this.customFieldDataSettings = fieldMergeWith.customFieldDataSettings;
this.fieldType.setFieldDataType(new FieldDataType(defaultFieldDataType().getType(),
builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
this.fieldType().setFieldDataType(new FieldDataType(defaultFieldDataType().getType(),
builder().put(defaultFieldDataType().getSettings()).put(this.customFieldDataSettings)
));
}
}
this.fieldType.freeze();
this.fieldType().freeze();
}
}

View File

@@ -184,7 +184,7 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
}
public String value(Document document) {
Field field = (Field) document.getField(fieldType.names().indexName());
Field field = (Field) document.getField(fieldType().names().indexName());
return field == null ? null : (String)fieldType().value(field);
}
@@ -210,11 +210,11 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
if (context.sourceToParse().routing() != null) {
String routing = context.sourceToParse().routing();
if (routing != null) {
if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) {
context.ignoredValue(fieldType.names().indexName(), routing);
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) {
context.ignoredValue(fieldType().names().indexName(), routing);
return;
}
fields.add(new Field(fieldType.names().indexName(), routing, fieldType));
fields.add(new Field(fieldType().names().indexName(), routing, fieldType()));
}
}
}
@@ -229,18 +229,18 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements RootMappe
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
// if all are defaults, no sense to write it at all
boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE;
if (!includeDefaults && indexed == indexedDefault &&
fieldType.stored() == Defaults.FIELD_TYPE.stored() && required == Defaults.REQUIRED && path == Defaults.PATH) {
fieldType().stored() == Defaults.FIELD_TYPE.stored() && required == Defaults.REQUIRED && path == Defaults.PATH) {
return builder;
}
builder.startObject(CONTENT_TYPE);
if (indexCreatedBefore2x && (includeDefaults || indexed != indexedDefault)) {
builder.field("index", indexTokenizeOptionToString(indexed, fieldType.tokenized()));
builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized()));
}
if (indexCreatedBefore2x && (includeDefaults || fieldType.stored() != Defaults.FIELD_TYPE.stored())) {
builder.field("store", fieldType.stored());
if (indexCreatedBefore2x && (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored())) {
builder.field("store", fieldType().stored());
}
if (includeDefaults || required != Defaults.REQUIRED) {
builder.field("required", required);

View File

@@ -313,7 +313,7 @@ public class SourceFieldMapper extends AbstractFieldMapper implements RootMapper
if (!enabled) {
return;
}
if (!fieldType.stored()) {
if (!fieldType().stored()) {
return;
}
if (context.flyweight()) {

View File

@@ -178,12 +178,12 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) {
if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) {
return;
}
fields.add(new Field(fieldType.names().indexName(), context.type(), fieldType));
fields.add(new Field(fieldType().names().indexName(), context.type(), fieldType()));
if (fieldType().hasDocValues()) {
fields.add(new SortedSetDocValuesField(fieldType.names().indexName(), new BytesRef(context.type())));
fields.add(new SortedSetDocValuesField(fieldType().names().indexName(), new BytesRef(context.type())));
}
}
@@ -200,17 +200,17 @@ public class TypeFieldMapper extends AbstractFieldMapper implements RootMapper {
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
// if all are defaults, no sense to write it at all
boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
boolean defaultIndexed = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE;
if (!includeDefaults && fieldType.stored() == Defaults.FIELD_TYPE.stored() && indexed == defaultIndexed) {
if (!includeDefaults && fieldType().stored() == Defaults.FIELD_TYPE.stored() && indexed == defaultIndexed) {
return builder;
}
builder.startObject(CONTENT_TYPE);
if (includeDefaults || fieldType.stored() != Defaults.FIELD_TYPE.stored()) {
builder.field("store", fieldType.stored());
if (includeDefaults || fieldType().stored() != Defaults.FIELD_TYPE.stored()) {
builder.field("store", fieldType().stored());
}
if (includeDefaults || indexed != defaultIndexed) {
builder.field("index", indexTokenizeOptionToString(indexed, fieldType.tokenized()));
builder.field("index", indexTokenizeOptionToString(indexed, fieldType().tokenized()));
}
builder.endObject();
return builder;


@ -229,7 +229,7 @@ public class UidFieldMapper extends AbstractFieldMapper implements RootMapper {
if (customFieldDataSettings != null) {
builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
} else if (includeDefaults) {
builder.field("fielddata", (Map) fieldType.fieldDataType().getSettings().getAsMap());
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
}
builder.endObject();


@ -23,7 +23,6 @@ import com.google.common.net.InetAddresses;
import org.apache.lucene.analysis.NumericTokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
@ -275,13 +274,13 @@ public class IpFieldMapper extends NumberFieldMapper {
return;
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType.names().fullName(), ipAsString, fieldType.boost());
context.allEntries().addText(fieldType().names().fullName(), ipAsString, fieldType().boost());
}
final long value = ipToLong(ipAsString);
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType);
field.setBoost(fieldType.boost());
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType());
field.setBoost(fieldType().boost());
fields.add(field);
}
if (fieldType().hasDocValues()) {
@ -301,8 +300,8 @@ public class IpFieldMapper extends NumberFieldMapper {
return;
}
if (!mergeResult.simulate()) {
this.fieldType = this.fieldType.clone();
this.fieldType.setNullValue(((IpFieldMapper) mergeWith).fieldType().nullValue());
this.fieldType = this.fieldType().clone();
this.fieldType().setNullValue(((IpFieldMapper) mergeWith).fieldType().nullValue());
}
}
@ -310,8 +309,8 @@ public class IpFieldMapper extends NumberFieldMapper {
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", fieldType.numericPrecisionStep());
if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", fieldType().numericPrecisionStep());
}
if (includeDefaults || fieldType().nullValueAsString() != null) {
builder.field("null_value", fieldType().nullValueAsString());


@ -111,7 +111,7 @@ public class IndexDynamicSettingsModule extends AbstractModule {
indexDynamicSettings.addDynamicSetting(TranslogConfig.INDEX_TRANSLOG_DURABILITY);
indexDynamicSettings.addDynamicSetting(IndicesWarmer.INDEX_WARMER_ENABLED);
indexDynamicSettings.addDynamicSetting(IndicesQueryCache.INDEX_CACHE_QUERY_ENABLED, Validator.BOOLEAN);
indexDynamicSettings.addDynamicSetting(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, Validator.TIME);
indexDynamicSettings.addDynamicSetting(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, Validator.TIME);
}
public void addDynamicSettings(String... settings) {
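Since the setting keeps its dynamic TIME registration under the new name, it can still be updated on a live index, as the DelayedAllocationTests further down do. A minimal sketch, assuming the client()/Settings/TimeValue helpers of an ElasticsearchIntegrationTest subclass:

[source,java]
--------------------------------------------------
// Hedged sketch: raise the per-index delayed-allocation timeout at runtime.
client().admin().indices().prepareUpdateSettings("test")
        .setSettings(Settings.builder()
                .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMinutes(1)))
        .get();
--------------------------------------------------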


@ -56,6 +56,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.TemplateQueryParser;
import org.elasticsearch.script.expression.ExpressionScriptEngineService;
import org.elasticsearch.script.groovy.GroovyScriptEngineService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
@ -232,6 +233,16 @@ public class ScriptService extends AbstractComponent implements Closeable {
if (canExecuteScript(lang, scriptEngineService, script.getType(), scriptContext) == false) {
throw new ScriptException("scripts of type [" + script.getType() + "], operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are disabled");
}
// special exception to prevent expressions from compiling as update or mapping scripts
boolean expression = scriptEngineService instanceof ExpressionScriptEngineService;
boolean notSupported = scriptContext.getKey().equals(ScriptContext.Standard.UPDATE.getKey()) ||
scriptContext.getKey().equals(ScriptContext.Standard.MAPPING.getKey());
if (expression && notSupported) {
throw new ScriptException("scripts of type [" + script.getType() + "]," +
" operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are not supported");
}
return compileInternal(script);
}
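The new guard depends only on the engine type and the context key, so its shape can be shown without the surrounding service. A standalone sketch with hypothetical names (the real check lives in ScriptService#compile, as in the hunk above):

[source,java]
--------------------------------------------------
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Hypothetical model of the guard: expressions compile for search-style
// contexts, but update and mapping scripts are rejected up front.
final class ExpressionGuardSketch {
    private static final Set<String> NOT_SUPPORTED =
            new HashSet<>(Arrays.asList("update", "mapping"));

    static void check(boolean isExpressionEngine, String contextKey) {
        if (isExpressionEngine && NOT_SUPPORTED.contains(contextKey)) {
            throw new IllegalArgumentException(
                    "operation [" + contextKey + "] is not supported for expressions");
        }
    }

    public static void main(String[] args) {
        check(true, "search");  // passes silently
        check(true, "update");  // throws
    }
}
--------------------------------------------------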


@ -0,0 +1,89 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script.expression;
import org.apache.lucene.expressions.Expression;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptException;
import java.util.HashMap;
import java.util.Map;
/**
* A bridge to evaluate an {@link Expression} against a map of variables in the context
* of an {@link ExecutableScript}.
*/
public class ExpressionExecutableScript implements ExecutableScript {
private final int NO_DOCUMENT = -1;
public final Expression expression;
public final Map<String, ReplaceableConstFunctionValues> functionValuesMap;
public final ReplaceableConstFunctionValues[] functionValuesArray;
public ExpressionExecutableScript(Object compiledScript, Map<String, Object> vars) {
expression = (Expression)compiledScript;
int functionValuesLength = expression.variables.length;
if (vars.size() != functionValuesLength) {
throw new ScriptException("The number of variables in an executable expression script [" +
functionValuesLength + "] must match the number of variables in the variable map" +
" [" + vars.size() + "].");
}
functionValuesArray = new ReplaceableConstFunctionValues[functionValuesLength];
functionValuesMap = new HashMap<>();
for (int functionValuesIndex = 0; functionValuesIndex < functionValuesLength; ++functionValuesIndex) {
String variableName = expression.variables[functionValuesIndex];
functionValuesArray[functionValuesIndex] = new ReplaceableConstFunctionValues();
functionValuesMap.put(variableName, functionValuesArray[functionValuesIndex]);
}
for (String varsName : vars.keySet()) {
setNextVar(varsName, vars.get(varsName));
}
}
@Override
public void setNextVar(String name, Object value) {
if (functionValuesMap.containsKey(name)) {
if (value instanceof Number) {
double doubleValue = ((Number)value).doubleValue();
functionValuesMap.get(name).setValue(doubleValue);
} else {
throw new ScriptException("Executable expressions scripts can only process numbers." +
" The variable [" + name + "] is not a number.");
}
} else {
throw new ScriptException("The variable [" + name + "] does not exist in the executable expressions script.");
}
}
@Override
public Object run() {
return expression.evaluate(NO_DOCUMENT, functionValuesArray);
}
@Override
public Object unwrap(Object value) {
return value;
}
}
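A minimal usage sketch for the new class, mirroring the testExecutableScripts unit test added later in this commit; JavascriptCompiler is the Lucene expressions compiler the engine already uses:

[source,java]
--------------------------------------------------
import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.js.JavascriptCompiler;

import java.util.HashMap;
import java.util.Map;

// Sketch only: compile an expression, bind numeric variables, evaluate, rebind.
public class ExpressionExecutableScriptDemo {
    public static void main(String[] args) throws Exception {
        Expression expr = JavascriptCompiler.compile("a+b");
        Map<String, Object> vars = new HashMap<>();
        vars.put("a", 2.5);
        vars.put("b", 3);
        ExpressionExecutableScript script = new ExpressionExecutableScript(expr, vars);
        System.out.println(script.run()); // 5.5
        script.setNextVar("b", -2.5);     // variables can be replaced between runs
        System.out.println(script.run()); // 0.0
    }
}
--------------------------------------------------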


@ -31,7 +31,6 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
@ -172,7 +171,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
}
}
return new ExpressionScript((Expression)compiledScript, bindings, specialValue);
return new ExpressionSearchScript((Expression)compiledScript, bindings, specialValue);
}
protected ValueSource getMethodValueSource(MappedFieldType fieldType, IndexFieldData<?> fieldData, String fieldName, String methodName) {
@ -215,13 +214,14 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
}
@Override
public ExecutableScript executable(Object compiledScript, @Nullable Map<String, Object> vars) {
throw new UnsupportedOperationException("Cannot use expressions for updates");
public ExecutableScript executable(Object compiledScript, Map<String, Object> vars) {
return new ExpressionExecutableScript(compiledScript, vars);
}
@Override
public Object execute(Object compiledScript, Map<String, Object> vars) {
throw new UnsupportedOperationException("Cannot use expressions for updates");
ExpressionExecutableScript expressionExecutableScript = new ExpressionExecutableScript(compiledScript, vars);
return expressionExecutableScript.run();
}
@Override


@ -38,7 +38,7 @@ import java.util.Map;
* A bridge to evaluate an {@link Expression} against {@link Bindings} in the context
* of a {@link SearchScript}.
*/
class ExpressionScript implements SearchScript {
class ExpressionSearchScript implements SearchScript {
final Expression expression;
final SimpleBindings bindings;
@ -47,7 +47,7 @@ class ExpressionScript implements SearchScript {
Scorer scorer;
int docid;
ExpressionScript(Expression e, SimpleBindings b, ReplaceableConstValueSource v) {
ExpressionSearchScript(Expression e, SimpleBindings b, ReplaceableConstValueSource v) {
expression = e;
bindings = b;
source = expression.getValueSource(bindings);


@ -0,0 +1,44 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script.expression;
import org.apache.lucene.queries.function.FunctionValues;
/**
* A support class for an executable expression script that allows the double returned
* by a {@link FunctionValues} to be modified.
*/
public class ReplaceableConstFunctionValues extends FunctionValues {
private double value = 0;
public void setValue(double value) {
this.value = value;
}
@Override
public double doubleVal(int doc) {
return value;
}
@Override
public String toString(int i) {
return "ReplaceableConstFunctionValues: " + value;
}
}
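Since the class ignores the document id and simply echoes its current constant, its contract fits in a fragment (assuming the class above is on the classpath):

[source,java]
--------------------------------------------------
// Sketch: the same constant is reported for every doc id until it is replaced.
ReplaceableConstFunctionValues fv = new ReplaceableConstFunctionValues();
fv.setValue(3.5);
assert fv.doubleVal(0) == 3.5;
fv.setValue(-1.0);
assert fv.doubleVal(42) == -1.0; // the value is swapped in place
--------------------------------------------------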


@ -22,7 +22,6 @@ package org.elasticsearch.script.expression;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.queries.function.FunctionValues;
import org.apache.lucene.queries.function.ValueSource;
import org.apache.lucene.queries.function.docvalues.DoubleDocValues;
import java.io.IOException;
import java.util.Map;
@ -31,16 +30,10 @@ import java.util.Map;
* A {@link ValueSource} which has a stub {@link FunctionValues} that holds a dynamically replaceable constant double.
*/
class ReplaceableConstValueSource extends ValueSource {
double value;
final FunctionValues fv;
final ReplaceableConstFunctionValues fv;
public ReplaceableConstValueSource() {
fv = new DoubleDocValues(this) {
@Override
public double doubleVal(int i) {
return value;
}
};
fv = new ReplaceableConstFunctionValues();
}
@Override
@ -64,6 +57,6 @@ class ReplaceableConstValueSource extends ValueSource {
}
public void setValue(double v) {
value = v;
fv.setValue(v);
}
}


@ -101,4 +101,7 @@ grant {
// needed by Mockito
permission java.lang.RuntimePermission "reflectionFactoryAccess";
// needed to install SSLFactories, advanced SSL configuration, etc.
permission java.lang.RuntimePermission "setFactory";
};


@ -49,7 +49,7 @@ public class DelayedAllocationTests extends ElasticsearchIntegrationTest {
prepareCreate("test").setSettings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.put(UnassignedInfo.DEFAULT_DELAYED_NODE_LEFT_TIMEOUT, 0)).get();
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, 0)).get();
ensureGreen("test");
indexRandomData();
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));
@ -69,11 +69,16 @@ public class DelayedAllocationTests extends ElasticsearchIntegrationTest {
prepareCreate("test").setSettings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, TimeValue.timeValueHours(1))).get();
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get();
ensureGreen("test");
indexRandomData();
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));
assertThat(client().admin().cluster().prepareState().all().get().getState().routingNodes().hasUnassigned(), equalTo(true));
assertBusy(new Runnable() {
@Override
public void run() {
assertThat(client().admin().cluster().prepareState().all().get().getState().routingNodes().hasUnassigned(), equalTo(true));
}
});
assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(1));
internalCluster().startNode(); // this will use the same data location as the stopped node
ensureGreen("test");
@ -89,11 +94,16 @@ public class DelayedAllocationTests extends ElasticsearchIntegrationTest {
prepareCreate("test").setSettings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, TimeValue.timeValueMillis(100))).get();
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get();
ensureGreen("test");
indexRandomData();
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));
ensureGreen("test");
internalCluster().startNode();
// do a second round with longer delay to make sure it happens
assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get());
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));
ensureGreen("test");
}
/**
@ -107,13 +117,18 @@ public class DelayedAllocationTests extends ElasticsearchIntegrationTest {
prepareCreate("test").setSettings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, TimeValue.timeValueHours(1))).get();
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get();
ensureGreen("test");
indexRandomData();
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));
assertThat(client().admin().cluster().prepareState().all().get().getState().routingNodes().hasUnassigned(), equalTo(true));
assertBusy(new Runnable() {
@Override
public void run() {
assertThat(client().admin().cluster().prepareState().all().get().getState().routingNodes().hasUnassigned(), equalTo(true));
}
});
assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(1));
assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, TimeValue.timeValueMillis(100))).get());
assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(100))).get());
ensureGreen("test");
assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(0));
}
@ -129,13 +144,18 @@ public class DelayedAllocationTests extends ElasticsearchIntegrationTest {
prepareCreate("test").setSettings(Settings.builder()
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, TimeValue.timeValueHours(1))).get();
.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueHours(1))).get();
ensureGreen("test");
indexRandomData();
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(findNodeWithShard()));
assertThat(client().admin().cluster().prepareState().all().get().getState().routingNodes().hasUnassigned(), equalTo(true));
assertBusy(new Runnable() {
@Override
public void run() {
assertThat(client().admin().cluster().prepareState().all().get().getState().routingNodes().hasUnassigned(), equalTo(true));
}
});
assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(1));
assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, TimeValue.timeValueMillis(0))).get());
assertAcked(client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, TimeValue.timeValueMillis(0))).get());
ensureGreen("test");
assertThat(client().admin().cluster().prepareHealth().get().getDelayedUnassignedShards(), equalTo(0));
}


@ -264,12 +264,12 @@ public class UnassignedInfoTests extends ElasticsearchAllocationTestCase {
@Test
public void testUnassignedDelayedOnlyOnNodeLeft() throws Exception {
final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, null);
long delay = unassignedInfo.getAllocationDelayTimeoutSetting(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, "10h").build(), Settings.EMPTY);
long delay = unassignedInfo.getAllocationDelayTimeoutSetting(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
assertThat(delay, equalTo(TimeValue.timeValueHours(10).millis()));
assertBusy(new Runnable() {
@Override
public void run() {
long delay = unassignedInfo.getDelayAllocationExpirationIn(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, "10h").build(), Settings.EMPTY);
long delay = unassignedInfo.getDelayAllocationExpirationIn(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
assertThat(delay, greaterThan(0l));
assertThat(delay, lessThan(TimeValue.timeValueHours(10).millis()));
}
@ -284,9 +284,9 @@ public class UnassignedInfoTests extends ElasticsearchAllocationTestCase {
EnumSet<UnassignedInfo.Reason> reasons = EnumSet.allOf(UnassignedInfo.Reason.class);
reasons.remove(UnassignedInfo.Reason.NODE_LEFT);
UnassignedInfo unassignedInfo = new UnassignedInfo(RandomPicks.randomFrom(getRandom(), reasons), null);
long delay = unassignedInfo.getAllocationDelayTimeoutSetting(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, "10h").build(), Settings.EMPTY);
long delay = unassignedInfo.getAllocationDelayTimeoutSetting(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
assertThat(delay, equalTo(0l));
delay = unassignedInfo.getDelayAllocationExpirationIn(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, "10h").build(), Settings.EMPTY);
delay = unassignedInfo.getDelayAllocationExpirationIn(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), Settings.EMPTY);
assertThat(delay, equalTo(0l));
}
@ -302,7 +302,7 @@ public class UnassignedInfoTests extends ElasticsearchAllocationTestCase {
.routingTable(RoutingTable.builder().addAsNew(metaData.index("test1")).addAsNew(metaData.index("test2"))).build();
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState)).build();
assertThat(UnassignedInfo.getNumberOfDelayedUnassigned(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, "10h").build(), clusterState), equalTo(0));
assertThat(UnassignedInfo.getNumberOfDelayedUnassigned(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), clusterState), equalTo(0));
// starting primaries
clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.routingNodes().shardsWithState(INITIALIZING))).build();
// starting replicas
@ -311,7 +311,7 @@ public class UnassignedInfoTests extends ElasticsearchAllocationTestCase {
// remove node2 and reroute
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build();
clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState)).build();
assertThat(clusterState.prettyPrint(), UnassignedInfo.getNumberOfDelayedUnassigned(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, "10h").build(), clusterState), equalTo(2));
assertThat(clusterState.prettyPrint(), UnassignedInfo.getNumberOfDelayedUnassigned(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), clusterState), equalTo(2));
}
@Test
@ -326,7 +326,7 @@ public class UnassignedInfoTests extends ElasticsearchAllocationTestCase {
.routingTable(RoutingTable.builder().addAsNew(metaData.index("test1")).addAsNew(metaData.index("test2"))).build();
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder().put(newNode("node1")).put(newNode("node2"))).build();
clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState)).build();
assertThat(UnassignedInfo.getNumberOfDelayedUnassigned(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, "10h").build(), clusterState), equalTo(0));
assertThat(UnassignedInfo.getNumberOfDelayedUnassigned(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), clusterState), equalTo(0));
// starting primaries
clusterState = ClusterState.builder(clusterState).routingResult(allocation.applyStartedShards(clusterState, clusterState.routingNodes().shardsWithState(INITIALIZING))).build();
// starting replicas
@ -336,10 +336,10 @@ public class UnassignedInfoTests extends ElasticsearchAllocationTestCase {
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).remove("node2")).build();
clusterState = ClusterState.builder(clusterState).routingResult(allocation.reroute(clusterState)).build();
long nextDelaySetting = UnassignedInfo.findSmallestDelayedAllocationSetting(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, "10h").build(), clusterState);
long nextDelaySetting = UnassignedInfo.findSmallestDelayedAllocationSetting(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), clusterState);
assertThat(nextDelaySetting, equalTo(TimeValue.timeValueHours(10).millis()));
long nextDelay = UnassignedInfo.findNextDelayedAllocationIn(Settings.builder().put(UnassignedInfo.DELAYED_NODE_LEFT_TIMEOUT, "10h").build(), clusterState);
long nextDelay = UnassignedInfo.findNextDelayedAllocationIn(Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, "10h").build(), clusterState);
assertThat(nextDelay, greaterThan(TimeValue.timeValueHours(9).millis()));
assertThat(nextDelay, lessThanOrEqualTo(TimeValue.timeValueHours(10).millis()));
}


@ -72,7 +72,7 @@ public class CustomScriptContextTests extends ElasticsearchIntegrationTest {
}
CompiledScript compiledScript = scriptService.compile(new Script("1", ScriptService.ScriptType.INLINE, "expression", null),
randomFrom(ScriptContext.Standard.values()));
randomFrom(new ScriptContext[] {ScriptContext.Standard.AGGS, ScriptContext.Standard.SEARCH}));
assertThat(compiledScript, notNullValue());
compiledScript = scriptService.compile(new Script("1", ScriptService.ScriptType.INLINE, "mustache", null),


@ -162,7 +162,7 @@ public class IndexedScriptTests extends ElasticsearchIntegrationTest {
fail("update script should have been rejected");
} catch(Exception e) {
assertThat(e.getMessage(), containsString("failed to execute script"));
assertThat(e.getCause().toString(), containsString("scripts of type [indexed], operation [update] and lang [expression] are disabled"));
assertThat(e.getCause().getMessage(), containsString("scripts of type [indexed], operation [update] and lang [expression] are disabled"));
}
try {
String query = "{ \"script_fields\" : { \"test1\" : { \"script_id\" : \"script1\", \"lang\":\"expression\" }}}";


@ -142,8 +142,7 @@ public class OnDiskScriptTests extends ElasticsearchIntegrationTest {
fail("update script should have been rejected");
} catch (Exception e) {
assertThat(e.getMessage(), containsString("failed to execute script"));
assertThat(e.getCause().toString(),
containsString("scripts of type [file], operation [update] and lang [mustache] are disabled"));
assertThat(e.getCause().getMessage(), containsString("scripts of type [file], operation [update] and lang [mustache] are disabled"));
}
}


@ -159,8 +159,8 @@ public class ScriptServiceTests extends ElasticsearchTestCase {
CompiledScript groovyScript = scriptService.compile(
new Script("file_script", ScriptType.FILE, GroovyScriptEngineService.NAME, null), randomFrom(scriptContexts));
assertThat(groovyScript.lang(), equalTo(GroovyScriptEngineService.NAME));
CompiledScript expressionScript = scriptService.compile(new Script("file_script", ScriptType.FILE,
ExpressionScriptEngineService.NAME, null), randomFrom(scriptContexts));
CompiledScript expressionScript = scriptService.compile(new Script("file_script", ScriptType.FILE, ExpressionScriptEngineService.NAME,
null), randomFrom(new ScriptContext[] {ScriptContext.Standard.AGGS, ScriptContext.Standard.SEARCH}));
assertThat(expressionScript.lang(), equalTo(ExpressionScriptEngineService.NAME));
}
@ -207,9 +207,12 @@ public class ScriptServiceTests extends ElasticsearchTestCase {
assertCompileRejected(GroovyScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext);
assertCompileAccepted(GroovyScriptEngineService.NAME, "file_script", ScriptType.FILE, scriptContext);
//expression engine is sandboxed, all scripts are enabled by default
assertCompileAccepted(ExpressionScriptEngineService.NAME, "script", ScriptType.INLINE, scriptContext);
assertCompileAccepted(ExpressionScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext);
assertCompileAccepted(ExpressionScriptEngineService.NAME, "file_script", ScriptType.FILE, scriptContext);
if (!scriptContext.getKey().equals(ScriptContext.Standard.MAPPING.getKey()) &&
!scriptContext.getKey().equals(ScriptContext.Standard.UPDATE.getKey())) {
assertCompileAccepted(ExpressionScriptEngineService.NAME, "script", ScriptType.INLINE, scriptContext);
assertCompileAccepted(ExpressionScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext);
assertCompileAccepted(ExpressionScriptEngineService.NAME, "file_script", ScriptType.FILE, scriptContext);
}
//mustache engine is sandboxed, all scripts are enabled by default
assertCompileAccepted(MustacheScriptEngineService.NAME, "script", ScriptType.INLINE, scriptContext);
assertCompileAccepted(MustacheScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext);
@ -311,6 +314,12 @@ public class ScriptServiceTests extends ElasticsearchTestCase {
//Otherwise they are always considered file ones as they can be found in the static cache.
String script = scriptType == ScriptType.FILE ? "file_script" : "script";
for (ScriptContext scriptContext : this.scriptContexts) {
// skip script contexts that aren't allowed for expressions
if (scriptEngineService instanceof ExpressionScriptEngineService &&
(scriptContext.getKey().equals(ScriptContext.Standard.MAPPING.getKey()) ||
scriptContext.getKey().equals(ScriptContext.Standard.UPDATE.getKey()))) {
continue;
}
//fallback mechanism: 1) engine specific settings 2) op based settings 3) source based settings
ScriptMode scriptMode = engineSettings.get(scriptEngineService.types()[0] + "." + scriptType + "." + scriptContext.getKey());
if (scriptMode == null) {


@ -19,29 +19,45 @@
package org.elasticsearch.script.expression;
import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.Bucket;
import org.elasticsearch.search.aggregations.metrics.stats.Stats;
import org.elasticsearch.search.aggregations.pipeline.SimpleValue;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.seriesArithmetic;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.notNullValue;
public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
@ -49,15 +65,15 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
ensureGreen("test");
Map<String, Object> paramsMap = new HashMap<>();
assert(params.length % 2 == 0);
assert (params.length % 2 == 0);
for (int i = 0; i < params.length; i += 2) {
paramsMap.put(params[i].toString(), params[i + 1]);
}
SearchRequestBuilder req = client().prepareSearch().setIndices("test");
req.setQuery(QueryBuilders.matchAllQuery())
.addSort(SortBuilders.fieldSort("_uid")
.order(SortOrder.ASC))
.addSort(SortBuilders.fieldSort("_uid")
.order(SortOrder.ASC))
.addScriptField("foo", new Script(script, ScriptType.INLINE, "expression", paramsMap));
return req;
}
@ -84,9 +100,9 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
createIndex("test");
ensureGreen("test");
indexRandom(true,
client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"),
client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"),
client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye"));
client().prepareIndex("test", "doc", "1").setSource("text", "hello goodbye"),
client().prepareIndex("test", "doc", "2").setSource("text", "hello hello hello goodbye"),
client().prepareIndex("test", "doc", "3").setSource("text", "hello hello goodebye"));
ScoreFunctionBuilder score = ScoreFunctionBuilders.scriptFunction(new Script("1 / _score", ScriptType.INLINE, "expression", null));
SearchRequestBuilder req = client().prepareSearch().setIndices("test");
req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode("replace"));
@ -242,9 +258,9 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
createIndex("test");
ensureGreen("test");
indexRandom(true,
client().prepareIndex("test", "doc", "1").setSource("x", 10),
client().prepareIndex("test", "doc", "2").setSource("x", 3),
client().prepareIndex("test", "doc", "3").setSource("x", 5));
client().prepareIndex("test", "doc", "1").setSource("x", 10),
client().prepareIndex("test", "doc", "2").setSource("x", 3),
client().prepareIndex("test", "doc", "3").setSource("x", 5));
// a = int, b = double, c = long
String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 1 : 0)";
SearchResponse rsp = buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L).get();
@ -262,9 +278,9 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
fail("Expected expression compilation failure");
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString() + "should have contained ExpressionScriptCompilationException",
e.toString().contains("ExpressionScriptCompilationException"), equalTo(true));
e.toString().contains("ExpressionScriptCompilationException"), equalTo(true));
assertThat(e.toString() + "should have contained compilation failure",
e.toString().contains("Failed to parse expression"), equalTo(true));
e.toString().contains("Failed to parse expression"), equalTo(true));
}
}
@ -275,9 +291,9 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
fail("Expected string parameter to cause failure");
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString() + "should have contained ExpressionScriptCompilationException",
e.toString().contains("ExpressionScriptCompilationException"), equalTo(true));
e.toString().contains("ExpressionScriptCompilationException"), equalTo(true));
assertThat(e.toString() + "should have contained non-numeric parameter error",
e.toString().contains("must be a numeric type"), equalTo(true));
e.toString().contains("must be a numeric type"), equalTo(true));
}
}
@ -288,9 +304,9 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
fail("Expected text field to cause execution failure");
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString() + "should have contained ExpressionScriptCompilationException",
e.toString().contains("ExpressionScriptCompilationException"), equalTo(true));
e.toString().contains("ExpressionScriptCompilationException"), equalTo(true));
assertThat(e.toString() + "should have contained non-numeric field error",
e.toString().contains("must be numeric"), equalTo(true));
e.toString().contains("must be numeric"), equalTo(true));
}
}
@ -301,9 +317,9 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
fail("Expected bogus variable to cause execution failure");
} catch (SearchPhaseExecutionException e) {
assertThat(e.toString() + "should have contained ExpressionScriptCompilationException",
e.toString().contains("ExpressionScriptCompilationException"), equalTo(true));
e.toString().contains("ExpressionScriptCompilationException"), equalTo(true));
assertThat(e.toString() + "should have contained unknown variable error",
e.toString().contains("Unknown variable"), equalTo(true));
e.toString().contains("Unknown variable"), equalTo(true));
}
}
@ -338,14 +354,14 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
createIndex("test");
ensureGreen("test");
indexRandom(true,
client().prepareIndex("test", "doc", "1").setSource("x", 5, "y", 1.2),
client().prepareIndex("test", "doc", "2").setSource("x", 10, "y", 1.4),
client().prepareIndex("test", "doc", "3").setSource("x", 13, "y", 1.8));
client().prepareIndex("test", "doc", "1").setSource("x", 5, "y", 1.2),
client().prepareIndex("test", "doc", "2").setSource("x", 10, "y", 1.4),
client().prepareIndex("test", "doc", "3").setSource("x", 13, "y", 1.8));
SearchRequestBuilder req = client().prepareSearch().setIndices("test");
req.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(AggregationBuilders.stats("int_agg").field("x").script("_value * 3").lang(ExpressionScriptEngineService.NAME))
.addAggregation(AggregationBuilders.stats("double_agg").field("y").script("_value - 1.1").lang(ExpressionScriptEngineService.NAME));
.addAggregation(AggregationBuilders.stats("int_agg").field("x").script("_value * 3").lang(ExpressionScriptEngineService.NAME))
.addAggregation(AggregationBuilders.stats("double_agg").field("y").script("_value - 1.1").lang(ExpressionScriptEngineService.NAME));
SearchResponse rsp = req.get();
assertEquals(3, rsp.getHits().getTotalHits());
@ -370,9 +386,9 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
SearchRequestBuilder req = client().prepareSearch().setIndices("test");
req.setQuery(QueryBuilders.matchAllQuery())
.addAggregation(
AggregationBuilders.terms("term_agg").field("text")
.script(new Script("_value", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null)));
.addAggregation(
AggregationBuilders.terms("term_agg").field("text")
.script(new Script("_value", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null)));
String message;
try {
@ -385,8 +401,161 @@ public class ExpressionScriptTests extends ElasticsearchIntegrationTest {
message = e.toString();
}
assertThat(message + "should have contained ExpressionScriptExecutionException",
message.contains("ExpressionScriptExecutionException"), equalTo(true));
message.contains("ExpressionScriptExecutionException"), equalTo(true));
assertThat(message + "should have contained text variable error",
message.contains("text variable"), equalTo(true));
message.contains("text variable"), equalTo(true));
}
// series of unit tests for using expressions as executable scripts
public void testExecutableScripts() throws Exception {
Map<String, Object> vars = new HashMap<>();
vars.put("a", 2.5);
vars.put("b", 3);
vars.put("xyz", -1);
Expression expr = JavascriptCompiler.compile("a+b+xyz");
ExpressionExecutableScript ees = new ExpressionExecutableScript(expr, vars);
assertEquals((Double) ees.run(), 4.5, 0.001);
ees.setNextVar("b", -2.5);
assertEquals((Double) ees.run(), -1, 0.001);
ees.setNextVar("a", -2.5);
ees.setNextVar("b", -2.5);
ees.setNextVar("xyz", -2.5);
assertEquals((Double) ees.run(), -7.5, 0.001);
String message;
try {
vars = new HashMap<>();
vars.put("a", 1);
ees = new ExpressionExecutableScript(expr, vars);
ees.run();
fail("An incorrect number of variables were allowed to be used in an expression.");
} catch (ScriptException se) {
message = se.getMessage();
assertThat(message + " should have contained number of variables", message.contains("number of variables"), equalTo(true));
}
try {
vars = new HashMap<>();
vars.put("a", 1);
vars.put("b", 3);
vars.put("c", -1);
ees = new ExpressionExecutableScript(expr, vars);
ees.run();
fail("A variable was allowed to be set that does not exist in the expression.");
} catch (ScriptException se) {
message = se.getMessage();
assertThat(message + " should have contained does not exist in", message.contains("does not exist in"), equalTo(true));
}
try {
vars = new HashMap<>();
vars.put("a", 1);
vars.put("b", 3);
vars.put("xyz", "hello");
ees = new ExpressionExecutableScript(expr, vars);
ees.run();
fail("A non-number was allowed to be use in the expression.");
} catch (ScriptException se) {
message = se.getMessage();
assertThat(message + " should have contained process numbers", message.contains("process numbers"), equalTo(true));
}
}
// test to make sure expressions are not allowed to be used as update scripts
public void testInvalidUpdateScript() throws Exception {
try {
createIndex("test_index");
ensureGreen("test_index");
indexRandom(true, client().prepareIndex("test_index", "doc", "1").setSource("text_field", "text"));
UpdateRequestBuilder urb = client().prepareUpdate().setIndex("test_index");
urb.setType("doc");
urb.setId("1");
urb.setScript(new Script("0", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null));
urb.get();
fail("Expression scripts should not be allowed to run as update scripts.");
} catch (Exception e) {
String message = e.getMessage();
assertThat(message + " should have contained failed to execute", message.contains("failed to execute"), equalTo(true));
message = e.getCause().getMessage();
assertThat(message + " should have contained not supported", message.contains("not supported"), equalTo(true));
}
}
// test to make sure expressions are not allowed to be used as mapping scripts
public void testInvalidMappingScript() throws Exception {
try {
createIndex("test_index");
ensureGreen("test_index");
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
builder.startObject("transform");
builder.field("script", "1.0");
builder.field("lang", ExpressionScriptEngineService.NAME);
builder.endObject();
builder.startObject("properties");
builder.startObject("double_field");
builder.field("type", "double");
builder.endObject();
builder.endObject();
builder.endObject();
client().admin().indices().preparePutMapping("test_index").setType("trans_test").setSource(builder).get();
client().prepareIndex("test_index", "trans_test", "1").setSource("double_field", 0.0).get();
fail("Expression scripts should not be allowed to run as mapping scripts.");
} catch (Exception e) {
String message = ExceptionsHelper.detailedMessage(e);
assertThat(message + " should have contained failed to parse", message.contains("failed to parse"), equalTo(true));
assertThat(message + " should have contained not supported", message.contains("not supported"), equalTo(true));
}
}
// test to make sure expressions are allowed to be used for reduce in pipeline aggregations
public void testPipelineAggregationScript() throws Exception {
createIndex("agg_index");
ensureGreen("agg_index");
indexRandom(true,
client().prepareIndex("agg_index", "doc", "1").setSource("one", 1.0, "two", 2.0, "three", 3.0, "four", 4.0),
client().prepareIndex("agg_index", "doc", "2").setSource("one", 2.0, "two", 2.0, "three", 3.0, "four", 4.0),
client().prepareIndex("agg_index", "doc", "3").setSource("one", 3.0, "two", 2.0, "three", 3.0, "four", 4.0),
client().prepareIndex("agg_index", "doc", "4").setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0),
client().prepareIndex("agg_index", "doc", "5").setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0));
SearchResponse response = client()
.prepareSearch("agg_index")
.addAggregation(
histogram("histogram")
.field("one")
.interval(2)
.subAggregation(sum("twoSum").field("two"))
.subAggregation(sum("threeSum").field("three"))
.subAggregation(sum("fourSum").field("four"))
.subAggregation(
seriesArithmetic("totalSum").setBucketsPaths("twoSum", "threeSum", "fourSum").script(
new Script("_value0 + _value1 + _value2", ScriptType.INLINE, ExpressionScriptEngineService.NAME, null)))).execute().actionGet();
InternalHistogram<Bucket> histogram = response.getAggregations().get("histogram");
assertThat(histogram, notNullValue());
assertThat(histogram.getName(), equalTo("histogram"));
List<Bucket> buckets = histogram.getBuckets();
for (int bucketCount = 0; bucketCount < buckets.size(); ++bucketCount) {
Histogram.Bucket bucket = buckets.get(bucketCount);
if (bucket.getDocCount() == 1) {
SimpleValue seriesArithmetic = bucket.getAggregations().get("totalSum");
assertThat(seriesArithmetic, notNullValue());
double seriesArithmeticValue = seriesArithmetic.value();
assertEquals(9.0, seriesArithmeticValue, 0.001);
} else if (bucket.getDocCount() == 2) {
SimpleValue seriesArithmetic = bucket.getAggregations().get("totalSum");
assertThat(seriesArithmetic, notNullValue());
double seriesArithmeticValue = seriesArithmetic.value();
assertEquals(18.0, seriesArithmeticValue, 0.001);
} else {
fail("Incorrect number of documents in a bucket in the histogram.");
}
}
}
}


@ -29,6 +29,7 @@ import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.impl.client.HttpClients;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.index.shard.MergeSchedulerConfig;
import org.apache.lucene.store.StoreRateLimiting;
import org.apache.lucene.util.IOUtils;
@ -508,6 +509,12 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase
if (random.nextBoolean()) {
builder.put(NettyTransport.PING_SCHEDULE, RandomInts.randomIntBetween(random, 100, 2000) + "ms");
}
if (randomBoolean()) {
// keep this low so we don't stall tests
builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING, RandomInts.randomIntBetween(random, 1, 15) + "ms");
}
return builder;
}


@ -20,6 +20,7 @@ package org.elasticsearch.test.test;
import com.carrotsearch.randomizedtesting.annotations.Repeat;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.TestCluster;
import org.junit.Test;
@ -39,6 +40,7 @@ public class SuiteScopeClusterTests extends ElasticsearchIntegrationTest {
private static Long CLUSTER_SEED = null;
@Test
@SuppressForbidden(reason = "repeat is a feature here")
@Repeat(iterations = 10, useConstantSeed = true)
public void testReproducible() throws IOException {
if (ITER++ == 0) {


@ -53,3 +53,5 @@ org.joda.time.DateTime#<init>(int, int, int, int, int)
org.joda.time.DateTime#<init>(int, int, int, int, int, int)
org.joda.time.DateTime#<init>(int, int, int, int, int, int, int)
org.joda.time.DateTime#now()
com.google.common.collect.Iterators#emptyIterator() @ Use Collections.emptyIterator instead


@ -16,5 +16,6 @@
com.carrotsearch.randomizedtesting.RandomizedTest#globalTempDir() @ Use newTempDirPath() instead
com.carrotsearch.randomizedtesting.annotations.Seed @ Don't commit hardcoded seeds
com.carrotsearch.randomizedtesting.annotations.Repeat @ Don't commit hardcoded repeats
org.apache.lucene.codecs.Codec#setDefault(org.apache.lucene.codecs.Codec) @ Use the SuppressCodecs("*") annotation instead


@ -48,6 +48,8 @@ To use it, first create a `BulkProcessor` instance:
[source,java]
--------------------------------------------------
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
BulkProcessor bulkProcessor = BulkProcessor.builder(
client, <1>


@ -34,6 +34,7 @@ import org.elasticsearch.cloud.azure.storage.AzureStorageServiceImpl;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.repositories.RepositoryMissingException;
import org.elasticsearch.repositories.RepositoryVerificationException;
import org.elasticsearch.repositories.azure.AzureRepository.Repository;
@ -41,12 +42,13 @@ import org.elasticsearch.snapshots.SnapshotMissingException;
import org.elasticsearch.snapshots.SnapshotState;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.store.MockFSDirectoryService;
import org.junit.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.net.URISyntaxException;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
@ -107,7 +109,7 @@ public class AzureSnapshotRestoreITest extends AbstractAzureTest {
.setType("azure").setSettings(Settings.settingsBuilder()
.put(Storage.CONTAINER, getContainerName())
.put(Storage.BASE_PATH, getRepositoryPath())
.put(Storage.CHUNK_SIZE, randomIntBetween(1000, 10000))
.put(Storage.CHUNK_SIZE, randomIntBetween(1000, 10000), ByteSizeUnit.BYTES)
).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
@ -180,14 +182,14 @@ public class AzureSnapshotRestoreITest extends AbstractAzureTest {
.setType("azure").setSettings(Settings.settingsBuilder()
.put(Repository.CONTAINER, getContainerName().concat("-1"))
.put(Repository.BASE_PATH, getRepositoryPath())
.put(Repository.CHUNK_SIZE, randomIntBetween(1000, 10000))
.put(Repository.CHUNK_SIZE, randomIntBetween(1000, 10000), ByteSizeUnit.BYTES)
).get();
assertThat(putRepositoryResponse1.isAcknowledged(), equalTo(true));
PutRepositoryResponse putRepositoryResponse2 = client.admin().cluster().preparePutRepository("test-repo2")
.setType("azure").setSettings(Settings.settingsBuilder()
.put(Repository.CONTAINER, getContainerName().concat("-2"))
.put(Repository.BASE_PATH, getRepositoryPath())
.put(Repository.CHUNK_SIZE, randomIntBetween(1000, 10000))
.put(Repository.CHUNK_SIZE, randomIntBetween(1000, 10000), ByteSizeUnit.BYTES)
).get();
assertThat(putRepositoryResponse2.isAcknowledged(), equalTo(true));
@ -359,7 +361,7 @@ public class AzureSnapshotRestoreITest extends AbstractAzureTest {
.setType("azure").setSettings(Settings.settingsBuilder()
.put(Repository.CONTAINER, container)
.put(Repository.BASE_PATH, getRepositoryPath())
.put(Repository.CHUNK_SIZE, randomIntBetween(1000, 10000))
.put(Repository.CHUNK_SIZE, randomIntBetween(1000, 10000), ByteSizeUnit.BYTES)
).get();
client().admin().cluster().prepareDeleteRepository("test-repo").get();
try {
@ -390,7 +392,7 @@ public class AzureSnapshotRestoreITest extends AbstractAzureTest {
.setType("azure").setSettings(Settings.settingsBuilder()
.put(Repository.CONTAINER, getContainerName())
.put(Repository.BASE_PATH, getRepositoryPath())
.put(Repository.CHUNK_SIZE, randomIntBetween(1000, 10000))
.put(Repository.CHUNK_SIZE, randomIntBetween(1000, 10000), ByteSizeUnit.BYTES)
).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));


@ -27,6 +27,7 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.cloud.azure.storage.AzureStorageServiceMock;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.snapshots.SnapshotState;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;
@ -52,7 +53,7 @@ public class AzureSnapshotRestoreTest extends AbstractAzureRepositoryServiceTest
PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository("test-repo")
.setType("azure").setSettings(Settings.settingsBuilder()
.put("base_path", basePath)
.put("chunk_size", randomIntBetween(1000, 10000))
.put("chunk_size", randomIntBetween(1000, 10000), ByteSizeUnit.BYTES)
).get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));