Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-03-25 01:19:02 +00:00)
[mapping] Remove transform

Removes the mapping transform feature, which made debugging very difficult when it was used. Users should transform their documents on the way into Elasticsearch rather than having Elasticsearch do it.

Closes #12674
This commit is contained in:
parent 7f76d91e3e / commit 28633fae21
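For context: the removed feature let a mapping carry a script that rewrote each document's _source at index time. A minimal sketch of such a mapping, modeled on the deleted TransformOnIndexMapperTests at the bottom of this commit (written inside an ESIntegTestCase; index, type, and field names are illustrative, and the Groovy plugin is assumed to be installed):

    // Hypothetical pre-2.1 index whose mapping declares an index-time transform.
    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject();
    mapping.startObject("transform");
    mapping.field("script", "ctx._source['destination'] = ctx._source['content']");
    mapping.field("lang", "groovy");
    mapping.endObject();
    mapping.startObject("properties");
    mapping.startObject("content").field("type", "string").endObject();
    mapping.endObject();
    mapping.endObject();
    client().admin().indices().prepareCreate("test").addMapping("test", mapping).get();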
GetRequestBuilder.java

@@ -108,23 +108,6 @@ public class GetRequestBuilder extends SingleShardOperationRequestBuilder<GetReq
         return this;
     }

-    /**
-     * Should the source be transformed using the script to used at index time
-     * (if any)? Note that calling this without having called setFetchSource
-     * will automatically turn on source fetching.
-     *
-     * @return this for chaining
-     */
-    public GetRequestBuilder setTransformSource(boolean transform) {
-        FetchSourceContext context = request.fetchSourceContext();
-        if (context == null) {
-            context = new FetchSourceContext(true);
-            request.fetchSourceContext(context);
-        }
-        context.transformSource(transform);
-        return this;
-    }
-
     /**
      * Indicate that _source should be returned, with an "include" and/or "exclude" set which can include simple wildcard
      * elements.
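The deleted builder method above was the client-side switch for transform-at-fetch; the deleted TransformOnIndexMapperTests at the bottom of this commit exercised it roughly like this (sketch, inside an ESIntegTestCase):

    // Fetch the raw stored _source, exactly as it was sent in the index request.
    GetResponse raw = client().prepareGet("test", "test", "righttitle").get();
    // Removed API: re-apply the mapping's transform script while loading the document.
    GetResponse transformed = client().prepareGet("test", "test", "righttitle").setTransformSource(true).get();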
MetaDataIndexUpgradeService.java

@@ -32,7 +32,6 @@ import org.elasticsearch.index.analysis.AnalysisService;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.similarity.SimilarityService;
-import org.elasticsearch.script.ScriptService;

 import java.util.Collections;
 import java.util.Set;
@@ -49,13 +48,9 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet;
  * are restored from a repository.
  */
 public class MetaDataIndexUpgradeService extends AbstractComponent {
-
-    private final ScriptService scriptService;
-
     @Inject
-    public MetaDataIndexUpgradeService(Settings settings, ScriptService scriptService) {
+    public MetaDataIndexUpgradeService(Settings settings) {
         super(settings);
-        this.scriptService = scriptService;
     }

     /**
@@ -221,9 +216,8 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
         IndexSettings indexSettings = new IndexSettings(indexMetaData, this.settings, Collections.EMPTY_LIST);
         SimilarityService similarityService = new SimilarityService(indexSettings, Collections.EMPTY_MAP);

-
         try (AnalysisService analysisService = new FakeAnalysisService(indexSettings)) {
-            try (MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, scriptService)) {
+            try (MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService)) {
                 for (ObjectCursor<MappingMetaData> cursor : indexMetaData.getMappings().values()) {
                     MappingMetaData mappingMetaData = cursor.value;
                     mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), false, false);
ShardGetService.java

@@ -282,14 +282,11 @@ public final class ShardGetService extends AbstractIndexShardComponent {

         boolean sourceFieldFiltering = sourceFieldMapper.includes().length > 0 || sourceFieldMapper.excludes().length > 0;
         boolean sourceFetchFiltering = fetchSourceContext.includes().length > 0 || fetchSourceContext.excludes().length > 0;
-        if (fetchSourceContext.transformSource() || sourceFieldFiltering || sourceFetchFiltering) {
+        if (sourceFieldFiltering || sourceFetchFiltering) {
             // TODO: The source might parsed and available in the sourceLookup but that one uses unordered maps so different. Do we care?
             Tuple<XContentType, Map<String, Object>> typeMapTuple = XContentHelper.convertToMap(source.source, true);
             XContentType sourceContentType = typeMapTuple.v1();
             Map<String, Object> sourceAsMap = typeMapTuple.v2();
-            if (fetchSourceContext.transformSource()) {
-                sourceAsMap = docMapper.transformSourceAsMap(sourceAsMap);
-            }
             if (sourceFieldFiltering) {
                 sourceAsMap = XContentMapValues.filter(sourceAsMap, sourceFieldMapper.includes(), sourceFieldMapper.excludes());
             }
@@ -397,16 +394,13 @@ public final class ShardGetService extends AbstractIndexShardComponent {

         if (!fetchSourceContext.fetchSource()) {
             source = null;
-        } else if (fetchSourceContext.transformSource() || fetchSourceContext.includes().length > 0 || fetchSourceContext.excludes().length > 0) {
+        } else if (fetchSourceContext.includes().length > 0 || fetchSourceContext.excludes().length > 0) {
             Map<String, Object> sourceAsMap;
             XContentType sourceContentType = null;
             // TODO: The source might parsed and available in the sourceLookup but that one uses unordered maps so different. Do we care?
             Tuple<XContentType, Map<String, Object>> typeMapTuple = XContentHelper.convertToMap(source, true);
             sourceContentType = typeMapTuple.v1();
             sourceAsMap = typeMapTuple.v2();
-            if (fetchSourceContext.transformSource()) {
-                sourceAsMap = docMapper.transformSourceAsMap(sourceAsMap);
-            }
             sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes());
             try {
                 source = XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap).bytes();
DocumentMapper.java

@@ -35,7 +35,6 @@ import org.elasticsearch.common.util.concurrent.ReleasableLock;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.mapper.Mapping.SourceTransform;
 import org.elasticsearch.index.mapper.internal.AllFieldMapper;
 import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
 import org.elasticsearch.index.mapper.internal.IdFieldMapper;
@@ -79,8 +78,6 @@ public class DocumentMapper implements ToXContent {

     private Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> rootMappers = new LinkedHashMap<>();

-    private List<SourceTransform> sourceTransforms = new ArrayList<>(1);
-
     private final Settings indexSettings;

     private final RootObjectMapper rootObjectMapper;
@@ -126,24 +123,9 @@ public class DocumentMapper implements ToXContent {
             return this;
         }

-        public Builder transform(ScriptService scriptService, Script script) {
-            sourceTransforms.add(new ScriptTransform(scriptService, script));
-            return this;
-        }
-
-        /**
-         * @deprecated Use {@link #transform(ScriptService, Script)} instead.
-         */
-        @Deprecated
-        public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language,
-                Map<String, Object> parameters) {
-            sourceTransforms.add(new ScriptTransform(scriptService, new Script(script, scriptType, language, parameters)));
-            return this;
-        }
-
         public DocumentMapper build(MapperService mapperService, DocumentMapperParser docMapperParser) {
             Objects.requireNonNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
-            return new DocumentMapper(mapperService, indexSettings, docMapperParser, rootObjectMapper, meta, rootMappers, sourceTransforms, mapperService.mappingLock);
+            return new DocumentMapper(mapperService, indexSettings, docMapperParser, rootObjectMapper, meta, rootMappers, mapperService.mappingLock);
         }
     }

@@ -171,7 +153,6 @@ public class DocumentMapper implements ToXContent {
                           RootObjectMapper rootObjectMapper,
                           Map<String, Object> meta,
                           Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> rootMappers,
-                          List<SourceTransform> sourceTransforms,
                           ReentrantReadWriteLock mappingLock) {
         this.mapperService = mapperService;
         this.type = rootObjectMapper.name();
@@ -180,7 +161,6 @@ public class DocumentMapper implements ToXContent {
                 Version.indexCreated(indexSettings),
                 rootObjectMapper,
                 rootMappers.values().toArray(new MetadataFieldMapper[rootMappers.values().size()]),
-                sourceTransforms.toArray(new SourceTransform[sourceTransforms.size()]),
                 meta);
         this.documentParser = new DocumentParser(indexSettings, docMapperParser, this, new ReleasableLock(mappingLock.readLock()));

@@ -369,15 +349,6 @@ public class DocumentMapper implements ToXContent {
         }
     }

-    /**
-     * Transform the source when it is expressed as a map. This is public so it can be transformed the source is loaded.
-     * @param sourceAsMap source to transform. This may be mutated by the script.
-     * @return transformed version of transformMe. This may actually be the same object as sourceAsMap
-     */
-    public Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap) {
-        return DocumentParser.transformSourceAsMap(mapping, sourceAsMap);
-    }
-
     public boolean isParent(String type) {
         return mapperService.getParentTypes().contains(type);
     }
@@ -430,42 +401,4 @@ public class DocumentMapper implements ToXContent {
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         return mapping.toXContent(builder, params);
     }
-
-    /**
-     * Script based source transformation.
-     */
-    private static class ScriptTransform implements SourceTransform {
-        private final ScriptService scriptService;
-        /**
-         * The script to transform the source document before indexing.
-         */
-        private final Script script;
-
-        public ScriptTransform(ScriptService scriptService, Script script) {
-            this.scriptService = scriptService;
-            this.script = script;
-        }
-
-        @Override
-        @SuppressWarnings("unchecked")
-        public Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap) {
-            try {
-                // We use the ctx variable and the _source name to be consistent with the update api.
-                ExecutableScript executable = scriptService.executable(script, ScriptContext.Standard.MAPPING, null);
-                Map<String, Object> ctx = new HashMap<>(1);
-                ctx.put("_source", sourceAsMap);
-                executable.setNextVar("ctx", ctx);
-                executable.run();
-                ctx = (Map<String, Object>) executable.unwrap(ctx);
-                return (Map<String, Object>) ctx.get("_source");
-            } catch (Exception e) {
-                throw new IllegalArgumentException("failed to execute script", e);
-            }
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            return script.toXContent(builder, params);
-        }
-    }
 }
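The deleted ScriptTransform pins down the old contract: the script received the parsed document under ctx._source, and whatever it left there was what got indexed. The replacement recommended in the commit message is to make the same change in the application before sending the index request; a minimal sketch under that assumption, with field names and logic borrowed from the deleted tests:

    // Build the document and apply the old transform's logic up front, in plain Java.
    Map<String, Object> source = new HashMap<>();
    source.put("content", "findme");
    source.put("title", "table");
    // Equivalent of the removed script: copy content to destination when title starts with 't'...
    Object title = source.get("title");
    if (title instanceof String && ((String) title).startsWith("t")) {
        source.put("destination", source.get("content"));
    }
    // ...then drop the original field so only the transformed shape is stored and indexed.
    source.remove("content");
    client().prepareIndex("test", "test", "righttitle").setSource(source).get();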
DocumentMapperParser.java

@@ -42,8 +42,6 @@ import org.elasticsearch.index.mapper.ip.IpFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.mapper.object.RootObjectMapper;
 import org.elasticsearch.index.similarity.SimilarityService;
-import org.elasticsearch.script.Script;
-import org.elasticsearch.script.ScriptService;

 import java.util.*;

@@ -58,7 +56,6 @@ public class DocumentMapperParser {
     final AnalysisService analysisService;
     private static final ESLogger logger = Loggers.getLogger(DocumentMapperParser.class);
     private final SimilarityService similarityService;
-    private final ScriptService scriptService;

     private final RootObjectMapper.TypeParser rootObjectTypeParser = new RootObjectMapper.TypeParser();

@@ -71,13 +68,12 @@ public class DocumentMapperParser {
     private volatile SortedMap<String, Mapper.TypeParser> additionalRootMappers;

     public DocumentMapperParser(IndexSettings indexSettings, MapperService mapperService, AnalysisService analysisService,
-                                SimilarityService similarityService, ScriptService scriptService) {
+                                SimilarityService similarityService) {
         this.indexSettings = indexSettings.getSettings();
         this.parseFieldMatcher = new ParseFieldMatcher(this.indexSettings);
         this.mapperService = mapperService;
         this.analysisService = analysisService;
         this.similarityService = similarityService;
-        this.scriptService = scriptService;
         Map<String, Mapper.TypeParser> typeParsers = new HashMap<>();
         typeParsers.put(ByteFieldMapper.CONTENT_TYPE, new ByteFieldMapper.TypeParser());
         typeParsers.put(ShortFieldMapper.CONTENT_TYPE, new ShortFieldMapper.TypeParser());
@@ -213,29 +209,13 @@ public class DocumentMapperParser {
             String fieldName = Strings.toUnderscoreCase(entry.getKey());
             Object fieldNode = entry.getValue();

-            if ("transform".equals(fieldName)) {
-                if (fieldNode instanceof Map) {
-                    parseTransform(docBuilder, (Map<String, Object>) fieldNode, parserContext.indexVersionCreated());
-                } else if (fieldNode instanceof List) {
-                    for (Object transformItem: (List)fieldNode) {
-                        if (!(transformItem instanceof Map)) {
-                            throw new MapperParsingException("Elements of transform list must be objects but one was: " + fieldNode);
-                        }
-                        parseTransform(docBuilder, (Map<String, Object>) transformItem, parserContext.indexVersionCreated());
-                    }
-                } else {
-                    throw new MapperParsingException("Transform must be an object or an array but was: " + fieldNode);
-                }
-                iterator.remove();
-            } else {
-                Mapper.TypeParser typeParser = rootTypeParsers.get(fieldName);
-                if (typeParser != null) {
-                    iterator.remove();
-                    Map<String, Object> fieldNodeMap = (Map<String, Object>) fieldNode;
-                    docBuilder.put((MetadataFieldMapper.Builder)typeParser.parse(fieldName, fieldNodeMap, parserContext));
-                    fieldNodeMap.remove("type");
-                    checkNoRemainingFields(fieldName, fieldNodeMap, parserContext.indexVersionCreated());
-                }
+            Mapper.TypeParser typeParser = rootTypeParsers.get(fieldName);
+            if (typeParser != null) {
+                iterator.remove();
+                Map<String, Object> fieldNodeMap = (Map<String, Object>) fieldNode;
+                docBuilder.put((MetadataFieldMapper.Builder) typeParser.parse(fieldName, fieldNodeMap, parserContext));
+                fieldNodeMap.remove("type");
+                checkNoRemainingFields(fieldName, fieldNodeMap, parserContext.indexVersionCreated());
             }
         }

@@ -273,14 +253,6 @@ public class DocumentMapperParser {
         return remainingFields.toString();
     }

-    private void parseTransform(DocumentMapper.Builder docBuilder, Map<String, Object> transformConfig, Version indexVersionCreated) {
-        Script script = Script.parse(transformConfig, true, parseFieldMatcher);
-        if (script != null) {
-            docBuilder.transform(scriptService, script);
-        }
-        checkNoRemainingFields(transformConfig, indexVersionCreated, "Transform config has unsupported parameters: ");
-    }
-
     private Tuple<String, Map<String, Object>> extractMapping(String type, String source) throws MapperParsingException {
         Map<String, Object> root;
         try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
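The branch removed from the root-field loop above also records the accepted shapes: transform had to be a single object or a list of objects, and anything else raised a MapperParsingException. Built with XContentBuilder, the list form looked roughly like this (hypothetical scripts; transforms ran in list order):

    // Pre-2.1 mapping fragment declaring several transforms, applied in order at index time.
    XContentBuilder mapping = XContentFactory.jsonBuilder().startObject();
    mapping.startArray("transform");
    mapping.startObject().field("script", "ctx._source.first = true").field("lang", "groovy").endObject();
    mapping.startObject().field("script", "ctx._source.second = true").field("lang", "groovy").endObject();
    mapping.endArray();
    mapping.endObject();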
DocumentParser.java

@@ -92,9 +92,6 @@ class DocumentParser implements Closeable {
         if (parser == null) {
             parser = XContentHelper.createParser(source.source());
         }
-        if (mapping.sourceTransforms.length > 0) {
-            parser = transform(mapping, parser);
-        }
         context.reset(parser, new ParseContext.Document(), source);

         // will result in START_OBJECT
@@ -764,29 +761,10 @@ class DocumentParser implements Closeable {
         return mapper;
     }

-    private static XContentParser transform(Mapping mapping, XContentParser parser) throws IOException {
-        Map<String, Object> transformed;
-        try (XContentParser autoCloses = parser) {
-            transformed = transformSourceAsMap(mapping, parser.mapOrdered());
-        }
-        XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()).value(transformed);
-        return parser.contentType().xContent().createParser(builder.bytes());
-    }
-
     private static ObjectMapper.Dynamic dynamicOrDefault(ObjectMapper.Dynamic dynamic) {
         return dynamic == null ? ObjectMapper.Dynamic.TRUE : dynamic;
     }

-    static Map<String, Object> transformSourceAsMap(Mapping mapping, Map<String, Object> sourceAsMap) {
-        if (mapping.sourceTransforms.length == 0) {
-            return sourceAsMap;
-        }
-        for (Mapping.SourceTransform transform : mapping.sourceTransforms) {
-            sourceAsMap = transform.transformSourceAsMap(sourceAsMap);
-        }
-        return sourceAsMap;
-    }
-
     @Override
     public void close() {
         cache.close();
MapperService.java

@@ -107,16 +107,15 @@ public class MapperService extends AbstractIndexComponent implements Closeable {

     @Inject
     public MapperService(IndexSettings indexSettings, AnalysisService analysisService,
-                         SimilarityService similarityService,
-                         ScriptService scriptService) {
+                         SimilarityService similarityService) {
         super(indexSettings);
         this.analysisService = analysisService;
         this.fieldTypes = new FieldTypeLookup();
-        this.documentParser = new DocumentMapperParser(indexSettings, this, analysisService, similarityService, scriptService);
+        this.documentParser = new DocumentMapperParser(indexSettings, this, analysisService, similarityService);
         this.indexAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultIndexAnalyzer(), p -> p.indexAnalyzer());
         this.searchAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchAnalyzer(), p -> p.searchAnalyzer());
         this.searchQuoteAnalyzer = new MapperAnalyzerWrapper(analysisService.defaultSearchQuoteAnalyzer(), p -> p.searchQuoteAnalyzer());


         this.dynamic = this.indexSettings.getSettings().getAsBoolean("index.mapper.dynamic", true);
         defaultPercolatorMappingSource = "{\n" +
             "\"_default_\":{\n" +
Mapping.java

@@ -43,26 +43,13 @@ public final class Mapping implements ToXContent {

     public static final List<String> LEGACY_INCLUDE_IN_OBJECT = Arrays.asList("_all", "_id", "_parent", "_routing", "_timestamp", "_ttl");

-    /**
-     * Transformations to be applied to the source before indexing and/or after loading.
-     */
-    public interface SourceTransform extends ToXContent {
-        /**
-         * Transform the source when it is expressed as a map. This is public so it can be transformed the source is loaded.
-         * @param sourceAsMap source to transform. This may be mutated by the script.
-         * @return transformed version of transformMe. This may actually be the same object as sourceAsMap
-         */
-        Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap);
-    }
-
     final Version indexCreated;
     final RootObjectMapper root;
     final MetadataFieldMapper[] metadataMappers;
     final Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> rootMappersMap;
-    final SourceTransform[] sourceTransforms;
     volatile Map<String, Object> meta;

-    public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper, MetadataFieldMapper[] metadataMappers, SourceTransform[] sourceTransforms, Map<String, Object> meta) {
+    public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper, MetadataFieldMapper[] metadataMappers, Map<String, Object> meta) {
         this.indexCreated = indexCreated;
         this.root = rootObjectMapper;
         this.metadataMappers = metadataMappers;
@@ -81,7 +68,6 @@ public final class Mapping implements ToXContent {
             }
         });
         this.rootMappersMap = unmodifiableMap(rootMappersMap);
-        this.sourceTransforms = sourceTransforms;
         this.meta = meta;
     }

@@ -94,7 +80,7 @@ public final class Mapping implements ToXContent {
      * Generate a mapping update for the given root object mapper.
      */
     public Mapping mappingUpdate(Mapper rootObjectMapper) {
-        return new Mapping(indexCreated, (RootObjectMapper) rootObjectMapper, metadataMappers, sourceTransforms, meta);
+        return new Mapping(indexCreated, (RootObjectMapper) rootObjectMapper, metadataMappers, meta);
     }

     /** Get the root mapper with the given class. */
@@ -126,19 +112,6 @@ public final class Mapping implements ToXContent {
         root.toXContent(builder, params, new ToXContent() {
             @Override
             public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-                if (sourceTransforms.length > 0) {
-                    if (sourceTransforms.length == 1) {
-                        builder.field("transform");
-                        sourceTransforms[0].toXContent(builder, params);
-                    } else {
-                        builder.startArray("transform");
-                        for (SourceTransform transform: sourceTransforms) {
-                            transform.toXContent(builder, params);
-                        }
-                        builder.endArray();
-                    }
-                }
-
                 if (meta != null && !meta.isEmpty()) {
                     builder.field("_meta", meta);
                 }
ScriptContext.java

@@ -37,7 +37,7 @@ public interface ScriptContext {
     */
    enum Standard implements ScriptContext {

-        AGGS("aggs"), MAPPING("mapping"), SEARCH("search"), UPDATE("update");
+        AGGS("aggs"), SEARCH("search"), UPDATE("update");

        private final String key;

ScriptService.java

@@ -246,8 +246,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
         // TODO: fix this through some API or something, thats wrong
         // special exception to prevent expressions from compiling as update or mapping scripts
         boolean expression = "expression".equals(script.getLang());
-        boolean notSupported = scriptContext.getKey().equals(ScriptContext.Standard.UPDATE.getKey()) ||
-                scriptContext.getKey().equals(ScriptContext.Standard.MAPPING.getKey());
+        boolean notSupported = scriptContext.getKey().equals(ScriptContext.Standard.UPDATE.getKey());
         if (expression && notSupported) {
             throw new ScriptException("scripts of type [" + script.getType() + "]," +
                     " operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are not supported");
FetchSourceContext.java

@@ -47,7 +47,6 @@ public class FetchSourceContext implements Streamable, ToXContent {
     public static final FetchSourceContext FETCH_SOURCE = new FetchSourceContext(true);
     public static final FetchSourceContext DO_NOT_FETCH_SOURCE = new FetchSourceContext(false);
     private boolean fetchSource;
-    private boolean transformSource;
     private String[] includes;
     private String[] excludes;

@@ -62,7 +61,7 @@ public class FetchSourceContext implements Streamable, ToXContent {
     }

     public FetchSourceContext(boolean fetchSource) {
-        this(fetchSource, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY, false);
+        this(fetchSource, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY);
     }

     public FetchSourceContext(String include) {
@@ -72,23 +71,21 @@ public class FetchSourceContext implements Streamable, ToXContent {
     public FetchSourceContext(String include, String exclude) {
         this(true,
                 include == null ? Strings.EMPTY_ARRAY : new String[]{include},
-                exclude == null ? Strings.EMPTY_ARRAY : new String[]{exclude},
-                false);
+                exclude == null ? Strings.EMPTY_ARRAY : new String[]{exclude});
     }

     public FetchSourceContext(String[] includes) {
-        this(true, includes, Strings.EMPTY_ARRAY, false);
+        this(true, includes, Strings.EMPTY_ARRAY);
     }

     public FetchSourceContext(String[] includes, String[] excludes) {
-        this(true, includes, excludes, false);
+        this(true, includes, excludes);
     }

-    public FetchSourceContext(boolean fetchSource, String[] includes, String[] excludes, boolean transform) {
+    public FetchSourceContext(boolean fetchSource, String[] includes, String[] excludes) {
         this.fetchSource = fetchSource;
         this.includes = includes == null ? Strings.EMPTY_ARRAY : includes;
         this.excludes = excludes == null ? Strings.EMPTY_ARRAY : excludes;
-        this.transformSource = transform;
     }

     public boolean fetchSource() {
@@ -100,22 +97,6 @@ public class FetchSourceContext implements Streamable, ToXContent {
         return this;
     }

-    /**
-     * Should the document be transformed after the source is loaded?
-     */
-    public boolean transformSource() {
-        return this.transformSource;
-    }
-
-    /**
-     * Should the document be transformed after the source is loaded?
-     * @return this for chaining
-     */
-    public FetchSourceContext transformSource(boolean transformSource) {
-        this.transformSource = transformSource;
-        return this;
-    }
-
     public String[] includes() {
         return this.includes;
     }
@@ -179,10 +160,8 @@ public class FetchSourceContext implements Streamable, ToXContent {
             source_excludes = Strings.splitStringByCommaToArray(sExcludes);
         }

-        boolean transform = request.paramAsBoolean("_source_transform", false);
-
-        if (fetchSource != null || source_includes != null || source_excludes != null || transform) {
-            return new FetchSourceContext(fetchSource == null ? true : fetchSource, source_includes, source_excludes, transform);
+        if (fetchSource != null || source_includes != null || source_excludes != null) {
+            return new FetchSourceContext(fetchSource == null ? true : fetchSource, source_includes, source_excludes);
         }
         return null;
     }
@@ -272,7 +251,7 @@ public class FetchSourceContext implements Streamable, ToXContent {
         fetchSource = in.readBoolean();
         includes = in.readStringArray();
         excludes = in.readStringArray();
-        transformSource = in.readBoolean();
+        in.readBoolean(); // Used to be transformSource but that was dropped in 2.1
     }

     @Override
@@ -280,7 +259,7 @@ public class FetchSourceContext implements Streamable, ToXContent {
         out.writeBoolean(fetchSource);
         out.writeStringArray(includes);
         out.writeStringArray(excludes);
-        out.writeBoolean(transformSource);
+        out.writeBoolean(false); // Used to be transformSource but that was dropped in 2.1
     }

     @Override
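Note the wire-format choice in the last two hunks: rather than shrinking the stream and gating it on a version check, the serialization keeps the obsolete boolean slot and ignores its value, so nodes on either side of the change still read each other's bytes in the same positions. Assembled from the hunks, the resulting Streamable methods are:

    @Override
    public void readFrom(StreamInput in) throws IOException {
        fetchSource = in.readBoolean();
        includes = in.readStringArray();
        excludes = in.readStringArray();
        in.readBoolean(); // dead slot: used to be transformSource, the value is ignored
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeBoolean(fetchSource);
        out.writeStringArray(includes);
        out.writeStringArray(excludes);
        out.writeBoolean(false); // dead slot: keeps the layout readable by peers expecting the flag
    }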
InternalEngineTests.java

@@ -1724,7 +1724,7 @@ public class InternalEngineTests extends ESTestCase {
     private Mapping dynamicUpdate() {
         BuilderContext context = new BuilderContext(Settings.EMPTY, new ContentPath());
         final RootObjectMapper root = MapperBuilders.rootObject("some_type").build(context);
-        return new Mapping(Version.CURRENT, root, new MetadataFieldMapper[0], new Mapping.SourceTransform[0], emptyMap());
+        return new Mapping(Version.CURRENT, root, new MetadataFieldMapper[0], emptyMap());
     }

     public void testUpgradeOldIndex() throws IOException {
@@ -1922,11 +1922,10 @@ public class InternalEngineTests extends ESTestCase {
             IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings, Collections.EMPTY_LIST);
             AnalysisService analysisService = new AnalysisService(indexSettings, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP, Collections.EMPTY_MAP);
             SimilarityService similarityService = new SimilarityService(indexSettings, Collections.EMPTY_MAP);
-            MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, null);
+            MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService);
             DocumentMapper.Builder b = new DocumentMapper.Builder(settings, rootBuilder, mapperService);
-            DocumentMapperParser parser = new DocumentMapperParser(indexSettings, mapperService, analysisService, similarityService, null);
+            DocumentMapperParser parser = new DocumentMapperParser(indexSettings, mapperService, analysisService, similarityService);
             this.docMapper = b.build(mapperService, parser);
-
         }

         @Override
SearchSourceBuilderTests.java

@@ -183,7 +183,7 @@ public class SearchSourceBuilderTests extends ESTestCase {
                 fetchSourceContext = new FetchSourceContext(randomAsciiOfLengthBetween(5, 20), randomAsciiOfLengthBetween(5, 20));
                 break;
             case 3:
-                fetchSourceContext = new FetchSourceContext(true, includes, excludes, randomBoolean());
+                fetchSourceContext = new FetchSourceContext(true, includes, excludes);
                 break;
             case 4:
                 fetchSourceContext = new FetchSourceContext(includes);
Migration notes (asciidoc)

@@ -158,6 +158,12 @@ Previously, there were three settings for the ping timeout: `discovery.zen.initi
 the only setting key for the ping timeout is now `discovery.zen.ping_timeout`. The default value for
 ping timeouts remains at three seconds.

+=== Mapping changes
+
+==== Transform removed
+
+The `transform` feature from mappings has been removed. It made issues very hard to debug.
+
 === Plugins

 Plugins implementing custom queries need to implement the `fromXContent(QueryParseContext)` method in their
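Since there is no longer any root-level parser for transform, a mapping that still carries the section should now fail mapping parsing like any other unknown root field, rather than being silently accepted; a hedged sketch of what upgraders can expect (the exact exception type and wrapping may differ):

    // A 2.0-era mapping that still declares a transform section...
    String mappingWithTransform = "{\"test\": {\"transform\": {\"script\": \"ctx._source.a = 1\"}}}";
    try {
        client().admin().indices().prepareCreate("test").addMapping("test", mappingWithTransform).get();
    } catch (Exception e) {
        // ...is expected to be rejected on 2.1 during mapping parsing.
    }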
MoreExpressionTests.java

@@ -504,32 +504,6 @@ public class MoreExpressionTests extends ESIntegTestCase {
         }
     }

-    // test to make sure expressions are not allowed to be used as mapping scripts
-    public void testInvalidMappingScript() throws Exception{
-        try {
-            createIndex("test_index");
-            ensureGreen("test_index");
-            XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
-            builder.startObject("transform");
-            builder.field("script", "1.0");
-            builder.field("lang", ExpressionScriptEngineService.NAME);
-            builder.endObject();
-            builder.startObject("properties");
-            builder.startObject("double_field");
-            builder.field("type", "double");
-            builder.endObject();
-            builder.endObject();
-            builder.endObject();
-            client().admin().indices().preparePutMapping("test_index").setType("trans_test").setSource(builder).get();
-            client().prepareIndex("test_index", "trans_test", "1").setSource("double_field", 0.0).get();
-            fail("Expression scripts should not be allowed to run as mapping scripts.");
-        } catch (Exception e) {
-            String message = ExceptionsHelper.detailedMessage(e);
-            assertThat(message + " should have contained failed to parse", message.contains("failed to parse"), equalTo(true));
-            assertThat(message + " should have contained not supported", message.contains("not supported"), equalTo(true));
-        }
-    }
-
     // test to make sure expressions are allowed to be used for reduce in pipeline aggregations
     public void testPipelineAggregationScript() throws Exception {
         createIndex("agg_index");
TransformOnIndexMapperTests.java (deleted)

@@ -1,180 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.messy.tests;
-
-import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
-import org.elasticsearch.action.get.GetResponse;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.suggest.SuggestResponse;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.script.groovy.GroovyPlugin;
-import org.elasticsearch.script.groovy.GroovyScriptEngineService;
-import org.elasticsearch.search.suggest.SuggestBuilders;
-import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
-import org.elasticsearch.test.ESIntegTestCase;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Map;
-import java.util.concurrent.ExecutionException;
-
-import static java.util.Collections.singletonMap;
-import static org.elasticsearch.index.query.QueryBuilders.termQuery;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertExists;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion;
-import static org.hamcrest.Matchers.both;
-import static org.hamcrest.Matchers.hasEntry;
-import static org.hamcrest.Matchers.hasKey;
-import static org.hamcrest.Matchers.not;
-
-/**
- * Tests for transforming the source document before indexing.
- */
-@SuppressCodecs("*") // requires custom completion format
-public class TransformOnIndexMapperTests extends ESIntegTestCase {
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Collections.singleton(GroovyPlugin.class);
-    }
-
-    public void testSearchOnTransformed() throws Exception {
-        setup(true);
-
-        // Searching by the field created in the transport finds the entry
-        SearchResponse response = client().prepareSearch("test").setQuery(termQuery("destination", "findme")).get();
-        assertSearchHits(response, "righttitle");
-        // The field built in the transform isn't in the source but source is,
-        // even though we didn't index it!
-        assertRightTitleSourceUntransformed(response.getHits().getAt(0).sourceAsMap());
-
-        // Can't find by a field removed from the document by the transform
-        response = client().prepareSearch("test").setQuery(termQuery("content", "findme")).get();
-        assertHitCount(response, 0);
-    }
-
-    public void testGetTransformed() throws Exception {
-        setup(getRandom().nextBoolean());
-        GetResponse response = client().prepareGet("test", "test", "righttitle").get();
-        assertExists(response);
-        assertRightTitleSourceUntransformed(response.getSource());
-
-        response = client().prepareGet("test", "test", "righttitle").setTransformSource(true).get();
-        assertExists(response);
-        assertRightTitleSourceTransformed(response.getSource());
-    }
-
-    // TODO: the completion suggester currently returns payloads with no reencoding so this test
-    // exists to make sure that _source transformation and completion work well together. If we
-    // ever fix the completion suggester to reencode the payloads then we can remove this test.
-    public void testContextSuggestPayloadTransformed() throws Exception {
-        XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
-        builder.startObject("properties");
-        builder.startObject("suggest").field("type", "completion").field("payloads", true).endObject();
-        builder.endObject();
-        builder.startObject("transform");
-        builder.field("script", "ctx._source.suggest = ['input': ctx._source.text];ctx._source.suggest.payload = ['display': ctx._source.text, 'display_detail': 'on the fly']");
-        builder.field("lang", GroovyScriptEngineService.NAME);
-        builder.endObject();
-        assertAcked(client().admin().indices().prepareCreate("test").addMapping("test", builder));
-        // Payload is stored using original source format (json, smile, yaml, whatever)
-        XContentType type = XContentType.values()[between(0, XContentType.values().length - 1)];
-        XContentBuilder source = XContentFactory.contentBuilder(type);
-        source.startObject().field("text", "findme").endObject();
-        indexRandom(true, client().prepareIndex("test", "test", "findme").setSource(source));
-        SuggestResponse response = client().prepareSuggest("test").addSuggestion(
-                SuggestBuilders.completionSuggestion("test").field("suggest").text("findme")).get();
-        assertSuggestion(response.getSuggest(), 0, 0, "test", "findme");
-        CompletionSuggestion.Entry.Option option = (CompletionSuggestion.Entry.Option)response.getSuggest().getSuggestion("test").getEntries().get(0).getOptions().get(0);
-        // And it comes back in exactly that way.
-        XContentBuilder expected = XContentFactory.contentBuilder(type);
-        expected.startObject().field("display", "findme").field("display_detail", "on the fly").endObject();
-        assertEquals(expected.string(), option.getPayloadAsString());
-    }
-
-    /**
-     * Setup an index with some source transforms. Randomly picks the number of
-     * transforms but all but one of the transforms is a noop. The other is a
-     * script that fills the 'destination' field with the 'content' field only
-     * if the 'title' field starts with 't' and then always removes the
-     * 'content' field regarless of the contents of 't'. The actual script
-     * randomly uses parameters or not.
-     *
-     * @param forceRefresh
-     *            should the data be flushed to disk? Set to false to test real
-     *            time fetching
-     */
-    private void setup(boolean forceRefresh) throws IOException, InterruptedException, ExecutionException {
-        XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
-        builder.field("transform");
-        if (getRandom().nextBoolean()) {
-            // Single transform
-            builder.startObject();
-            buildTransformScript(builder);
-            builder.field("lang", randomFrom(null, GroovyScriptEngineService.NAME));
-            builder.endObject();
-        } else {
-            // Multiple transforms
-            int total = between(1, 10);
-            int actual = between(0, total - 1);
-            builder.startArray();
-            for (int s = 0; s < total; s++) {
-                builder.startObject();
-                if (s == actual) {
-                    buildTransformScript(builder);
-                } else {
-                    builder.field("script", "true");
-                }
-                builder.field("lang", randomFrom(null, GroovyScriptEngineService.NAME));
-                builder.endObject();
-            }
-            builder.endArray();
-        }
-        assertAcked(client().admin().indices().prepareCreate("test").addMapping("test", builder));
-
-        indexRandom(forceRefresh, client().prepareIndex("test", "test", "notitle").setSource("content", "findme"),
-                client().prepareIndex("test", "test", "badtitle").setSource("content", "findme", "title", "cat"),
-                client().prepareIndex("test", "test", "righttitle").setSource("content", "findme", "title", "table"));
-    }
-
-    private void buildTransformScript(XContentBuilder builder) throws IOException {
-        String script = "if (ctx._source['title']?.startsWith('t')) { ctx._source['destination'] = ctx._source[sourceField] }; ctx._source.remove(sourceField);";
-        if (getRandom().nextBoolean()) {
-            script = script.replace("sourceField", "'content'");
-        } else {
-            builder.field("params", singletonMap("sourceField", "content"));
-        }
-        builder.field("script", script);
-    }
-
-    private void assertRightTitleSourceUntransformed(Map<String, Object> source) {
-        assertThat(source, both(hasEntry("content", (Object) "findme")).and(not(hasKey("destination"))));
-    }
-
-    private void assertRightTitleSourceTransformed(Map<String, Object> source) {
-        assertThat(source, both(hasEntry("destination", (Object) "findme")).and(not(hasKey("content"))));
-    }
-}