ML: Adding XContentObjectTransformer class (#35957)

* ML: Adding XContentObjectTransformer class

* adding license headers

* Adding custom deprecation handler, and test for checking parsing failures

* forwarding deprecation logs to LoggingDeprecationHandler
This commit is contained in:
Benjamin Trent 2018-11-28 11:43:57 -06:00 committed by GitHub
parent a72430e1ef
commit 513e1ed095
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 247 additions and 0 deletions

View File

@ -0,0 +1,51 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.utils;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * A {@link DeprecationHandler} very similar to
 * {@link org.elasticsearch.common.xcontent.LoggingDeprecationHandler}; the main differences are:
 * 1. It is not a singleton — create one instance per parse.
 * 2. It accumulates every deprecation warning into a list that can be retrieved
 *    afterwards via {@link LoggingDeprecationAccumulationHandler#getDeprecations()}.
 *
 * NOTE: The accumulation is NOT THREAD SAFE.
 */
public class LoggingDeprecationAccumulationHandler implements DeprecationHandler {

    /** Messages collected so far, in the order the deprecated fields were encountered. */
    private final List<String> deprecations = new ArrayList<>();

    @Override
    public void usedDeprecatedName(String usedName, String modernName) {
        // First preserve the normal logging behavior, then remember the message locally.
        LoggingDeprecationHandler.INSTANCE.usedDeprecatedName(usedName, modernName);
        String message = LoggerMessageFormat.format("Deprecated field [{}] used, expected [{}] instead",
            usedName,
            modernName);
        deprecations.add(message);
    }

    @Override
    public void usedDeprecatedField(String usedName, String replacedWith) {
        // First preserve the normal logging behavior, then remember the message locally.
        LoggingDeprecationHandler.INSTANCE.usedDeprecatedField(usedName, replacedWith);
        String message = LoggerMessageFormat.format("Deprecated field [{}] used, replaced by [{}]",
            usedName,
            replacedWith);
        deprecations.add(message);
    }

    /**
     * @return an unmodifiable view of the deprecation warnings collected so far
     */
    public List<String> getDeprecations() {
        return Collections.unmodifiableList(deprecations);
    }
}

View File

@ -0,0 +1,83 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.utils;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
/**
* This is a utility class that allows simple one-to-one transformations between an ToXContentObject type
* to and from other supported objects.
*
* @param <T> The type of the object that we will be transforming to/from
*/
public class XContentObjectTransformer<T extends ToXContentObject> {
private final NamedXContentRegistry registry;
private final CheckedFunction<XContentParser, T, IOException> parserFunction;
// We need this registry for parsing out Aggregations and Searches
private static NamedXContentRegistry searchRegistry;
static {
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
searchRegistry = new NamedXContentRegistry(searchModule.getNamedXContents());
}
public static XContentObjectTransformer<AggregatorFactories.Builder> aggregatorTransformer() {
return new XContentObjectTransformer<>(searchRegistry, (p) -> {
// Serializing a map creates an object, need to skip the start object for the aggregation parser
assert(XContentParser.Token.START_OBJECT.equals(p.nextToken()));
return AggregatorFactories.parseAggregators(p);
});
}
public static XContentObjectTransformer<QueryBuilder> queryBuilderTransformer() {
return new XContentObjectTransformer<>(searchRegistry, AbstractQueryBuilder::parseInnerQueryBuilder);
}
XContentObjectTransformer(NamedXContentRegistry registry, CheckedFunction<XContentParser, T, IOException> parserFunction) {
this.parserFunction = parserFunction;
this.registry = registry;
}
public T fromMap(Map<String, Object> stringObjectMap) throws IOException {
LoggingDeprecationAccumulationHandler deprecationLogger = new LoggingDeprecationAccumulationHandler();
try(XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(stringObjectMap);
XContentParser parser = XContentType.JSON
.xContent()
.createParser(registry,
deprecationLogger,
BytesReference.bytes(xContentBuilder).streamInput())) {
//TODO do something with the accumulated deprecation warnings
return parserFunction.apply(parser);
}
}
public Map<String, Object> toMap(T object) throws IOException {
try(XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) {
XContentBuilder content = object.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
return XContentHelper.convertToMap(BytesReference.bytes(content), true, XContentType.JSON).v2();
}
}
}

View File

@ -0,0 +1,113 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.utils;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class XContentObjectTransformerTests extends ESTestCase {

    public void testFromMap() throws IOException {
        // A simple max aggregation expressed as a nested map
        Map<String, Object> aggMap = Collections.singletonMap("fieldName",
            Collections.singletonMap("max",
                Collections.singletonMap("field", "fieldName")));

        XContentObjectTransformer<AggregatorFactories.Builder> aggTransformer = XContentObjectTransformer.aggregatorTransformer();
        assertXContentAreEqual(aggTransformer.fromMap(aggMap), aggMap);
        assertXContentAreEqual(aggTransformer.fromMap(aggMap), aggTransformer.toMap(aggTransformer.fromMap(aggMap)));

        // Add all the default fields so they are not added dynamically when the object is parsed
        Map<String, Object> matchOptions = new HashMap<>();
        matchOptions.put("query","fieldValue");
        matchOptions.put("operator","OR");
        matchOptions.put("prefix_length",0);
        matchOptions.put("max_expansions",50);
        matchOptions.put("fuzzy_transpositions",true);
        matchOptions.put("lenient",false);
        matchOptions.put("zero_terms_query","NONE");
        matchOptions.put("auto_generate_synonyms_phrase_query",true);
        matchOptions.put("boost",1.0);
        Map<String, Object> queryMap = Collections.singletonMap("match",
            Collections.singletonMap("fieldName", matchOptions));

        XContentObjectTransformer<QueryBuilder> queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer();
        assertXContentAreEqual(queryBuilderTransformer.fromMap(queryMap), queryMap);
        assertXContentAreEqual(queryBuilderTransformer.fromMap(queryMap),
            queryBuilderTransformer.toMap(queryBuilderTransformer.fromMap(queryMap)));
    }

    public void testFromMapWithBadMaps() {
        Map<String, Object> badMatch = new HashMap<>();
        badMatch.put("query", "notSupported");
        badMatch.put("type", "phrase"); //phrase stopped being supported for match in 6.x
        Map<String, Object> queryMap = Collections.singletonMap("match",
            Collections.singletonMap("airline", badMatch));

        XContentObjectTransformer<QueryBuilder> queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer();
        ParsingException exception = expectThrows(ParsingException.class,
            () -> queryBuilderTransformer.fromMap(queryMap));
        assertThat(exception.getMessage(), equalTo("[match] query does not support [type]"));

        Map<String, Object> badTerms = new HashMap<>();
        badTerms.put("size", 0); //size being 0 in terms agg stopped being supported in 6.x
        badTerms.put("field", "myField");
        Map<String, Object> aggMap = Collections.singletonMap("badTerms",
            Collections.singletonMap("terms", badTerms));

        XContentObjectTransformer<AggregatorFactories.Builder> aggTransformer = XContentObjectTransformer.aggregatorTransformer();
        XContentParseException xContentParseException = expectThrows(XContentParseException.class, () -> aggTransformer.fromMap(aggMap));
        assertThat(xContentParseException.getMessage(), containsString("[terms] failed to parse field [size]"));
    }

    public void testToMap() throws IOException {
        XContentObjectTransformer<AggregatorFactories.Builder> aggTransformer = XContentObjectTransformer.aggregatorTransformer();
        XContentObjectTransformer<QueryBuilder> queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer();

        // Build a date_histogram with a max sub-aggregation and check the round trip
        long histogramInterval = randomNonNegativeLong();
        MaxAggregationBuilder maxTimeAgg = AggregationBuilders.max("time").field("time");
        AggregatorFactories.Builder aggBuilders = new AggregatorFactories.Builder();
        aggBuilders.addAggregator(AggregationBuilders.dateHistogram("buckets")
            .interval(histogramInterval).subAggregation(maxTimeAgg).field("time"));

        assertXContentAreEqual(aggBuilders, aggTransformer.toMap(aggBuilders));
        assertXContentAreEqual(aggTransformer.fromMap(aggTransformer.toMap(aggBuilders)), aggTransformer.toMap(aggBuilders));

        QueryBuilder termQuery = QueryBuilders.termQuery(randomAlphaOfLength(10), randomAlphaOfLength(10));
        assertXContentAreEqual(termQuery, queryBuilderTransformer.toMap(termQuery));
        assertXContentAreEqual(queryBuilderTransformer.fromMap(queryBuilderTransformer.toMap(termQuery)),
            queryBuilderTransformer.toMap(termQuery));
    }

    /** Asserts that the object's XContent rendering and the map's JSON rendering are equivalent. */
    private void assertXContentAreEqual(ToXContentObject object, Map<String, Object> map) throws IOException {
        XContentType xContentType = XContentType.JSON;
        BytesReference fromObject = XContentHelper.toXContent(object, xContentType, EMPTY_PARAMS, false);
        BytesReference fromMap = BytesReference.bytes(XContentFactory.jsonBuilder().map(map));
        assertToXContentEquivalent(fromObject, fromMap, xContentType);
    }
}