store the doc type in the internal index (#39824)

store the doc type in the internal data frame index
Hendrik Muhs 2019-03-08 12:15:09 +01:00
parent 5d68143b18
commit 50d742320d
4 changed files with 33 additions and 11 deletions

DataFrameField.java

@@ -23,6 +23,7 @@ public final class DataFrameField {
     public static final ParseField TIMEOUT = new ParseField("timeout");
     public static final ParseField WAIT_FOR_COMPLETION = new ParseField("wait_for_completion");
     public static final ParseField STATS_FIELD = new ParseField("stats");
+    public static final ParseField INDEX_DOC_TYPE = new ParseField("doc_type");
     // common strings
     public static final String TASK_NAME = "data_frame/transforms";

DataFrameTransformConfig.java

@@ -36,7 +36,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optiona
  */
 public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransformConfig> implements Writeable, ToXContentObject {
-    private static final String NAME = "data_frame_transform";
+    private static final String NAME = "data_frame_transform_config";
     public static final ParseField HEADERS = new ParseField("headers");
     public static final ParseField SOURCE = new ParseField("source");
     public static final ParseField DESTINATION = new ParseField("dest");
@@ -75,24 +75,26 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
                 String source = (String) args[1];
                 String dest = (String) args[2];
+                // ignored, only for internal storage: String docType = (String) args[3];
                 // on strict parsing do not allow injection of headers
-                if (lenient == false && args[3] != null) {
+                if (lenient == false && args[4] != null) {
                     throw new IllegalArgumentException("Found [headers], not allowed for strict parsing");
                 }
                 @SuppressWarnings("unchecked")
-                Map<String, String> headers = (Map<String, String>) args[3];
+                Map<String, String> headers = (Map<String, String>) args[4];
                 // default handling: if the user does not specify a query, we default to match_all
                 QueryConfig queryConfig = null;
-                if (args[4] == null) {
+                if (args[5] == null) {
                     queryConfig = new QueryConfig(Collections.singletonMap(MatchAllQueryBuilder.NAME, Collections.emptyMap()),
                         new MatchAllQueryBuilder());
                 } else {
-                    queryConfig = (QueryConfig) args[4];
+                    queryConfig = (QueryConfig) args[5];
                 }
-                PivotConfig pivotConfig = (PivotConfig) args[5];
+                PivotConfig pivotConfig = (PivotConfig) args[6];
                 return new DataFrameTransformConfig(id, source, dest, headers, queryConfig, pivotConfig);
             });
@@ -100,6 +102,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
         parser.declareString(constructorArg(), SOURCE);
         parser.declareString(constructorArg(), DESTINATION);
+        parser.declareString(optionalConstructorArg(), DataFrameField.INDEX_DOC_TYPE);
         parser.declareObject(optionalConstructorArg(), (p, c) -> p.mapStrings(), HEADERS);
         parser.declareObject(optionalConstructorArg(), (p, c) -> QueryConfig.fromXContent(p, lenient), QUERY);
         parser.declareObject(optionalConstructorArg(), (p, c) -> PivotConfig.fromXContent(p, lenient), PIVOT_TRANSFORM);
@@ -108,7 +111,7 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
     }
     public static String documentId(String transformId) {
-        return "data_frame-" + transformId;
+        return NAME + "-" + transformId;
     }
     public DataFrameTransformConfig(final String id,
@@ -206,6 +209,9 @@ public class DataFrameTransformConfig extends AbstractDiffable<DataFrameTransfor
         if (pivotConfig != null) {
             builder.field(PIVOT_TRANSFORM.getPreferredName(), pivotConfig);
         }
+        if (params.paramAsBoolean(DataFrameField.FOR_INTERNAL_STORAGE, false)) {
+            builder.field(DataFrameField.INDEX_DOC_TYPE.getPreferredName(), NAME);
+        }
         if (headers.isEmpty() == false && params.paramAsBoolean(DataFrameField.FOR_INTERNAL_STORAGE, false) == true) {
             builder.field(HEADERS.getPreferredName(), headers);
         }
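For context, a rough sketch of how a caller can request the internal-storage view added in the hunk above. Only FOR_INTERNAL_STORAGE, INDEX_DOC_TYPE and toXContent come from this commit; the helper name, imports and surrounding wiring are illustrative assumptions, not code from the repository.

    // Sketch only: serialize a transform config for the internal index.
    // Passing FOR_INTERNAL_STORAGE=true makes toXContent emit the doc_type field
    // (and the stored headers); with ToXContent.EMPTY_PARAMS both are omitted.
    // Assumed imports: java.util.Collections, org.elasticsearch.common.Strings,
    // org.elasticsearch.common.xcontent.{ToXContent, XContentBuilder, XContentFactory}.
    static String toInternalStorageJson(DataFrameTransformConfig config) throws IOException {
        ToXContent.Params internalParams = new ToXContent.MapParams(
            Collections.singletonMap(DataFrameField.FOR_INTERNAL_STORAGE, "true"));
        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            config.toXContent(builder, internalParams);
            // resulting JSON contains "doc_type":"data_frame_transform_config"
            return Strings.toString(builder);
        }
    }

The test added to DataFrameTransformConfigTests below exercises exactly this contract: doc_type is present with the internal-storage params and absent with ToXContent.EMPTY_PARAMS.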

DataFrameTransformConfigTests.java

@@ -152,6 +152,24 @@ public class DataFrameTransformConfigTests extends AbstractSerializingDataFrameT
                 () -> createDataFrameTransformConfigFromString(pivotTransform, "test_header_injection"));
     }
+    public void testXContentForInternalStorage() throws IOException {
+        DataFrameTransformConfig dataFrameTransformConfig = randomDataFrameTransformConfig();
+        try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) {
+            XContentBuilder content = dataFrameTransformConfig.toXContent(xContentBuilder, getToXContentParams());
+            String doc = Strings.toString(content);
+            assertThat(doc, matchesPattern(".*\"doc_type\"\\s*:\\s*\"data_frame_transform_config\".*"));
+        }
+        try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()) {
+            XContentBuilder content = dataFrameTransformConfig.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
+            String doc = Strings.toString(content);
+            assertFalse(doc.contains("doc_type"));
+        }
+    }
     public void testSetIdInBody() throws IOException {
         String pivotTransform = "{"
             + " \"id\" : \"body_id\","

DataFrameInternalIndex.java

@@ -38,9 +38,6 @@ public final class DataFrameInternalIndex {
     public static final String DOUBLE = "double";
     public static final String KEYWORD = "keyword";
-    // internal document types, e.g. "transform_config"
-    public static final String DOC_TYPE = "doc_type";
     public static IndexTemplateMetaData getIndexTemplateMetaData() throws IOException {
         IndexTemplateMetaData dataFrameTemplate = IndexTemplateMetaData.builder(INDEX_TEMPLATE_NAME)
             .patterns(Collections.singletonList(INDEX_TEMPLATE_NAME))
@@ -69,7 +66,7 @@ public final class DataFrameInternalIndex {
         // the schema definitions
         builder.startObject(PROPERTIES);
         // overall doc type
-        builder.startObject(DOC_TYPE).field(TYPE, KEYWORD).endObject();
+        builder.startObject(DataFrameField.INDEX_DOC_TYPE.getPreferredName()).field(TYPE, KEYWORD).endObject();
         // add the schema for transform configurations
         addDataFrameTransformsConfigMappings(builder);
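
With doc_type now mapped as a keyword, readers of the internal index can filter by document type. A minimal sketch, not part of this commit: QueryBuilders.termQuery is the standard Elasticsearch query builder, and the value mirrors the NAME constant that DataFrameTransformConfig writes into doc_type; the variable name is illustrative.

    // Sketch: match only transform config documents in the internal index.
    // Assumed imports: org.elasticsearch.index.query.QueryBuilder, QueryBuilders.
    QueryBuilder onlyConfigs = QueryBuilders.termQuery(
        DataFrameField.INDEX_DOC_TYPE.getPreferredName(),   // "doc_type"
        "data_frame_transform_config");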