ingest: Streamline option naming for several processors:

* `rename` processor: renamed `to` to `target_field`
* `date` processor: renamed `match_field` to `field` and `match_formats` to `formats`
* `geoip` processor: renamed `source_field` to `field` and `fields` to `properties`
* `attachment` processor: renamed `source_field` to `field` and `fields` to `properties`
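
For example (an illustrative sketch, not part of this commit; the field names `message`, `timestamp`, `ip` and `data` are placeholders), a pipeline using the new option names would be defined as:

{
  "description" : "example pipeline using the renamed options",
  "processors" : [
    {
      "rename" : {
        "field" : "message",
        "target_field" : "msg"
      }
    },
    {
      "date" : {
        "field" : "timestamp",
        "formats" : ["dd/MM/yyyy HH:mm:ss"]
      }
    },
    {
      "geoip" : {
        "field" : "ip",
        "properties" : ["country_iso_code", "location"]
      }
    },
    {
      "attachment" : {
        "field" : "data",
        "properties" : ["content", "content_type"]
      }
    }
  ]
}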

Closes #17835
Martijn van Groningen 2016-04-20 18:00:11 +02:00
parent 9eb242a5fe
commit dd2184ab25
22 changed files with 233 additions and 235 deletions


@ -42,28 +42,28 @@ public final class DateProcessor extends AbstractProcessor {
private final DateTimeZone timezone;
private final Locale locale;
private final String matchField;
private final String field;
private final String targetField;
private final List<String> matchFormats;
private final List<String> formats;
private final List<Function<String, DateTime>> dateParsers;
DateProcessor(String tag, DateTimeZone timezone, Locale locale, String matchField, List<String> matchFormats, String targetField) {
DateProcessor(String tag, DateTimeZone timezone, Locale locale, String field, List<String> formats, String targetField) {
super(tag);
this.timezone = timezone;
this.locale = locale;
this.matchField = matchField;
this.field = field;
this.targetField = targetField;
this.matchFormats = matchFormats;
this.formats = formats;
this.dateParsers = new ArrayList<>();
for (String matchFormat : matchFormats) {
DateFormat dateFormat = DateFormat.fromString(matchFormat);
dateParsers.add(dateFormat.getFunction(matchFormat, timezone, locale));
for (String format : formats) {
DateFormat dateFormat = DateFormat.fromString(format);
dateParsers.add(dateFormat.getFunction(format, timezone, locale));
}
}
@Override
public void execute(IngestDocument ingestDocument) {
String value = ingestDocument.getFieldValue(matchField, String.class);
String value = ingestDocument.getFieldValue(field, String.class);
DateTime dateTime = null;
Exception lastException = null;
@ -96,23 +96,23 @@ public final class DateProcessor extends AbstractProcessor {
return locale;
}
String getMatchField() {
return matchField;
String getField() {
return field;
}
String getTargetField() {
return targetField;
}
List<String> getMatchFormats() {
return matchFormats;
List<String> getFormats() {
return formats;
}
public static final class Factory extends AbstractProcessorFactory<DateProcessor> {
@SuppressWarnings("unchecked")
public DateProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
String matchField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "match_field");
String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", DEFAULT_TARGET_FIELD);
String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "timezone");
DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString);
@ -125,8 +125,8 @@ public final class DateProcessor extends AbstractProcessor {
throw new IllegalArgumentException("Invalid language tag specified: " + localeString);
}
}
List<String> matchFormats = ConfigurationUtils.readList(TYPE, processorTag, config, "match_formats");
return new DateProcessor(processorTag, timezone, locale, matchField, matchFormats, targetField);
List<String> formats = ConfigurationUtils.readList(TYPE, processorTag, config, "formats");
return new DateProcessor(processorTag, timezone, locale, field, formats, targetField);
}
}
}


@ -33,39 +33,39 @@ public final class RenameProcessor extends AbstractProcessor {
public static final String TYPE = "rename";
private final String oldFieldName;
private final String newFieldName;
private final String field;
private final String targetField;
RenameProcessor(String tag, String oldFieldName, String newFieldName) {
RenameProcessor(String tag, String field, String targetField) {
super(tag);
this.oldFieldName = oldFieldName;
this.newFieldName = newFieldName;
this.field = field;
this.targetField = targetField;
}
String getOldFieldName() {
return oldFieldName;
String getField() {
return field;
}
String getNewFieldName() {
return newFieldName;
String getTargetField() {
return targetField;
}
@Override
public void execute(IngestDocument document) {
if (document.hasField(oldFieldName) == false) {
throw new IllegalArgumentException("field [" + oldFieldName + "] doesn't exist");
if (document.hasField(field) == false) {
throw new IllegalArgumentException("field [" + field + "] doesn't exist");
}
if (document.hasField(newFieldName)) {
throw new IllegalArgumentException("field [" + newFieldName + "] already exists");
if (document.hasField(targetField)) {
throw new IllegalArgumentException("field [" + targetField + "] already exists");
}
Object oldValue = document.getFieldValue(oldFieldName, Object.class);
document.setFieldValue(newFieldName, oldValue);
Object oldValue = document.getFieldValue(field, Object.class);
document.setFieldValue(targetField, oldValue);
try {
document.removeField(oldFieldName);
document.removeField(field);
} catch (Exception e) {
//remove the new field if the removal of the old one failed
document.removeField(newFieldName);
document.removeField(targetField);
throw e;
}
}
@ -79,8 +79,8 @@ public final class RenameProcessor extends AbstractProcessor {
@Override
public RenameProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
String newField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "to");
return new RenameProcessor(processorTag, field, newField);
String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field");
return new RenameProcessor(processorTag, field, targetField);
}
}
}


@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ingest.core.AbstractProcessorFactory;
import org.elasticsearch.ingest.core.Processor;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTimeZone;
@ -42,15 +41,15 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
config.put("field", sourceField);
config.put("formats", Collections.singletonList("dd/MM/yyyyy"));
String processorTag = randomAsciiOfLength(10);
config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
DateProcessor processor = factory.create(config);
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getMatchField(), equalTo(sourceField));
assertThat(processor.getField(), equalTo(sourceField));
assertThat(processor.getTargetField(), equalTo(DateProcessor.DEFAULT_TARGET_FIELD));
assertThat(processor.getMatchFormats(), equalTo(Collections.singletonList("dd/MM/yyyyy")));
assertThat(processor.getFormats(), equalTo(Collections.singletonList("dd/MM/yyyyy")));
assertThat(processor.getLocale(), equalTo(Locale.ENGLISH));
assertThat(processor.getTimezone(), equalTo(DateTimeZone.UTC));
}
@ -60,13 +59,13 @@ public class DateProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>();
String targetField = randomAsciiOfLengthBetween(1, 10);
config.put("target_field", targetField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
config.put("formats", Collections.singletonList("dd/MM/yyyyy"));
try {
factory.create(config);
fail("processor creation should have failed");
} catch(ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("[match_field] required property is missing"));
assertThat(e.getMessage(), containsString("[field] required property is missing"));
}
}
@ -75,14 +74,14 @@ public class DateProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10);
String targetField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField);
config.put("field", sourceField);
config.put("target_field", targetField);
try {
factory.create(config);
fail("processor creation should have failed");
} catch(ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("[match_formats] required property is missing"));
assertThat(e.getMessage(), containsString("[formats] required property is missing"));
}
}
@ -90,8 +89,8 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
config.put("field", sourceField);
config.put("formats", Collections.singletonList("dd/MM/yyyyy"));
Locale locale = randomLocale(random());
config.put("locale", locale.toLanguageTag());
@ -103,8 +102,8 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
config.put("field", sourceField);
config.put("formats", Collections.singletonList("dd/MM/yyyyy"));
config.put("locale", "invalid_locale");
try {
factory.create(config);
@ -118,8 +117,8 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
config.put("field", sourceField);
config.put("formats", Collections.singletonList("dd/MM/yyyyy"));
DateTimeZone timezone = randomTimezone();
config.put("timezone", timezone.getID());
@ -131,7 +130,7 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField);
config.put("field", sourceField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
config.put("timezone", "invalid_timezone");
try {
@ -154,25 +153,25 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField);
config.put("match_formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"));
config.put("field", sourceField);
config.put("formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"));
DateProcessor processor = factory.create(config);
assertThat(processor.getMatchFormats(), equalTo(Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")));
assertThat(processor.getFormats(), equalTo(Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")));
}
public void testParseMatchFormatsFailure() throws Exception {
DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField);
config.put("match_formats", "dd/MM/yyyy");
config.put("field", sourceField);
config.put("formats", "dd/MM/yyyy");
try {
factory.create(config);
fail("processor creation should have failed");
} catch(ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("[match_formats] property isn't a list, but of type [java.lang.String]"));
assertThat(e.getMessage(), containsString("[formats] property isn't a list, but of type [java.lang.String]"));
}
}
@ -181,9 +180,9 @@ public class DateProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10);
String targetField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField);
config.put("field", sourceField);
config.put("target_field", targetField);
config.put("match_formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"));
config.put("formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"));
DateProcessor processor = factory.create(config);
assertThat(processor.getTargetField(), equalTo(targetField));


@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ingest.core.AbstractProcessorFactory;
import org.elasticsearch.ingest.core.Processor;
import org.elasticsearch.test.ESTestCase;
import java.util.HashMap;
@ -35,19 +34,19 @@ public class RenameProcessorFactoryTests extends ESTestCase {
RenameProcessor.Factory factory = new RenameProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("field", "old_field");
config.put("to", "new_field");
config.put("target_field", "new_field");
String processorTag = randomAsciiOfLength(10);
config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
RenameProcessor renameProcessor = factory.create(config);
assertThat(renameProcessor.getTag(), equalTo(processorTag));
assertThat(renameProcessor.getOldFieldName(), equalTo("old_field"));
assertThat(renameProcessor.getNewFieldName(), equalTo("new_field"));
assertThat(renameProcessor.getField(), equalTo("old_field"));
assertThat(renameProcessor.getTargetField(), equalTo("new_field"));
}
public void testCreateNoFieldPresent() throws Exception {
RenameProcessor.Factory factory = new RenameProcessor.Factory();
Map<String, Object> config = new HashMap<>();
config.put("to", "new_field");
config.put("target_field", "new_field");
try {
factory.create(config);
fail("factory create should have failed");
@ -64,7 +63,7 @@ public class RenameProcessorFactoryTests extends ESTestCase {
factory.create(config);
fail("factory create should have failed");
} catch(ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("[to] required property is missing"));
assertThat(e.getMessage(), equalTo("[target_field] required property is missing"));
}
}
}


@ -16,10 +16,10 @@ representation. The processor will skip the base64 decoding then.
[options="header"]
|======
| Name | Required | Default | Description
| `source_field` | yes | - | The field to get the base64 encoded field from
| `field` | yes | - | The field to get the base64 encoded field from
| `target_field` | no | attachment | The field that will hold the attachment information
| `indexed_chars` | no | 100000 | The number of chars being used for extraction to prevent huge fields. Use `-1` for no limit.
| `fields` | no | all | Properties to select to be stored. Can be `content`, `title`, `name`, `author`, `keywords`, `date`, `content_type`, `content_length`, `language`
| `properties` | no | all | Properties to select to be stored. Can be `content`, `title`, `name`, `author`, `keywords`, `date`, `content_type`, `content_length`, `language`
|======
[source,js]
@ -29,7 +29,7 @@ representation. The processor will skip the base64 decoding then.
"processors" : [
{
"attachment" : {
"source_field" : "data"
"field" : "data"
}
}
]


@ -16,19 +16,19 @@ is located at `$ES_HOME/config/ingest/geoip` and holds the shipped databases too
[options="header"]
|======
| Name | Required | Default | Description
| `source_field` | yes | - | The field to get the ip address or hostname from for the geographical lookup.
| `field` | yes | - | The field to get the ip address from for the geographical lookup.
| `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database.
| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files.
| `fields` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup.
| `properties` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup.
|======
*Depends on what is available in `database_file`:
* If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`,
`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude`
and `location`. The fields actually added depend on what has been found and which fields were configured in `fields`.
and `location`. The fields actually added depend on what has been found and which properties were configured in `properties`.
* If the GeoLite2 Country database is used, then the following fields may be added under the `target_field`: `ip`,
`country_iso_code`, `country_name` and `continent_name`. The fields actually added depend on what has been found and which fields were configured in `fields`.
`country_iso_code`, `country_name` and `continent_name`. The fields actually added depend on what has been found and which properties were configured in `properties`.
Here is an example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field:
@ -39,7 +39,7 @@ Here is an example that uses the default city database and adds the geographical
"processors" : [
{
"geoip" : {
"source_field" : "ip"
"field" : "ip"
}
}
]
@ -55,7 +55,7 @@ Here is an example that uses the default country database and adds the geographi
"processors" : [
{
"geoip" : {
"source_field" : "ip",
"field" : "ip",
"target_field" : "geo",
"database_file" : "GeoLite2-Country.mmdb"
}


@ -527,7 +527,7 @@ Elasticsearch.
{
"rename" : {
"field" : "foo",
"to" : "bar",
"target_field" : "bar",
"on_failure" : [
{
"set" : {
@ -713,7 +713,7 @@ in the same order they were defined as part of the processor definition.
[options="header"]
|======
| Name | Required | Default | Description
| `match_field` | yes | - | The field to get the date from.
| `field` | yes | - | The field to get the date from.
| `target_field` | no | @timestamp | The field that will hold the parsed date.
| `match_formats` | yes | - | An array of the expected date formats. Can be a Joda pattern or one of the following formats: ISO8601, UNIX, UNIX_MS, or TAI64N.
| `timezone` | no | UTC | The timezone to use when parsing the date.
@ -729,7 +729,7 @@ Here is an example that adds the parsed date to the `timestamp` field based on t
"processors" : [
{
"date" : {
"match_field" : "initial_date",
"field" : "initial_date",
"target_field" : "timestamp",
"match_formats" : ["dd/MM/yyyy hh:mm:ss"],
"timezone" : "Europe/Amsterdam"
@ -1152,9 +1152,9 @@ Renames an existing field. If the field doesn't exist or the new name is already
.Rename Options
[options="header"]
|======
| Name | Required | Default | Description
| `field` | yes | - | The field to be renamed
| `to` | yes | - | The new name of the field
| Name | Required | Default | Description
| `field` | yes | - | The field to be renamed
| `target_field` | yes | - | The new name of the field
|======
[source,js]
@ -1162,7 +1162,7 @@ Renames an existing field. If the field doesn't exist or the new name is already
{
"rename": {
"field": "foo",
"to": "foobar"
"target_field": "foobar"
}
}
--------------------------------------------------


@ -48,17 +48,17 @@ public final class AttachmentProcessor extends AbstractProcessor {
private static final int NUMBER_OF_CHARS_INDEXED = 100000;
private final String sourceField;
private final String field;
private final String targetField;
private final Set<Field> fields;
private final Set<Property> properties;
private final int indexedChars;
AttachmentProcessor(String tag, String sourceField, String targetField, Set<Field> fields,
AttachmentProcessor(String tag, String field, String targetField, Set<Property> properties,
int indexedChars) throws IOException {
super(tag);
this.sourceField = sourceField;
this.field = field;
this.targetField = targetField;
this.fields = fields;
this.properties = properties;
this.indexedChars = indexedChars;
}
@ -68,62 +68,62 @@ public final class AttachmentProcessor extends AbstractProcessor {
try {
Metadata metadata = new Metadata();
byte[] input = ingestDocument.getFieldValueAsBytes(sourceField);
byte[] input = ingestDocument.getFieldValueAsBytes(field);
String parsedContent = TikaImpl.parse(input, metadata, indexedChars);
if (fields.contains(Field.CONTENT) && Strings.hasLength(parsedContent)) {
if (properties.contains(Property.CONTENT) && Strings.hasLength(parsedContent)) {
// somehow tika seems to append a newline at the end automatically, lets remove that again
additionalFields.put(Field.CONTENT.toLowerCase(), parsedContent.trim());
additionalFields.put(Property.CONTENT.toLowerCase(), parsedContent.trim());
}
if (fields.contains(Field.LANGUAGE) && Strings.hasLength(parsedContent)) {
if (properties.contains(Property.LANGUAGE) && Strings.hasLength(parsedContent)) {
LanguageIdentifier identifier = new LanguageIdentifier(parsedContent);
String language = identifier.getLanguage();
additionalFields.put(Field.LANGUAGE.toLowerCase(), language);
additionalFields.put(Property.LANGUAGE.toLowerCase(), language);
}
if (fields.contains(Field.DATE)) {
if (properties.contains(Property.DATE)) {
String createdDate = metadata.get(TikaCoreProperties.CREATED);
if (createdDate != null) {
additionalFields.put(Field.DATE.toLowerCase(), createdDate);
additionalFields.put(Property.DATE.toLowerCase(), createdDate);
}
}
if (fields.contains(Field.TITLE)) {
if (properties.contains(Property.TITLE)) {
String title = metadata.get(TikaCoreProperties.TITLE);
if (Strings.hasLength(title)) {
additionalFields.put(Field.TITLE.toLowerCase(), title);
additionalFields.put(Property.TITLE.toLowerCase(), title);
}
}
if (fields.contains(Field.AUTHOR)) {
if (properties.contains(Property.AUTHOR)) {
String author = metadata.get("Author");
if (Strings.hasLength(author)) {
additionalFields.put(Field.AUTHOR.toLowerCase(), author);
additionalFields.put(Property.AUTHOR.toLowerCase(), author);
}
}
if (fields.contains(Field.KEYWORDS)) {
if (properties.contains(Property.KEYWORDS)) {
String keywords = metadata.get("Keywords");
if (Strings.hasLength(keywords)) {
additionalFields.put(Field.KEYWORDS.toLowerCase(), keywords);
additionalFields.put(Property.KEYWORDS.toLowerCase(), keywords);
}
}
if (fields.contains(Field.CONTENT_TYPE)) {
if (properties.contains(Property.CONTENT_TYPE)) {
String contentType = metadata.get(Metadata.CONTENT_TYPE);
if (Strings.hasLength(contentType)) {
additionalFields.put(Field.CONTENT_TYPE.toLowerCase(), contentType);
additionalFields.put(Property.CONTENT_TYPE.toLowerCase(), contentType);
}
}
if (fields.contains(Field.CONTENT_LENGTH)) {
if (properties.contains(Property.CONTENT_LENGTH)) {
String contentLength = metadata.get(Metadata.CONTENT_LENGTH);
String length = Strings.hasLength(contentLength) ? contentLength : String.valueOf(parsedContent.length());
additionalFields.put(Field.CONTENT_LENGTH.toLowerCase(), length);
additionalFields.put(Property.CONTENT_LENGTH.toLowerCase(), length);
}
} catch (Throwable e) {
throw new ElasticsearchParseException("Error parsing document in field [{}]", e, sourceField);
throw new ElasticsearchParseException("Error parsing document in field [{}]", e, field);
}
ingestDocument.setFieldValue(targetField, additionalFields);
@ -134,16 +134,16 @@ public final class AttachmentProcessor extends AbstractProcessor {
return TYPE;
}
String getSourceField() {
return sourceField;
String getField() {
return field;
}
String getTargetField() {
return targetField;
}
Set<Field> getFields() {
return fields;
Set<Property> getProperties() {
return properties;
}
int getIndexedChars() {
@ -152,35 +152,35 @@ public final class AttachmentProcessor extends AbstractProcessor {
public static final class Factory extends AbstractProcessorFactory<AttachmentProcessor> {
static final Set<Field> DEFAULT_FIELDS = EnumSet.allOf(Field.class);
static final Set<Property> DEFAULT_PROPERTIES = EnumSet.allOf(Property.class);
@Override
public AttachmentProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
String sourceField = readStringProperty(TYPE, processorTag, config, "source_field");
String field = readStringProperty(TYPE, processorTag, config, "field");
String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "attachment");
List<String> fieldNames = readOptionalList(TYPE, processorTag, config, "fields");
List<String> properyNames = readOptionalList(TYPE, processorTag, config, "properties");
int indexedChars = readIntProperty(TYPE, processorTag, config, "indexed_chars", NUMBER_OF_CHARS_INDEXED);
final Set<Field> fields;
if (fieldNames != null) {
fields = EnumSet.noneOf(Field.class);
for (String fieldName : fieldNames) {
final Set<Property> properties;
if (properyNames != null) {
properties = EnumSet.noneOf(Property.class);
for (String fieldName : properyNames) {
try {
fields.add(Field.parse(fieldName));
properties.add(Property.parse(fieldName));
} catch (Exception e) {
throw newConfigurationException(TYPE, processorTag, "fields", "illegal field option [" +
fieldName + "]. valid values are " + Arrays.toString(Field.values()));
throw newConfigurationException(TYPE, processorTag, "properties", "illegal field option [" +
fieldName + "]. valid values are " + Arrays.toString(Property.values()));
}
}
} else {
fields = DEFAULT_FIELDS;
properties = DEFAULT_PROPERTIES;
}
return new AttachmentProcessor(processorTag, sourceField, targetField, fields, indexedChars);
return new AttachmentProcessor(processorTag, field, targetField, properties, indexedChars);
}
}
public enum Field {
enum Property {
CONTENT,
TITLE,
@ -191,7 +191,7 @@ public final class AttachmentProcessor extends AbstractProcessor {
CONTENT_LENGTH,
LANGUAGE;
public static Field parse(String value) {
public static Property parse(String value) {
return valueOf(value.toUpperCase(Locale.ROOT));
}


@ -43,22 +43,22 @@ public class AttachmentProcessorFactoryTests extends ESTestCase {
public void testBuildDefaults() throws Exception {
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("field", "_field");
String processorTag = randomAsciiOfLength(10);
config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
AttachmentProcessor processor = factory.create(config);
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getSourceField(), equalTo("_field"));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("attachment"));
assertThat(processor.getFields(), sameInstance(AttachmentProcessor.Factory.DEFAULT_FIELDS));
assertThat(processor.getProperties(), sameInstance(AttachmentProcessor.Factory.DEFAULT_PROPERTIES));
}
public void testConfigureIndexedChars() throws Exception {
int indexedChars = randomIntBetween(1, 100000);
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("field", "_field");
config.put("indexed_chars", indexedChars);
String processorTag = randomAsciiOfLength(10);
@ -70,53 +70,53 @@ public class AttachmentProcessorFactoryTests extends ESTestCase {
public void testBuildTargetField() throws Exception {
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("field", "_field");
config.put("target_field", "_field");
AttachmentProcessor processor = factory.create(config);
assertThat(processor.getSourceField(), equalTo("_field"));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("_field"));
}
public void testBuildFields() throws Exception {
Set<AttachmentProcessor.Field> fields = EnumSet.noneOf(AttachmentProcessor.Field.class);
Set<AttachmentProcessor.Property> properties = EnumSet.noneOf(AttachmentProcessor.Property.class);
List<String> fieldNames = new ArrayList<>();
int numFields = scaledRandomIntBetween(1, AttachmentProcessor.Field.values().length);
int numFields = scaledRandomIntBetween(1, AttachmentProcessor.Property.values().length);
for (int i = 0; i < numFields; i++) {
AttachmentProcessor.Field field = AttachmentProcessor.Field.values()[i];
fields.add(field);
fieldNames.add(field.name().toLowerCase(Locale.ROOT));
AttachmentProcessor.Property property = AttachmentProcessor.Property.values()[i];
properties.add(property);
fieldNames.add(property.name().toLowerCase(Locale.ROOT));
}
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("fields", fieldNames);
config.put("field", "_field");
config.put("properties", fieldNames);
AttachmentProcessor processor = factory.create(config);
assertThat(processor.getSourceField(), equalTo("_field"));
assertThat(processor.getFields(), equalTo(fields));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getProperties(), equalTo(properties));
}
public void testBuildIllegalFieldOption() throws Exception {
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("fields", Collections.singletonList("invalid"));
config.put("field", "_field");
config.put("properties", Collections.singletonList("invalid"));
try {
factory.create(config);
fail("exception expected");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("[fields] illegal field option [invalid]"));
assertThat(e.getMessage(), containsString("[properties] illegal field option [invalid]"));
// ensure allowed fields are mentioned
for (AttachmentProcessor.Field field : AttachmentProcessor.Field.values()) {
assertThat(e.getMessage(), containsString(field.name()));
for (AttachmentProcessor.Property property : AttachmentProcessor.Property.values()) {
assertThat(e.getMessage(), containsString(property.name()));
}
}
config = new HashMap<>();
config.put("source_field", "_field");
config.put("fields", "invalid");
config.put("field", "_field");
config.put("properties", "invalid");
try {
factory.create(config);
fail("exception expected");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("[fields] property isn't a list, but of type [java.lang.String]"));
assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]"));
}
}
}


@ -51,7 +51,7 @@ public class AttachmentProcessorTests extends ESTestCase {
@Before
public void createStandardProcessor() throws IOException {
processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field",
"target_field", EnumSet.allOf(AttachmentProcessor.Field.class), 10000);
"target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 10000);
}
public void testEnglishTextDocument() throws Exception {
@ -66,25 +66,25 @@ public class AttachmentProcessorTests extends ESTestCase {
public void testHtmlDocumentWithRandomFields() throws Exception {
//date is not present in the html doc
ArrayList<AttachmentProcessor.Field> fieldsList = new ArrayList<>(EnumSet.complementOf(EnumSet.of
(AttachmentProcessor.Field.DATE)));
Set<AttachmentProcessor.Field> selectedFields = new HashSet<>();
ArrayList<AttachmentProcessor.Property> fieldsList = new ArrayList<>(EnumSet.complementOf(EnumSet.of
(AttachmentProcessor.Property.DATE)));
Set<AttachmentProcessor.Property> selectedProperties = new HashSet<>();
int numFields = randomIntBetween(1, fieldsList.size());
String[] selectedFieldNames = new String[numFields];
for (int i = 0; i < numFields; i++) {
AttachmentProcessor.Field field;
AttachmentProcessor.Property property;
do {
field = randomFrom(fieldsList);
} while (selectedFields.add(field) == false);
property = randomFrom(fieldsList);
} while (selectedProperties.add(property) == false);
selectedFieldNames[i] = field.toLowerCase();
selectedFieldNames[i] = property.toLowerCase();
}
if (randomBoolean()) {
selectedFields.add(AttachmentProcessor.Field.DATE);
selectedProperties.add(AttachmentProcessor.Property.DATE);
}
processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field",
"target_field", selectedFields, 10000);
"target_field", selectedProperties, 10000);
Map<String, Object> attachmentData = parseDocument("htmlWithEmptyDateMeta.html", processor);
assertThat(attachmentData.keySet(), hasSize(selectedFieldNames.length));


@ -9,7 +9,7 @@
"processors": [
{
"attachment" : {
"source_field" : "field1"
"field" : "field1"
}
}
]
@ -51,8 +51,8 @@
"processors": [
{
"attachment" : {
"source_field" : "field1",
"fields" : ["language"]
"field" : "field1",
"properties" : ["language"]
}
}
]
@ -87,7 +87,7 @@
"processors": [
{
"attachment" : {
"source_field" : "field1",
"field" : "field1",
"indexed_chars": 30
}
}


@ -9,7 +9,7 @@
"processors": [
{
"attachment" : {
"source_field" : "field1"
"field" : "field1"
}
}
]
@ -49,7 +49,7 @@
"processors": [
{
"attachment" : {
"source_field" : "field1"
"field" : "field1"
}
}
]


@ -59,22 +59,22 @@ public final class GeoIpProcessor extends AbstractProcessor {
public static final String TYPE = "geoip";
private final String sourceField;
private final String field;
private final String targetField;
private final DatabaseReader dbReader;
private final Set<Field> fields;
private final Set<Property> properties;
GeoIpProcessor(String tag, String sourceField, DatabaseReader dbReader, String targetField, Set<Field> fields) throws IOException {
GeoIpProcessor(String tag, String field, DatabaseReader dbReader, String targetField, Set<Property> properties) throws IOException {
super(tag);
this.sourceField = sourceField;
this.field = field;
this.targetField = targetField;
this.dbReader = dbReader;
this.fields = fields;
this.properties = properties;
}
@Override
public void execute(IngestDocument ingestDocument) {
String ip = ingestDocument.getFieldValue(sourceField, String.class);
String ip = ingestDocument.getFieldValue(field, String.class);
final InetAddress ipAddress = InetAddresses.forString(ip);
Map<String, Object> geoData;
@ -104,8 +104,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
return TYPE;
}
String getSourceField() {
return sourceField;
String getField() {
return field;
}
String getTargetField() {
@ -116,8 +116,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
return dbReader;
}
Set<Field> getFields() {
return fields;
Set<Property> getProperties() {
return properties;
}
private Map<String, Object> retrieveCityGeoData(InetAddress ipAddress) {
@ -142,8 +142,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
Subdivision subdivision = response.getMostSpecificSubdivision();
Map<String, Object> geoData = new HashMap<>();
for (Field field : fields) {
switch (field) {
for (Property property : this.properties) {
switch (property) {
case IP:
geoData.put("ip", NetworkAddress.format(ipAddress));
break;
@ -195,8 +195,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
Continent continent = response.getContinent();
Map<String, Object> geoData = new HashMap<>();
for (Field field : fields) {
switch (field) {
for (Property property : this.properties) {
switch (property) {
case IP:
geoData.put("ip", NetworkAddress.format(ipAddress));
break;
@ -216,8 +216,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
public static final class Factory extends AbstractProcessorFactory<GeoIpProcessor> implements Closeable {
static final Set<Field> DEFAULT_FIELDS = EnumSet.of(
Field.CONTINENT_NAME, Field.COUNTRY_ISO_CODE, Field.REGION_NAME, Field.CITY_NAME, Field.LOCATION
static final Set<Property> DEFAULT_PROPERTIES = EnumSet.of(
Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION
);
private final Map<String, DatabaseReader> databaseReaders;
@ -228,30 +228,30 @@ public final class GeoIpProcessor extends AbstractProcessor {
@Override
public GeoIpProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
String ipField = readStringProperty(TYPE, processorTag, config, "source_field");
String ipField = readStringProperty(TYPE, processorTag, config, "field");
String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "geoip");
String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb");
List<String> fieldNames = readOptionalList(TYPE, processorTag, config, "fields");
List<String> propertyNames = readOptionalList(TYPE, processorTag, config, "properties");
final Set<Field> fields;
if (fieldNames != null) {
fields = EnumSet.noneOf(Field.class);
for (String fieldName : fieldNames) {
final Set<Property> properties;
if (propertyNames != null) {
properties = EnumSet.noneOf(Property.class);
for (String fieldName : propertyNames) {
try {
fields.add(Field.parse(fieldName));
properties.add(Property.parse(fieldName));
} catch (Exception e) {
throw newConfigurationException(TYPE, processorTag, "fields", "illegal field option [" + fieldName + "]. valid values are [" + Arrays.toString(Field.values()) + "]");
throw newConfigurationException(TYPE, processorTag, "properties", "illegal field option [" + fieldName + "]. valid values are [" + Arrays.toString(Property.values()) + "]");
}
}
} else {
fields = DEFAULT_FIELDS;
properties = DEFAULT_PROPERTIES;
}
DatabaseReader databaseReader = databaseReaders.get(databaseFile);
if (databaseReader == null) {
throw newConfigurationException(TYPE, processorTag, "database_file", "database file [" + databaseFile + "] doesn't exist");
}
return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, fields);
return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, properties);
}
@Override
@ -270,7 +270,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
}
}
public enum Field {
enum Property {
IP,
COUNTRY_ISO_CODE,
@ -283,7 +283,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
LONGITUDE,
LOCATION;
public static Field parse(String value) {
public static Property parse(String value) {
return valueOf(value.toUpperCase(Locale.ROOT));
}
}


@ -69,36 +69,36 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("field", "_field");
String processorTag = randomAsciiOfLength(10);
config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
GeoIpProcessor processor = factory.create(config);
assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getSourceField(), equalTo("_field"));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("geoip"));
assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City"));
assertThat(processor.getFields(), sameInstance(GeoIpProcessor.Factory.DEFAULT_FIELDS));
assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_PROPERTIES));
}
public void testBuildTargetField() throws Exception {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("field", "_field");
config.put("target_field", "_field");
GeoIpProcessor processor = factory.create(config);
assertThat(processor.getSourceField(), equalTo("_field"));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("_field"));
}
public void testBuildDbFile() throws Exception {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("field", "_field");
config.put("database_file", "GeoLite2-Country.mmdb");
GeoIpProcessor processor = factory.create(config);
assertThat(processor.getSourceField(), equalTo("_field"));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("geoip"));
assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country"));
}
@ -107,7 +107,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("field", "_field");
config.put("database_file", "does-not-exist.mmdb");
try {
factory.create(config);
@ -120,43 +120,43 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
public void testBuildFields() throws Exception {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Set<GeoIpProcessor.Field> fields = EnumSet.noneOf(GeoIpProcessor.Field.class);
Set<GeoIpProcessor.Property> properties = EnumSet.noneOf(GeoIpProcessor.Property.class);
List<String> fieldNames = new ArrayList<>();
int numFields = scaledRandomIntBetween(1, GeoIpProcessor.Field.values().length);
int numFields = scaledRandomIntBetween(1, GeoIpProcessor.Property.values().length);
for (int i = 0; i < numFields; i++) {
GeoIpProcessor.Field field = GeoIpProcessor.Field.values()[i];
fields.add(field);
fieldNames.add(field.name().toLowerCase(Locale.ROOT));
GeoIpProcessor.Property property = GeoIpProcessor.Property.values()[i];
properties.add(property);
fieldNames.add(property.name().toLowerCase(Locale.ROOT));
}
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("fields", fieldNames);
config.put("field", "_field");
config.put("properties", fieldNames);
GeoIpProcessor processor = factory.create(config);
assertThat(processor.getSourceField(), equalTo("_field"));
assertThat(processor.getFields(), equalTo(fields));
assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getProperties(), equalTo(properties));
}
public void testBuildIllegalFieldOption() throws Exception {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field");
config.put("fields", Collections.singletonList("invalid"));
config.put("field", "_field");
config.put("properties", Collections.singletonList("invalid"));
try {
factory.create(config);
fail("exception expected");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("[fields] illegal field option [invalid]. valid values are [[IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LATITUDE, LONGITUDE, LOCATION]]"));
assertThat(e.getMessage(), equalTo("[properties] illegal field option [invalid]. valid values are [[IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LATITUDE, LONGITUDE, LOCATION]]"));
}
config = new HashMap<>();
config.put("source_field", "_field");
config.put("fields", "invalid");
config.put("field", "_field");
config.put("properties", "invalid");
try {
factory.create(config);
fail("exception expected");
} catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("[fields] property isn't a list, but of type [java.lang.String]"));
assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]"));
}
}
}


@ -36,7 +36,7 @@ public class GeoIpProcessorTests extends ESTestCase {
public void testCity() throws Exception {
InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class));
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class));
Map<String, Object> document = new HashMap<>();
document.put("source_field", "82.170.213.79");
@ -62,7 +62,7 @@ public class GeoIpProcessorTests extends ESTestCase {
public void testCountry() throws Exception {
InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-Country.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class));
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class));
Map<String, Object> document = new HashMap<>();
document.put("source_field", "82.170.213.79");
@ -81,7 +81,7 @@ public class GeoIpProcessorTests extends ESTestCase {
public void testAddressIsNotInTheDatabase() throws Exception {
InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class));
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class));
Map<String, Object> document = new HashMap<>();
document.put("source_field", "202.45.11.11");
@ -95,7 +95,7 @@ public class GeoIpProcessorTests extends ESTestCase {
/** Don't silently do DNS lookups or anything trappy on bogus data */
public void testInvalid() throws Exception {
InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class));
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class));
Map<String, Object> document = new HashMap<>();
document.put("source_field", "www.google.com");


@ -9,7 +9,7 @@
"processors": [
{
"geoip" : {
"source_field" : "field1"
"field" : "field1"
}
}
]
@ -53,8 +53,8 @@
"processors": [
{
"geoip" : {
"source_field" : "field1",
"fields" : ["city_name", "country_iso_code", "ip", "latitude", "longitude", "location", "timezone", "country_name", "region_name", "continent_name"]
"field" : "field1",
"properties" : ["city_name", "country_iso_code", "ip", "latitude", "longitude", "location", "timezone", "country_name", "region_name", "continent_name"]
}
}
]
@ -97,7 +97,7 @@
"processors": [
{
"geoip" : {
"source_field" : "field1",
"field" : "field1",
"database_file" : "GeoLite2-Country.mmdb"
}
}


@ -292,7 +292,7 @@
"rename" : {
"tag" : "rename-status",
"field" : "status",
"to" : "bar",
"target_field" : "bar",
"on_failure" : [
{
"set" : {


@ -26,14 +26,14 @@
},
{
"date" : {
"match_field" : "timestamp",
"field" : "timestamp",
"target_field" : "timestamp",
"match_formats" : ["dd/MMM/YYYY:HH:mm:ss Z"]
"formats" : ["dd/MMM/YYYY:HH:mm:ss Z"]
}
},
{
"geoip" : {
"source_field" : "clientip"
"field" : "clientip"
}
}
]
@ -128,7 +128,7 @@
{
"rename" : {
"field" : "eyeColor",
"to" : "eye_color"
"target_field" : "eye_color"
}
}
]


@ -9,9 +9,9 @@
"processors": [
{
"date" : {
"match_field" : "date_source_field",
"field" : "date_source_field",
"target_field" : "date_target_field",
"match_formats" : ["dd/MM/yyyy"],
"formats" : ["dd/MM/yyyy"],
"timezone" : "Europe/Amsterdam"
}
}


@ -22,7 +22,7 @@
{
"rename" : {
"field" : "field_to_rename",
"to": "renamed_field"
"target_field": "renamed_field"
}
},
{


@ -143,7 +143,7 @@
{
"rename" : {
"field" : "does_not_exist",
"to" : "field2",
"target_field" : "field2",
"on_failure" : [
{
"set" : {
@ -425,7 +425,7 @@
"rename" : {
"tag" : "rename-1",
"field" : "foofield",
"to" : "field1",
"target_field" : "field1",
"on_failure" : [
{
"set" : {
@ -437,7 +437,7 @@
{
"rename" : {
"field" : "foofield2",
"to" : "field1",
"target_field" : "field1",
"on_failure" : [
{
"set" : {


@ -15,9 +15,9 @@
},
{
"date" : {
"match_field" : "date",
"field" : "date",
"target_field" : "date",
"match_formats" : ["yyyy"]
"formats" : ["yyyy"]
}
}
],
@ -61,7 +61,7 @@
{
"rename" : {
"field" : "foofield",
"to" : "field1",
"target_field" : "field1",
"on_failure" : [
{
"set" : {
@ -72,7 +72,7 @@
{
"rename" : {
"field" : "foofield2",
"to" : "field1",
"target_field" : "field1",
"on_failure" : [
{
"set" : {