ingest: Streamline option naming for several processors:

* `rename` processor, renamed `to` to `target_field`
* `date` processor, renamed `match_field` to `field` and renamed `match_formats` to `formats`
* `geoip` processor, renamed `source_field` to `field` and renamed `fields` to `properties`
* `attachment` processor, renamed `source_field` to `field` and renamed `fields` to `properties`

Closes #17835
This commit is contained in:
Martijn van Groningen 2016-04-20 18:00:11 +02:00
parent 9eb242a5fe
commit dd2184ab25
22 changed files with 233 additions and 235 deletions

View File

@ -42,28 +42,28 @@ public final class DateProcessor extends AbstractProcessor {
private final DateTimeZone timezone; private final DateTimeZone timezone;
private final Locale locale; private final Locale locale;
private final String matchField; private final String field;
private final String targetField; private final String targetField;
private final List<String> matchFormats; private final List<String> formats;
private final List<Function<String, DateTime>> dateParsers; private final List<Function<String, DateTime>> dateParsers;
DateProcessor(String tag, DateTimeZone timezone, Locale locale, String matchField, List<String> matchFormats, String targetField) { DateProcessor(String tag, DateTimeZone timezone, Locale locale, String field, List<String> formats, String targetField) {
super(tag); super(tag);
this.timezone = timezone; this.timezone = timezone;
this.locale = locale; this.locale = locale;
this.matchField = matchField; this.field = field;
this.targetField = targetField; this.targetField = targetField;
this.matchFormats = matchFormats; this.formats = formats;
this.dateParsers = new ArrayList<>(); this.dateParsers = new ArrayList<>();
for (String matchFormat : matchFormats) { for (String format : formats) {
DateFormat dateFormat = DateFormat.fromString(matchFormat); DateFormat dateFormat = DateFormat.fromString(format);
dateParsers.add(dateFormat.getFunction(matchFormat, timezone, locale)); dateParsers.add(dateFormat.getFunction(format, timezone, locale));
} }
} }
@Override @Override
public void execute(IngestDocument ingestDocument) { public void execute(IngestDocument ingestDocument) {
String value = ingestDocument.getFieldValue(matchField, String.class); String value = ingestDocument.getFieldValue(field, String.class);
DateTime dateTime = null; DateTime dateTime = null;
Exception lastException = null; Exception lastException = null;
@ -96,23 +96,23 @@ public final class DateProcessor extends AbstractProcessor {
return locale; return locale;
} }
String getMatchField() { String getField() {
return matchField; return field;
} }
String getTargetField() { String getTargetField() {
return targetField; return targetField;
} }
List<String> getMatchFormats() { List<String> getFormats() {
return matchFormats; return formats;
} }
public static final class Factory extends AbstractProcessorFactory<DateProcessor> { public static final class Factory extends AbstractProcessorFactory<DateProcessor> {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public DateProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception { public DateProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
String matchField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "match_field"); String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", DEFAULT_TARGET_FIELD); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", DEFAULT_TARGET_FIELD);
String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "timezone"); String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "timezone");
DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString); DateTimeZone timezone = timezoneString == null ? DateTimeZone.UTC : DateTimeZone.forID(timezoneString);
@ -125,8 +125,8 @@ public final class DateProcessor extends AbstractProcessor {
throw new IllegalArgumentException("Invalid language tag specified: " + localeString); throw new IllegalArgumentException("Invalid language tag specified: " + localeString);
} }
} }
List<String> matchFormats = ConfigurationUtils.readList(TYPE, processorTag, config, "match_formats"); List<String> formats = ConfigurationUtils.readList(TYPE, processorTag, config, "formats");
return new DateProcessor(processorTag, timezone, locale, matchField, matchFormats, targetField); return new DateProcessor(processorTag, timezone, locale, field, formats, targetField);
} }
} }
} }

View File

@ -33,39 +33,39 @@ public final class RenameProcessor extends AbstractProcessor {
public static final String TYPE = "rename"; public static final String TYPE = "rename";
private final String oldFieldName; private final String field;
private final String newFieldName; private final String targetField;
RenameProcessor(String tag, String oldFieldName, String newFieldName) { RenameProcessor(String tag, String field, String targetField) {
super(tag); super(tag);
this.oldFieldName = oldFieldName; this.field = field;
this.newFieldName = newFieldName; this.targetField = targetField;
} }
String getOldFieldName() { String getField() {
return oldFieldName; return field;
} }
String getNewFieldName() { String getTargetField() {
return newFieldName; return targetField;
} }
@Override @Override
public void execute(IngestDocument document) { public void execute(IngestDocument document) {
if (document.hasField(oldFieldName) == false) { if (document.hasField(field) == false) {
throw new IllegalArgumentException("field [" + oldFieldName + "] doesn't exist"); throw new IllegalArgumentException("field [" + field + "] doesn't exist");
} }
if (document.hasField(newFieldName)) { if (document.hasField(targetField)) {
throw new IllegalArgumentException("field [" + newFieldName + "] already exists"); throw new IllegalArgumentException("field [" + targetField + "] already exists");
} }
Object oldValue = document.getFieldValue(oldFieldName, Object.class); Object oldValue = document.getFieldValue(field, Object.class);
document.setFieldValue(newFieldName, oldValue); document.setFieldValue(targetField, oldValue);
try { try {
document.removeField(oldFieldName); document.removeField(field);
} catch (Exception e) { } catch (Exception e) {
//remove the new field if the removal of the old one failed //remove the new field if the removal of the old one failed
document.removeField(newFieldName); document.removeField(targetField);
throw e; throw e;
} }
} }
@ -79,8 +79,8 @@ public final class RenameProcessor extends AbstractProcessor {
@Override @Override
public RenameProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception { public RenameProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
String newField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "to"); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field");
return new RenameProcessor(processorTag, field, newField); return new RenameProcessor(processorTag, field, targetField);
} }
} }
} }

View File

@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.AbstractProcessorFactory;
import org.elasticsearch.ingest.core.Processor;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTimeZone; import org.joda.time.DateTimeZone;
@ -42,15 +41,15 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory(); DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10); String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField); config.put("field", sourceField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); config.put("formats", Collections.singletonList("dd/MM/yyyyy"));
String processorTag = randomAsciiOfLength(10); String processorTag = randomAsciiOfLength(10);
config.put(AbstractProcessorFactory.TAG_KEY, processorTag); config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
DateProcessor processor = factory.create(config); DateProcessor processor = factory.create(config);
assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getMatchField(), equalTo(sourceField)); assertThat(processor.getField(), equalTo(sourceField));
assertThat(processor.getTargetField(), equalTo(DateProcessor.DEFAULT_TARGET_FIELD)); assertThat(processor.getTargetField(), equalTo(DateProcessor.DEFAULT_TARGET_FIELD));
assertThat(processor.getMatchFormats(), equalTo(Collections.singletonList("dd/MM/yyyyy"))); assertThat(processor.getFormats(), equalTo(Collections.singletonList("dd/MM/yyyyy")));
assertThat(processor.getLocale(), equalTo(Locale.ENGLISH)); assertThat(processor.getLocale(), equalTo(Locale.ENGLISH));
assertThat(processor.getTimezone(), equalTo(DateTimeZone.UTC)); assertThat(processor.getTimezone(), equalTo(DateTimeZone.UTC));
} }
@ -60,13 +59,13 @@ public class DateProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
String targetField = randomAsciiOfLengthBetween(1, 10); String targetField = randomAsciiOfLengthBetween(1, 10);
config.put("target_field", targetField); config.put("target_field", targetField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); config.put("formats", Collections.singletonList("dd/MM/yyyyy"));
try { try {
factory.create(config); factory.create(config);
fail("processor creation should have failed"); fail("processor creation should have failed");
} catch(ElasticsearchParseException e) { } catch(ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("[match_field] required property is missing")); assertThat(e.getMessage(), containsString("[field] required property is missing"));
} }
} }
@ -75,14 +74,14 @@ public class DateProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10); String sourceField = randomAsciiOfLengthBetween(1, 10);
String targetField = randomAsciiOfLengthBetween(1, 10); String targetField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField); config.put("field", sourceField);
config.put("target_field", targetField); config.put("target_field", targetField);
try { try {
factory.create(config); factory.create(config);
fail("processor creation should have failed"); fail("processor creation should have failed");
} catch(ElasticsearchParseException e) { } catch(ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("[match_formats] required property is missing")); assertThat(e.getMessage(), containsString("[formats] required property is missing"));
} }
} }
@ -90,8 +89,8 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory(); DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10); String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField); config.put("field", sourceField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); config.put("formats", Collections.singletonList("dd/MM/yyyyy"));
Locale locale = randomLocale(random()); Locale locale = randomLocale(random());
config.put("locale", locale.toLanguageTag()); config.put("locale", locale.toLanguageTag());
@ -103,8 +102,8 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory(); DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10); String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField); config.put("field", sourceField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); config.put("formats", Collections.singletonList("dd/MM/yyyyy"));
config.put("locale", "invalid_locale"); config.put("locale", "invalid_locale");
try { try {
factory.create(config); factory.create(config);
@ -118,8 +117,8 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory(); DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10); String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField); config.put("field", sourceField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); config.put("formats", Collections.singletonList("dd/MM/yyyyy"));
DateTimeZone timezone = randomTimezone(); DateTimeZone timezone = randomTimezone();
config.put("timezone", timezone.getID()); config.put("timezone", timezone.getID());
@ -131,7 +130,7 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory(); DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10); String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField); config.put("field", sourceField);
config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); config.put("match_formats", Collections.singletonList("dd/MM/yyyyy"));
config.put("timezone", "invalid_timezone"); config.put("timezone", "invalid_timezone");
try { try {
@ -154,25 +153,25 @@ public class DateProcessorFactoryTests extends ESTestCase {
DateProcessor.Factory factory = new DateProcessor.Factory(); DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10); String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField); config.put("field", sourceField);
config.put("match_formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")); config.put("formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"));
DateProcessor processor = factory.create(config); DateProcessor processor = factory.create(config);
assertThat(processor.getMatchFormats(), equalTo(Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"))); assertThat(processor.getFormats(), equalTo(Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")));
} }
public void testParseMatchFormatsFailure() throws Exception { public void testParseMatchFormatsFailure() throws Exception {
DateProcessor.Factory factory = new DateProcessor.Factory(); DateProcessor.Factory factory = new DateProcessor.Factory();
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10); String sourceField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField); config.put("field", sourceField);
config.put("match_formats", "dd/MM/yyyy"); config.put("formats", "dd/MM/yyyy");
try { try {
factory.create(config); factory.create(config);
fail("processor creation should have failed"); fail("processor creation should have failed");
} catch(ElasticsearchParseException e) { } catch(ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("[match_formats] property isn't a list, but of type [java.lang.String]")); assertThat(e.getMessage(), containsString("[formats] property isn't a list, but of type [java.lang.String]"));
} }
} }
@ -181,9 +180,9 @@ public class DateProcessorFactoryTests extends ESTestCase {
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
String sourceField = randomAsciiOfLengthBetween(1, 10); String sourceField = randomAsciiOfLengthBetween(1, 10);
String targetField = randomAsciiOfLengthBetween(1, 10); String targetField = randomAsciiOfLengthBetween(1, 10);
config.put("match_field", sourceField); config.put("field", sourceField);
config.put("target_field", targetField); config.put("target_field", targetField);
config.put("match_formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")); config.put("formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"));
DateProcessor processor = factory.create(config); DateProcessor processor = factory.create(config);
assertThat(processor.getTargetField(), equalTo(targetField)); assertThat(processor.getTargetField(), equalTo(targetField));

View File

@ -21,7 +21,6 @@ package org.elasticsearch.ingest.processor;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ingest.core.AbstractProcessorFactory; import org.elasticsearch.ingest.core.AbstractProcessorFactory;
import org.elasticsearch.ingest.core.Processor;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.util.HashMap; import java.util.HashMap;
@ -35,19 +34,19 @@ public class RenameProcessorFactoryTests extends ESTestCase {
RenameProcessor.Factory factory = new RenameProcessor.Factory(); RenameProcessor.Factory factory = new RenameProcessor.Factory();
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("field", "old_field"); config.put("field", "old_field");
config.put("to", "new_field"); config.put("target_field", "new_field");
String processorTag = randomAsciiOfLength(10); String processorTag = randomAsciiOfLength(10);
config.put(AbstractProcessorFactory.TAG_KEY, processorTag); config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
RenameProcessor renameProcessor = factory.create(config); RenameProcessor renameProcessor = factory.create(config);
assertThat(renameProcessor.getTag(), equalTo(processorTag)); assertThat(renameProcessor.getTag(), equalTo(processorTag));
assertThat(renameProcessor.getOldFieldName(), equalTo("old_field")); assertThat(renameProcessor.getField(), equalTo("old_field"));
assertThat(renameProcessor.getNewFieldName(), equalTo("new_field")); assertThat(renameProcessor.getTargetField(), equalTo("new_field"));
} }
public void testCreateNoFieldPresent() throws Exception { public void testCreateNoFieldPresent() throws Exception {
RenameProcessor.Factory factory = new RenameProcessor.Factory(); RenameProcessor.Factory factory = new RenameProcessor.Factory();
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("to", "new_field"); config.put("target_field", "new_field");
try { try {
factory.create(config); factory.create(config);
fail("factory create should have failed"); fail("factory create should have failed");
@ -64,7 +63,7 @@ public class RenameProcessorFactoryTests extends ESTestCase {
factory.create(config); factory.create(config);
fail("factory create should have failed"); fail("factory create should have failed");
} catch(ElasticsearchParseException e) { } catch(ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("[to] required property is missing")); assertThat(e.getMessage(), equalTo("[target_field] required property is missing"));
} }
} }
} }

View File

@ -16,10 +16,10 @@ representation. The processor will skip the base64 decoding then.
[options="header"] [options="header"]
|====== |======
| Name | Required | Default | Description | Name | Required | Default | Description
| `source_field` | yes | - | The field to get the base64 encoded field from | `field` | yes | - | The field to get the base64 encoded field from
| `target_field` | no | attachment | The field that will hold the attachment information | `target_field` | no | attachment | The field that will hold the attachment information
| `indexed_chars` | no | 100000 | The number of chars being used for extraction to prevent huge fields. Use `-1` for no limit. | `indexed_chars` | no | 100000 | The number of chars being used for extraction to prevent huge fields. Use `-1` for no limit.
| `fields` | no | all | Properties to select to be stored. Can be `content`, `title`, `name`, `author`, `keywords`, `date`, `content_type`, `content_length`, `language` | `properties` | no | all | Properties to select to be stored. Can be `content`, `title`, `name`, `author`, `keywords`, `date`, `content_type`, `content_length`, `language`
|====== |======
[source,js] [source,js]
@ -29,7 +29,7 @@ representation. The processor will skip the base64 decoding then.
"processors" : [ "processors" : [
{ {
"attachment" : { "attachment" : {
"source_field" : "data" "field" : "data"
} }
} }
] ]

View File

@ -16,19 +16,19 @@ is located at `$ES_HOME/config/ingest/geoip` and holds the shipped databases too
[options="header"] [options="header"]
|====== |======
| Name | Required | Default | Description | Name | Required | Default | Description
| `source_field` | yes | - | The field to get the ip address or hostname from for the geographical lookup. | `field` | yes | - | The field to get the ip address from for the geographical lookup.
| `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database. | `target_field` | no | geoip | The field that will hold the geographical information looked up from the Maxmind database.
| `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files. | `database_file` | no | GeoLite2-City.mmdb | The database filename in the geoip config directory. The ingest-geoip plugin ships with the GeoLite2-City.mmdb and GeoLite2-Country.mmdb files.
| `fields` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup. | `properties` | no | [`continent_name`, `country_iso_code`, `region_name`, `city_name`, `location`] * | Controls what properties are added to the `target_field` based on the geoip lookup.
|====== |======
*Depends on what is available in `database_file`: *Depends on what is available in `database_file`:
* If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`, * If the GeoLite2 City database is used, then the following fields may be added under the `target_field`: `ip`,
`country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude` `country_iso_code`, `country_name`, `continent_name`, `region_name`, `city_name`, `timezone`, `latitude`, `longitude`
and `location`. The fields actually added depend on what has been found and which fields were configured in `fields`. and `location`. The fields actually added depend on what has been found and which properties were configured in `properties`.
* If the GeoLite2 Country database is used, then the following fields may be added under the `target_field`: `ip`, * If the GeoLite2 Country database is used, then the following fields may be added under the `target_field`: `ip`,
`country_iso_code`, `country_name` and `continent_name`. The fields actually added depend on what has been found and which fields were configured in `fields`. `country_iso_code`, `country_name` and `continent_name`. The fields actually added depend on what has been found and which properties were configured in `properties`.
Here is an example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field: Here is an example that uses the default city database and adds the geographical information to the `geoip` field based on the `ip` field:
@ -39,7 +39,7 @@ Here is an example that uses the default city database and adds the geographical
"processors" : [ "processors" : [
{ {
"geoip" : { "geoip" : {
"source_field" : "ip" "field" : "ip"
} }
} }
] ]
@ -55,7 +55,7 @@ Here is an example that uses the default country database and adds the geographi
"processors" : [ "processors" : [
{ {
"geoip" : { "geoip" : {
"source_field" : "ip", "field" : "ip",
"target_field" : "geo", "target_field" : "geo",
"database_file" : "GeoLite2-Country.mmdb" "database_file" : "GeoLite2-Country.mmdb"
} }

View File

@ -527,7 +527,7 @@ Elasticsearch.
{ {
"rename" : { "rename" : {
"field" : "foo", "field" : "foo",
"to" : "bar", "target_field" : "bar",
"on_failure" : [ "on_failure" : [
{ {
"set" : { "set" : {
@ -713,7 +713,7 @@ in the same order they were defined as part of the processor definition.
[options="header"] [options="header"]
|====== |======
| Name | Required | Default | Description | Name | Required | Default | Description
| `match_field` | yes | - | The field to get the date from. | `field` | yes | - | The field to get the date from.
| `target_field` | no | @timestamp | The field that will hold the parsed date. | `target_field` | no | @timestamp | The field that will hold the parsed date.
| `match_formats` | yes | - | An array of the expected date formats. Can be a Joda pattern or one of the following formats: ISO8601, UNIX, UNIX_MS, or TAI64N. | `match_formats` | yes | - | An array of the expected date formats. Can be a Joda pattern or one of the following formats: ISO8601, UNIX, UNIX_MS, or TAI64N.
| `timezone` | no | UTC | The timezone to use when parsing the date. | `timezone` | no | UTC | The timezone to use when parsing the date.
@ -729,7 +729,7 @@ Here is an example that adds the parsed date to the `timestamp` field based on t
"processors" : [ "processors" : [
{ {
"date" : { "date" : {
"match_field" : "initial_date", "field" : "initial_date",
"target_field" : "timestamp", "target_field" : "timestamp",
"match_formats" : ["dd/MM/yyyy hh:mm:ss"], "match_formats" : ["dd/MM/yyyy hh:mm:ss"],
"timezone" : "Europe/Amsterdam" "timezone" : "Europe/Amsterdam"
@ -1152,9 +1152,9 @@ Renames an existing field. If the field doesn't exist or the new name is already
.Rename Options .Rename Options
[options="header"] [options="header"]
|====== |======
| Name | Required | Default | Description | Name | Required | Default | Description
| `field` | yes | - | The field to be renamed | `field` | yes | - | The field to be renamed
| `to` | yes | - | The new name of the field | `target_field` | yes | - | The new name of the field
|====== |======
[source,js] [source,js]
@ -1162,7 +1162,7 @@ Renames an existing field. If the field doesn't exist or the new name is already
{ {
"rename": { "rename": {
"field": "foo", "field": "foo",
"to": "foobar" "target_field": "foobar"
} }
} }
-------------------------------------------------- --------------------------------------------------

View File

@ -48,17 +48,17 @@ public final class AttachmentProcessor extends AbstractProcessor {
private static final int NUMBER_OF_CHARS_INDEXED = 100000; private static final int NUMBER_OF_CHARS_INDEXED = 100000;
private final String sourceField; private final String field;
private final String targetField; private final String targetField;
private final Set<Field> fields; private final Set<Property> properties;
private final int indexedChars; private final int indexedChars;
AttachmentProcessor(String tag, String sourceField, String targetField, Set<Field> fields, AttachmentProcessor(String tag, String field, String targetField, Set<Property> properties,
int indexedChars) throws IOException { int indexedChars) throws IOException {
super(tag); super(tag);
this.sourceField = sourceField; this.field = field;
this.targetField = targetField; this.targetField = targetField;
this.fields = fields; this.properties = properties;
this.indexedChars = indexedChars; this.indexedChars = indexedChars;
} }
@ -68,62 +68,62 @@ public final class AttachmentProcessor extends AbstractProcessor {
try { try {
Metadata metadata = new Metadata(); Metadata metadata = new Metadata();
byte[] input = ingestDocument.getFieldValueAsBytes(sourceField); byte[] input = ingestDocument.getFieldValueAsBytes(field);
String parsedContent = TikaImpl.parse(input, metadata, indexedChars); String parsedContent = TikaImpl.parse(input, metadata, indexedChars);
if (fields.contains(Field.CONTENT) && Strings.hasLength(parsedContent)) { if (properties.contains(Property.CONTENT) && Strings.hasLength(parsedContent)) {
// somehow tika seems to append a newline at the end automatically, lets remove that again // somehow tika seems to append a newline at the end automatically, lets remove that again
additionalFields.put(Field.CONTENT.toLowerCase(), parsedContent.trim()); additionalFields.put(Property.CONTENT.toLowerCase(), parsedContent.trim());
} }
if (fields.contains(Field.LANGUAGE) && Strings.hasLength(parsedContent)) { if (properties.contains(Property.LANGUAGE) && Strings.hasLength(parsedContent)) {
LanguageIdentifier identifier = new LanguageIdentifier(parsedContent); LanguageIdentifier identifier = new LanguageIdentifier(parsedContent);
String language = identifier.getLanguage(); String language = identifier.getLanguage();
additionalFields.put(Field.LANGUAGE.toLowerCase(), language); additionalFields.put(Property.LANGUAGE.toLowerCase(), language);
} }
if (fields.contains(Field.DATE)) { if (properties.contains(Property.DATE)) {
String createdDate = metadata.get(TikaCoreProperties.CREATED); String createdDate = metadata.get(TikaCoreProperties.CREATED);
if (createdDate != null) { if (createdDate != null) {
additionalFields.put(Field.DATE.toLowerCase(), createdDate); additionalFields.put(Property.DATE.toLowerCase(), createdDate);
} }
} }
if (fields.contains(Field.TITLE)) { if (properties.contains(Property.TITLE)) {
String title = metadata.get(TikaCoreProperties.TITLE); String title = metadata.get(TikaCoreProperties.TITLE);
if (Strings.hasLength(title)) { if (Strings.hasLength(title)) {
additionalFields.put(Field.TITLE.toLowerCase(), title); additionalFields.put(Property.TITLE.toLowerCase(), title);
} }
} }
if (fields.contains(Field.AUTHOR)) { if (properties.contains(Property.AUTHOR)) {
String author = metadata.get("Author"); String author = metadata.get("Author");
if (Strings.hasLength(author)) { if (Strings.hasLength(author)) {
additionalFields.put(Field.AUTHOR.toLowerCase(), author); additionalFields.put(Property.AUTHOR.toLowerCase(), author);
} }
} }
if (fields.contains(Field.KEYWORDS)) { if (properties.contains(Property.KEYWORDS)) {
String keywords = metadata.get("Keywords"); String keywords = metadata.get("Keywords");
if (Strings.hasLength(keywords)) { if (Strings.hasLength(keywords)) {
additionalFields.put(Field.KEYWORDS.toLowerCase(), keywords); additionalFields.put(Property.KEYWORDS.toLowerCase(), keywords);
} }
} }
if (fields.contains(Field.CONTENT_TYPE)) { if (properties.contains(Property.CONTENT_TYPE)) {
String contentType = metadata.get(Metadata.CONTENT_TYPE); String contentType = metadata.get(Metadata.CONTENT_TYPE);
if (Strings.hasLength(contentType)) { if (Strings.hasLength(contentType)) {
additionalFields.put(Field.CONTENT_TYPE.toLowerCase(), contentType); additionalFields.put(Property.CONTENT_TYPE.toLowerCase(), contentType);
} }
} }
if (fields.contains(Field.CONTENT_LENGTH)) { if (properties.contains(Property.CONTENT_LENGTH)) {
String contentLength = metadata.get(Metadata.CONTENT_LENGTH); String contentLength = metadata.get(Metadata.CONTENT_LENGTH);
String length = Strings.hasLength(contentLength) ? contentLength : String.valueOf(parsedContent.length()); String length = Strings.hasLength(contentLength) ? contentLength : String.valueOf(parsedContent.length());
additionalFields.put(Field.CONTENT_LENGTH.toLowerCase(), length); additionalFields.put(Property.CONTENT_LENGTH.toLowerCase(), length);
} }
} catch (Throwable e) { } catch (Throwable e) {
throw new ElasticsearchParseException("Error parsing document in field [{}]", e, sourceField); throw new ElasticsearchParseException("Error parsing document in field [{}]", e, field);
} }
ingestDocument.setFieldValue(targetField, additionalFields); ingestDocument.setFieldValue(targetField, additionalFields);
@ -134,16 +134,16 @@ public final class AttachmentProcessor extends AbstractProcessor {
return TYPE; return TYPE;
} }
String getSourceField() { String getField() {
return sourceField; return field;
} }
String getTargetField() { String getTargetField() {
return targetField; return targetField;
} }
Set<Field> getFields() { Set<Property> getProperties() {
return fields; return properties;
} }
int getIndexedChars() { int getIndexedChars() {
@ -152,35 +152,35 @@ public final class AttachmentProcessor extends AbstractProcessor {
public static final class Factory extends AbstractProcessorFactory<AttachmentProcessor> { public static final class Factory extends AbstractProcessorFactory<AttachmentProcessor> {
static final Set<Field> DEFAULT_FIELDS = EnumSet.allOf(Field.class); static final Set<Property> DEFAULT_PROPERTIES = EnumSet.allOf(Property.class);
@Override @Override
public AttachmentProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception { public AttachmentProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
String sourceField = readStringProperty(TYPE, processorTag, config, "source_field"); String field = readStringProperty(TYPE, processorTag, config, "field");
String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "attachment"); String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "attachment");
List<String> fieldNames = readOptionalList(TYPE, processorTag, config, "fields"); List<String> properyNames = readOptionalList(TYPE, processorTag, config, "properties");
int indexedChars = readIntProperty(TYPE, processorTag, config, "indexed_chars", NUMBER_OF_CHARS_INDEXED); int indexedChars = readIntProperty(TYPE, processorTag, config, "indexed_chars", NUMBER_OF_CHARS_INDEXED);
final Set<Field> fields; final Set<Property> properties;
if (fieldNames != null) { if (properyNames != null) {
fields = EnumSet.noneOf(Field.class); properties = EnumSet.noneOf(Property.class);
for (String fieldName : fieldNames) { for (String fieldName : properyNames) {
try { try {
fields.add(Field.parse(fieldName)); properties.add(Property.parse(fieldName));
} catch (Exception e) { } catch (Exception e) {
throw newConfigurationException(TYPE, processorTag, "fields", "illegal field option [" + throw newConfigurationException(TYPE, processorTag, "properties", "illegal field option [" +
fieldName + "]. valid values are " + Arrays.toString(Field.values())); fieldName + "]. valid values are " + Arrays.toString(Property.values()));
} }
} }
} else { } else {
fields = DEFAULT_FIELDS; properties = DEFAULT_PROPERTIES;
} }
return new AttachmentProcessor(processorTag, sourceField, targetField, fields, indexedChars); return new AttachmentProcessor(processorTag, field, targetField, properties, indexedChars);
} }
} }
public enum Field { enum Property {
CONTENT, CONTENT,
TITLE, TITLE,
@ -191,7 +191,7 @@ public final class AttachmentProcessor extends AbstractProcessor {
CONTENT_LENGTH, CONTENT_LENGTH,
LANGUAGE; LANGUAGE;
public static Field parse(String value) { public static Property parse(String value) {
return valueOf(value.toUpperCase(Locale.ROOT)); return valueOf(value.toUpperCase(Locale.ROOT));
} }

View File

@ -43,22 +43,22 @@ public class AttachmentProcessorFactoryTests extends ESTestCase {
public void testBuildDefaults() throws Exception { public void testBuildDefaults() throws Exception {
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
String processorTag = randomAsciiOfLength(10); String processorTag = randomAsciiOfLength(10);
config.put(AbstractProcessorFactory.TAG_KEY, processorTag); config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
AttachmentProcessor processor = factory.create(config); AttachmentProcessor processor = factory.create(config);
assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("attachment")); assertThat(processor.getTargetField(), equalTo("attachment"));
assertThat(processor.getFields(), sameInstance(AttachmentProcessor.Factory.DEFAULT_FIELDS)); assertThat(processor.getProperties(), sameInstance(AttachmentProcessor.Factory.DEFAULT_PROPERTIES));
} }
public void testConfigureIndexedChars() throws Exception { public void testConfigureIndexedChars() throws Exception {
int indexedChars = randomIntBetween(1, 100000); int indexedChars = randomIntBetween(1, 100000);
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("indexed_chars", indexedChars); config.put("indexed_chars", indexedChars);
String processorTag = randomAsciiOfLength(10); String processorTag = randomAsciiOfLength(10);
@ -70,53 +70,53 @@ public class AttachmentProcessorFactoryTests extends ESTestCase {
public void testBuildTargetField() throws Exception { public void testBuildTargetField() throws Exception {
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("target_field", "_field"); config.put("target_field", "_field");
AttachmentProcessor processor = factory.create(config); AttachmentProcessor processor = factory.create(config);
assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("_field"));
} }
public void testBuildFields() throws Exception { public void testBuildFields() throws Exception {
Set<AttachmentProcessor.Field> fields = EnumSet.noneOf(AttachmentProcessor.Field.class); Set<AttachmentProcessor.Property> properties = EnumSet.noneOf(AttachmentProcessor.Property.class);
List<String> fieldNames = new ArrayList<>(); List<String> fieldNames = new ArrayList<>();
int numFields = scaledRandomIntBetween(1, AttachmentProcessor.Field.values().length); int numFields = scaledRandomIntBetween(1, AttachmentProcessor.Property.values().length);
for (int i = 0; i < numFields; i++) { for (int i = 0; i < numFields; i++) {
AttachmentProcessor.Field field = AttachmentProcessor.Field.values()[i]; AttachmentProcessor.Property property = AttachmentProcessor.Property.values()[i];
fields.add(field); properties.add(property);
fieldNames.add(field.name().toLowerCase(Locale.ROOT)); fieldNames.add(property.name().toLowerCase(Locale.ROOT));
} }
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("fields", fieldNames); config.put("properties", fieldNames);
AttachmentProcessor processor = factory.create(config); AttachmentProcessor processor = factory.create(config);
assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getFields(), equalTo(fields)); assertThat(processor.getProperties(), equalTo(properties));
} }
public void testBuildIllegalFieldOption() throws Exception { public void testBuildIllegalFieldOption() throws Exception {
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("fields", Collections.singletonList("invalid")); config.put("properties", Collections.singletonList("invalid"));
try { try {
factory.create(config); factory.create(config);
fail("exception expected"); fail("exception expected");
} catch (ElasticsearchParseException e) { } catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), containsString("[fields] illegal field option [invalid]")); assertThat(e.getMessage(), containsString("[properties] illegal field option [invalid]"));
// ensure allowed fields are mentioned // ensure allowed fields are mentioned
for (AttachmentProcessor.Field field : AttachmentProcessor.Field.values()) { for (AttachmentProcessor.Property property : AttachmentProcessor.Property.values()) {
assertThat(e.getMessage(), containsString(field.name())); assertThat(e.getMessage(), containsString(property.name()));
} }
} }
config = new HashMap<>(); config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("fields", "invalid"); config.put("properties", "invalid");
try { try {
factory.create(config); factory.create(config);
fail("exception expected"); fail("exception expected");
} catch (ElasticsearchParseException e) { } catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("[fields] property isn't a list, but of type [java.lang.String]")); assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]"));
} }
} }
} }

View File

@ -51,7 +51,7 @@ public class AttachmentProcessorTests extends ESTestCase {
@Before @Before
public void createStandardProcessor() throws IOException { public void createStandardProcessor() throws IOException {
processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field",
"target_field", EnumSet.allOf(AttachmentProcessor.Field.class), 10000); "target_field", EnumSet.allOf(AttachmentProcessor.Property.class), 10000);
} }
public void testEnglishTextDocument() throws Exception { public void testEnglishTextDocument() throws Exception {
@ -66,25 +66,25 @@ public class AttachmentProcessorTests extends ESTestCase {
public void testHtmlDocumentWithRandomFields() throws Exception { public void testHtmlDocumentWithRandomFields() throws Exception {
//date is not present in the html doc //date is not present in the html doc
ArrayList<AttachmentProcessor.Field> fieldsList = new ArrayList<>(EnumSet.complementOf(EnumSet.of ArrayList<AttachmentProcessor.Property> fieldsList = new ArrayList<>(EnumSet.complementOf(EnumSet.of
(AttachmentProcessor.Field.DATE))); (AttachmentProcessor.Property.DATE)));
Set<AttachmentProcessor.Field> selectedFields = new HashSet<>(); Set<AttachmentProcessor.Property> selectedProperties = new HashSet<>();
int numFields = randomIntBetween(1, fieldsList.size()); int numFields = randomIntBetween(1, fieldsList.size());
String[] selectedFieldNames = new String[numFields]; String[] selectedFieldNames = new String[numFields];
for (int i = 0; i < numFields; i++) { for (int i = 0; i < numFields; i++) {
AttachmentProcessor.Field field; AttachmentProcessor.Property property;
do { do {
field = randomFrom(fieldsList); property = randomFrom(fieldsList);
} while (selectedFields.add(field) == false); } while (selectedProperties.add(property) == false);
selectedFieldNames[i] = field.toLowerCase(); selectedFieldNames[i] = property.toLowerCase();
} }
if (randomBoolean()) { if (randomBoolean()) {
selectedFields.add(AttachmentProcessor.Field.DATE); selectedProperties.add(AttachmentProcessor.Property.DATE);
} }
processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field", processor = new AttachmentProcessor(randomAsciiOfLength(10), "source_field",
"target_field", selectedFields, 10000); "target_field", selectedProperties, 10000);
Map<String, Object> attachmentData = parseDocument("htmlWithEmptyDateMeta.html", processor); Map<String, Object> attachmentData = parseDocument("htmlWithEmptyDateMeta.html", processor);
assertThat(attachmentData.keySet(), hasSize(selectedFieldNames.length)); assertThat(attachmentData.keySet(), hasSize(selectedFieldNames.length));

View File

@ -9,7 +9,7 @@
"processors": [ "processors": [
{ {
"attachment" : { "attachment" : {
"source_field" : "field1" "field" : "field1"
} }
} }
] ]
@ -51,8 +51,8 @@
"processors": [ "processors": [
{ {
"attachment" : { "attachment" : {
"source_field" : "field1", "field" : "field1",
"fields" : ["language"] "properties" : ["language"]
} }
} }
] ]
@ -87,7 +87,7 @@
"processors": [ "processors": [
{ {
"attachment" : { "attachment" : {
"source_field" : "field1", "field" : "field1",
"indexed_chars": 30 "indexed_chars": 30
} }
} }

View File

@ -9,7 +9,7 @@
"processors": [ "processors": [
{ {
"attachment" : { "attachment" : {
"source_field" : "field1" "field" : "field1"
} }
} }
] ]
@ -49,7 +49,7 @@
"processors": [ "processors": [
{ {
"attachment" : { "attachment" : {
"source_field" : "field1" "field" : "field1"
} }
} }
] ]

View File

@ -59,22 +59,22 @@ public final class GeoIpProcessor extends AbstractProcessor {
public static final String TYPE = "geoip"; public static final String TYPE = "geoip";
private final String sourceField; private final String field;
private final String targetField; private final String targetField;
private final DatabaseReader dbReader; private final DatabaseReader dbReader;
private final Set<Field> fields; private final Set<Property> properties;
GeoIpProcessor(String tag, String sourceField, DatabaseReader dbReader, String targetField, Set<Field> fields) throws IOException { GeoIpProcessor(String tag, String field, DatabaseReader dbReader, String targetField, Set<Property> properties) throws IOException {
super(tag); super(tag);
this.sourceField = sourceField; this.field = field;
this.targetField = targetField; this.targetField = targetField;
this.dbReader = dbReader; this.dbReader = dbReader;
this.fields = fields; this.properties = properties;
} }
@Override @Override
public void execute(IngestDocument ingestDocument) { public void execute(IngestDocument ingestDocument) {
String ip = ingestDocument.getFieldValue(sourceField, String.class); String ip = ingestDocument.getFieldValue(field, String.class);
final InetAddress ipAddress = InetAddresses.forString(ip); final InetAddress ipAddress = InetAddresses.forString(ip);
Map<String, Object> geoData; Map<String, Object> geoData;
@ -104,8 +104,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
return TYPE; return TYPE;
} }
String getSourceField() { String getField() {
return sourceField; return field;
} }
String getTargetField() { String getTargetField() {
@ -116,8 +116,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
return dbReader; return dbReader;
} }
Set<Field> getFields() { Set<Property> getProperties() {
return fields; return properties;
} }
private Map<String, Object> retrieveCityGeoData(InetAddress ipAddress) { private Map<String, Object> retrieveCityGeoData(InetAddress ipAddress) {
@ -142,8 +142,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
Subdivision subdivision = response.getMostSpecificSubdivision(); Subdivision subdivision = response.getMostSpecificSubdivision();
Map<String, Object> geoData = new HashMap<>(); Map<String, Object> geoData = new HashMap<>();
for (Field field : fields) { for (Property property : this.properties) {
switch (field) { switch (property) {
case IP: case IP:
geoData.put("ip", NetworkAddress.format(ipAddress)); geoData.put("ip", NetworkAddress.format(ipAddress));
break; break;
@ -195,8 +195,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
Continent continent = response.getContinent(); Continent continent = response.getContinent();
Map<String, Object> geoData = new HashMap<>(); Map<String, Object> geoData = new HashMap<>();
for (Field field : fields) { for (Property property : this.properties) {
switch (field) { switch (property) {
case IP: case IP:
geoData.put("ip", NetworkAddress.format(ipAddress)); geoData.put("ip", NetworkAddress.format(ipAddress));
break; break;
@ -216,8 +216,8 @@ public final class GeoIpProcessor extends AbstractProcessor {
public static final class Factory extends AbstractProcessorFactory<GeoIpProcessor> implements Closeable { public static final class Factory extends AbstractProcessorFactory<GeoIpProcessor> implements Closeable {
static final Set<Field> DEFAULT_FIELDS = EnumSet.of( static final Set<Property> DEFAULT_PROPERTIES = EnumSet.of(
Field.CONTINENT_NAME, Field.COUNTRY_ISO_CODE, Field.REGION_NAME, Field.CITY_NAME, Field.LOCATION Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION
); );
private final Map<String, DatabaseReader> databaseReaders; private final Map<String, DatabaseReader> databaseReaders;
@ -228,30 +228,30 @@ public final class GeoIpProcessor extends AbstractProcessor {
@Override @Override
public GeoIpProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception { public GeoIpProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
String ipField = readStringProperty(TYPE, processorTag, config, "source_field"); String ipField = readStringProperty(TYPE, processorTag, config, "field");
String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "geoip"); String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "geoip");
String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb"); String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb");
List<String> fieldNames = readOptionalList(TYPE, processorTag, config, "fields"); List<String> propertyNames = readOptionalList(TYPE, processorTag, config, "properties");
final Set<Field> fields; final Set<Property> properties;
if (fieldNames != null) { if (propertyNames != null) {
fields = EnumSet.noneOf(Field.class); properties = EnumSet.noneOf(Property.class);
for (String fieldName : fieldNames) { for (String fieldName : propertyNames) {
try { try {
fields.add(Field.parse(fieldName)); properties.add(Property.parse(fieldName));
} catch (Exception e) { } catch (Exception e) {
throw newConfigurationException(TYPE, processorTag, "fields", "illegal field option [" + fieldName + "]. valid values are [" + Arrays.toString(Field.values()) + "]"); throw newConfigurationException(TYPE, processorTag, "properties", "illegal field option [" + fieldName + "]. valid values are [" + Arrays.toString(Property.values()) + "]");
} }
} }
} else { } else {
fields = DEFAULT_FIELDS; properties = DEFAULT_PROPERTIES;
} }
DatabaseReader databaseReader = databaseReaders.get(databaseFile); DatabaseReader databaseReader = databaseReaders.get(databaseFile);
if (databaseReader == null) { if (databaseReader == null) {
throw newConfigurationException(TYPE, processorTag, "database_file", "database file [" + databaseFile + "] doesn't exist"); throw newConfigurationException(TYPE, processorTag, "database_file", "database file [" + databaseFile + "] doesn't exist");
} }
return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, fields); return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, properties);
} }
@Override @Override
@ -270,7 +270,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
} }
} }
public enum Field { enum Property {
IP, IP,
COUNTRY_ISO_CODE, COUNTRY_ISO_CODE,
@ -283,7 +283,7 @@ public final class GeoIpProcessor extends AbstractProcessor {
LONGITUDE, LONGITUDE,
LOCATION; LOCATION;
public static Field parse(String value) { public static Property parse(String value) {
return valueOf(value.toUpperCase(Locale.ROOT)); return valueOf(value.toUpperCase(Locale.ROOT));
} }
} }

View File

@ -69,36 +69,36 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
String processorTag = randomAsciiOfLength(10); String processorTag = randomAsciiOfLength(10);
config.put(AbstractProcessorFactory.TAG_KEY, processorTag); config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
GeoIpProcessor processor = factory.create(config); GeoIpProcessor processor = factory.create(config);
assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getTag(), equalTo(processorTag));
assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getTargetField(), equalTo("geoip"));
assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-City"));
assertThat(processor.getFields(), sameInstance(GeoIpProcessor.Factory.DEFAULT_FIELDS)); assertThat(processor.getProperties(), sameInstance(GeoIpProcessor.Factory.DEFAULT_PROPERTIES));
} }
public void testBuildTargetField() throws Exception { public void testBuildTargetField() throws Exception {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("target_field", "_field"); config.put("target_field", "_field");
GeoIpProcessor processor = factory.create(config); GeoIpProcessor processor = factory.create(config);
assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("_field"));
} }
public void testBuildDbFile() throws Exception { public void testBuildDbFile() throws Exception {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("database_file", "GeoLite2-Country.mmdb"); config.put("database_file", "GeoLite2-Country.mmdb");
GeoIpProcessor processor = factory.create(config); GeoIpProcessor processor = factory.create(config);
assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getTargetField(), equalTo("geoip"));
assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country"));
} }
@ -107,7 +107,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("database_file", "does-not-exist.mmdb"); config.put("database_file", "does-not-exist.mmdb");
try { try {
factory.create(config); factory.create(config);
@ -120,43 +120,43 @@ public class GeoIpProcessorFactoryTests extends ESTestCase {
public void testBuildFields() throws Exception { public void testBuildFields() throws Exception {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Set<GeoIpProcessor.Field> fields = EnumSet.noneOf(GeoIpProcessor.Field.class); Set<GeoIpProcessor.Property> properties = EnumSet.noneOf(GeoIpProcessor.Property.class);
List<String> fieldNames = new ArrayList<>(); List<String> fieldNames = new ArrayList<>();
int numFields = scaledRandomIntBetween(1, GeoIpProcessor.Field.values().length); int numFields = scaledRandomIntBetween(1, GeoIpProcessor.Property.values().length);
for (int i = 0; i < numFields; i++) { for (int i = 0; i < numFields; i++) {
GeoIpProcessor.Field field = GeoIpProcessor.Field.values()[i]; GeoIpProcessor.Property property = GeoIpProcessor.Property.values()[i];
fields.add(field); properties.add(property);
fieldNames.add(field.name().toLowerCase(Locale.ROOT)); fieldNames.add(property.name().toLowerCase(Locale.ROOT));
} }
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("fields", fieldNames); config.put("properties", fieldNames);
GeoIpProcessor processor = factory.create(config); GeoIpProcessor processor = factory.create(config);
assertThat(processor.getSourceField(), equalTo("_field")); assertThat(processor.getField(), equalTo("_field"));
assertThat(processor.getFields(), equalTo(fields)); assertThat(processor.getProperties(), equalTo(properties));
} }
public void testBuildIllegalFieldOption() throws Exception { public void testBuildIllegalFieldOption() throws Exception {
GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders);
Map<String, Object> config = new HashMap<>(); Map<String, Object> config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("fields", Collections.singletonList("invalid")); config.put("properties", Collections.singletonList("invalid"));
try { try {
factory.create(config); factory.create(config);
fail("exception expected"); fail("exception expected");
} catch (ElasticsearchParseException e) { } catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("[fields] illegal field option [invalid]. valid values are [[IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LATITUDE, LONGITUDE, LOCATION]]")); assertThat(e.getMessage(), equalTo("[properties] illegal field option [invalid]. valid values are [[IP, COUNTRY_ISO_CODE, COUNTRY_NAME, CONTINENT_NAME, REGION_NAME, CITY_NAME, TIMEZONE, LATITUDE, LONGITUDE, LOCATION]]"));
} }
config = new HashMap<>(); config = new HashMap<>();
config.put("source_field", "_field"); config.put("field", "_field");
config.put("fields", "invalid"); config.put("properties", "invalid");
try { try {
factory.create(config); factory.create(config);
fail("exception expected"); fail("exception expected");
} catch (ElasticsearchParseException e) { } catch (ElasticsearchParseException e) {
assertThat(e.getMessage(), equalTo("[fields] property isn't a list, but of type [java.lang.String]")); assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]"));
} }
} }
} }

View File

@ -36,7 +36,7 @@ public class GeoIpProcessorTests extends ESTestCase {
public void testCity() throws Exception { public void testCity() throws Exception {
InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class));
Map<String, Object> document = new HashMap<>(); Map<String, Object> document = new HashMap<>();
document.put("source_field", "82.170.213.79"); document.put("source_field", "82.170.213.79");
@ -62,7 +62,7 @@ public class GeoIpProcessorTests extends ESTestCase {
public void testCountry() throws Exception { public void testCountry() throws Exception {
InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-Country.mmdb"); InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-Country.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class));
Map<String, Object> document = new HashMap<>(); Map<String, Object> document = new HashMap<>();
document.put("source_field", "82.170.213.79"); document.put("source_field", "82.170.213.79");
@ -81,7 +81,7 @@ public class GeoIpProcessorTests extends ESTestCase {
public void testAddressIsNotInTheDatabase() throws Exception { public void testAddressIsNotInTheDatabase() throws Exception {
InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class));
Map<String, Object> document = new HashMap<>(); Map<String, Object> document = new HashMap<>();
document.put("source_field", "202.45.11.11"); document.put("source_field", "202.45.11.11");
@ -95,7 +95,7 @@ public class GeoIpProcessorTests extends ESTestCase {
/** Don't silently do DNS lookups or anything trappy on bogus data */ /** Don't silently do DNS lookups or anything trappy on bogus data */
public void testInvalid() throws Exception { public void testInvalid() throws Exception {
InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb"); InputStream database = GeoIpProcessor.class.getResourceAsStream("/GeoLite2-City.mmdb");
GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Field.class)); GeoIpProcessor processor = new GeoIpProcessor(randomAsciiOfLength(10), "source_field", new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class));
Map<String, Object> document = new HashMap<>(); Map<String, Object> document = new HashMap<>();
document.put("source_field", "www.google.com"); document.put("source_field", "www.google.com");

View File

@ -9,7 +9,7 @@
"processors": [ "processors": [
{ {
"geoip" : { "geoip" : {
"source_field" : "field1" "field" : "field1"
} }
} }
] ]
@ -53,8 +53,8 @@
"processors": [ "processors": [
{ {
"geoip" : { "geoip" : {
"source_field" : "field1", "field" : "field1",
"fields" : ["city_name", "country_iso_code", "ip", "latitude", "longitude", "location", "timezone", "country_name", "region_name", "continent_name"] "properties" : ["city_name", "country_iso_code", "ip", "latitude", "longitude", "location", "timezone", "country_name", "region_name", "continent_name"]
} }
} }
] ]
@ -97,7 +97,7 @@
"processors": [ "processors": [
{ {
"geoip" : { "geoip" : {
"source_field" : "field1", "field" : "field1",
"database_file" : "GeoLite2-Country.mmdb" "database_file" : "GeoLite2-Country.mmdb"
} }
} }

View File

@ -292,7 +292,7 @@
"rename" : { "rename" : {
"tag" : "rename-status", "tag" : "rename-status",
"field" : "status", "field" : "status",
"to" : "bar", "target_field" : "bar",
"on_failure" : [ "on_failure" : [
{ {
"set" : { "set" : {

View File

@ -26,14 +26,14 @@
}, },
{ {
"date" : { "date" : {
"match_field" : "timestamp", "field" : "timestamp",
"target_field" : "timestamp", "target_field" : "timestamp",
"match_formats" : ["dd/MMM/YYYY:HH:mm:ss Z"] "formats" : ["dd/MMM/YYYY:HH:mm:ss Z"]
} }
}, },
{ {
"geoip" : { "geoip" : {
"source_field" : "clientip" "field" : "clientip"
} }
} }
] ]
@ -128,7 +128,7 @@
{ {
"rename" : { "rename" : {
"field" : "eyeColor", "field" : "eyeColor",
"to" : "eye_color" "target_field" : "eye_color"
} }
} }
] ]

View File

@ -9,9 +9,9 @@
"processors": [ "processors": [
{ {
"date" : { "date" : {
"match_field" : "date_source_field", "field" : "date_source_field",
"target_field" : "date_target_field", "target_field" : "date_target_field",
"match_formats" : ["dd/MM/yyyy"], "formats" : ["dd/MM/yyyy"],
"timezone" : "Europe/Amsterdam" "timezone" : "Europe/Amsterdam"
} }
} }

View File

@ -22,7 +22,7 @@
{ {
"rename" : { "rename" : {
"field" : "field_to_rename", "field" : "field_to_rename",
"to": "renamed_field" "target_field": "renamed_field"
} }
}, },
{ {

View File

@ -143,7 +143,7 @@
{ {
"rename" : { "rename" : {
"field" : "does_not_exist", "field" : "does_not_exist",
"to" : "field2", "target_field" : "field2",
"on_failure" : [ "on_failure" : [
{ {
"set" : { "set" : {
@ -425,7 +425,7 @@
"rename" : { "rename" : {
"tag" : "rename-1", "tag" : "rename-1",
"field" : "foofield", "field" : "foofield",
"to" : "field1", "target_field" : "field1",
"on_failure" : [ "on_failure" : [
{ {
"set" : { "set" : {
@ -437,7 +437,7 @@
{ {
"rename" : { "rename" : {
"field" : "foofield2", "field" : "foofield2",
"to" : "field1", "target_field" : "field1",
"on_failure" : [ "on_failure" : [
{ {
"set" : { "set" : {

View File

@ -15,9 +15,9 @@
}, },
{ {
"date" : { "date" : {
"match_field" : "date", "field" : "date",
"target_field" : "date", "target_field" : "date",
"match_formats" : ["yyyy"] "formats" : ["yyyy"]
} }
} }
], ],
@ -61,7 +61,7 @@
{ {
"rename" : { "rename" : {
"field" : "foofield", "field" : "foofield",
"to" : "field1", "target_field" : "field1",
"on_failure" : [ "on_failure" : [
{ {
"set" : { "set" : {
@ -72,7 +72,7 @@
{ {
"rename" : { "rename" : {
"field" : "foofield2", "field" : "foofield2",
"to" : "field1", "target_field" : "field1",
"on_failure" : [ "on_failure" : [
{ {
"set" : { "set" : {