move grok patterns loading to IngestGrokPlugin class out of the GrokProcessor.Factory

Authored by javanna on 2016-01-14 16:24:07 +01:00, committed by Luca Cavanna
parent 1e68ad0887
commit 169b3c75c5
4 changed files with 53 additions and 67 deletions
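
For orientation, here is a minimal sketch of the wiring after this change, condensed from the diff below (error handling omitted): the plugin now loads the bundled pattern files once through a static helper and hands the resulting map to the processor factory, whose constructor no longer performs any I/O and no longer throws IOException.

    // condensed sketch, not the full classes shown in the diff below
    Map<String, String> builtinPatterns = IngestGrokPlugin.loadBuiltinPatterns(); // may throw IOException
    GrokProcessor.Factory factory = new GrokProcessor.Factory(builtinPatterns);   // plain assignment, no I/O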

GrokProcessor.java

@@ -19,16 +19,10 @@
 package org.elasticsearch.ingest.grok;
-import org.elasticsearch.ingest.core.IngestDocument;
 import org.elasticsearch.ingest.core.ConfigurationUtils;
+import org.elasticsearch.ingest.core.IngestDocument;
 import org.elasticsearch.ingest.core.Processor;
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.nio.charset.StandardCharsets;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -70,47 +64,17 @@ public final class GrokProcessor implements Processor {
     public final static class Factory implements Processor.Factory<GrokProcessor> {
-        private final static String[] PATTERN_NAMES = new String[] {
-            "aws", "bacula", "bro", "exim", "firewalls", "grok-patterns", "haproxy",
-            "java", "junos", "linux-syslog", "mcollective-patterns", "mongodb", "nagios",
-            "postgresql", "rails", "redis", "ruby"
-        };
-        private final Map<String, String> builtinPatternBank;
+        private final Map<String, String> builtinPatterns;
 
-        public Factory() throws IOException {
-            // TODO(simonw): we should have a static helper method to load these patterns and make this
-            // factory only accept a String->String map instead. That way we can load
-            // the patterns in the IngestGrokPlugin ctor or even in a static context and this ctor doesn't need to throw any exception.
-            Map<String, String> builtinPatterns = new HashMap<>();
-            for (String pattern : PATTERN_NAMES) {
-                try(InputStream is = getClass().getResourceAsStream("/patterns/" + pattern)) {
-                    loadBankFromStream(builtinPatterns, is);
-                }
-            }
-            this.builtinPatternBank = Collections.unmodifiableMap(builtinPatterns);
-        }
-        static void loadBankFromStream(Map<String, String> patternBank, InputStream inputStream) throws IOException {
-            String line;
-            BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
-            while ((line = br.readLine()) != null) {
-                String trimmedLine = line.replaceAll("^\\s+", "");
-                if (trimmedLine.startsWith("#") || trimmedLine.length() == 0) {
-                    continue;
-                }
-                String[] parts = trimmedLine.split("\\s+", 2);
-                if (parts.length == 2) {
-                    patternBank.put(parts[0], parts[1]);
-                }
-            }
+        public Factory(Map<String, String> builtinPatterns) {
+            this.builtinPatterns = builtinPatterns;
         }
         public GrokProcessor create(Map<String, Object> config) throws Exception {
             String matchField = ConfigurationUtils.readStringProperty(config, "field");
             String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern");
             Map<String, String> customPatternBank = ConfigurationUtils.readOptionalMap(config, "pattern_definitions");
-            Map<String, String> patternBank = new HashMap<>(builtinPatternBank);
+            Map<String, String> patternBank = new HashMap<>(builtinPatterns);
             if (customPatternBank != null) {
                 patternBank.putAll(customPatternBank);
             }
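
With the new constructor, creating a processor looks roughly like the sketch below. The field name, grok expression, and MY_ID pattern are hypothetical, but the "field", "pattern", and optional "pattern_definitions" keys and the merge of custom patterns over the builtins come straight from create() above.

    Map<String, String> builtins = IngestGrokPlugin.loadBuiltinPatterns(); // package-private helper, see next file
    GrokProcessor.Factory factory = new GrokProcessor.Factory(builtins);
    Map<String, Object> config = new HashMap<>();
    config.put("field", "message");                                        // hypothetical field name
    config.put("pattern", "id=%{MY_ID:id}");                               // hypothetical grok expression
    config.put("pattern_definitions", Collections.singletonMap("MY_ID", "[0-9a-f]{8}")); // optional, merged over builtins
    GrokProcessor processor = factory.create(config);                      // create(...) declares throws Exception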

IngestGrokPlugin.java

@@ -22,10 +22,29 @@ package org.elasticsearch.ingest.grok;
 import org.elasticsearch.ingest.IngestModule;
 import org.elasticsearch.plugins.Plugin;
+import java.io.BufferedReader;
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
 public class IngestGrokPlugin extends Plugin {
+    private static final String[] PATTERN_NAMES = new String[] {
+        "aws", "bacula", "bro", "exim", "firewalls", "grok-patterns", "haproxy",
+        "java", "junos", "linux-syslog", "mcollective-patterns", "mongodb", "nagios",
+        "postgresql", "rails", "redis", "ruby"
+    };
+    private final Map<String, String> builtinPatterns;
+    public IngestGrokPlugin() throws IOException {
+        this.builtinPatterns = loadBuiltinPatterns();
+    }
     @Override
     public String name() {
         return "ingest-grok";
@@ -37,12 +56,32 @@ public class IngestGrokPlugin extends Plugin {
     }
     public void onModule(IngestModule ingestModule) {
-        ingestModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> {
-            try {
-                return new GrokProcessor.Factory();
-            } catch (IOException e) {
-                throw new RuntimeException(e);
+        ingestModule.registerProcessor(GrokProcessor.TYPE, (environment, templateService) -> new GrokProcessor.Factory(builtinPatterns));
+    }
+    static Map<String, String> loadBuiltinPatterns() throws IOException {
+        Map<String, String> builtinPatterns = new HashMap<>();
+        for (String pattern : PATTERN_NAMES) {
+            try(InputStream is = IngestGrokPlugin.class.getResourceAsStream("/patterns/" + pattern)) {
+                loadPatterns(builtinPatterns, is);
             }
-        });
+        }
+        return Collections.unmodifiableMap(builtinPatterns);
     }
+    private static void loadPatterns(Map<String, String> patternBank, InputStream inputStream) throws IOException {
+        String line;
+        BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
+        while ((line = br.readLine()) != null) {
+            String trimmedLine = line.replaceAll("^\\s+", "");
+            if (trimmedLine.startsWith("#") || trimmedLine.length() == 0) {
+                continue;
+            }
+            String[] parts = trimmedLine.split("\\s+", 2);
+            if (parts.length == 2) {
+                patternBank.put(parts[0], parts[1]);
+            }
+        }
+    }
 }
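
loadPatterns defines the expected format of the bundled /patterns/<name> resources: one "NAME<whitespace>REGEX" definition per line, with '#' comments and blank lines skipped. The sketch below mirrors that parsing on an in-memory example; the MY_ID and MY_LINE entries are illustrative and not copied from the shipped files.

    String examplePatternsFile =
            "# comments and blank lines are ignored\n" +
            "MY_ID [0-9a-f]{8}\n" +
            "MY_LINE id=%{MY_ID}\n";
    Map<String, String> bank = new HashMap<>();
    for (String line : examplePatternsFile.split("\n")) {
        String trimmed = line.replaceAll("^\\s+", "");
        if (trimmed.startsWith("#") || trimmed.length() == 0) {
            continue;
        }
        String[] parts = trimmed.split("\\s+", 2);   // split on the first run of whitespace
        if (parts.length == 2) {
            bank.put(parts[0], parts[1]);            // -> {MY_ID=[0-9a-f]{8}, MY_LINE=id=%{MY_ID}}
        }
    }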

GrokProcessorFactoryTests.java

@@ -19,7 +19,6 @@
 package org.elasticsearch.ingest.grok;
-import org.elasticsearch.ingest.grok.GrokProcessor;
 import org.elasticsearch.test.ESTestCase;
 import java.util.Collections;
@@ -32,7 +31,7 @@ import static org.hamcrest.Matchers.notNullValue;
 public class GrokProcessorFactoryTests extends ESTestCase {
     public void testBuild() throws Exception {
-        GrokProcessor.Factory factory = new GrokProcessor.Factory();
+        GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap());
         Map<String, Object> config = new HashMap<>();
         config.put("field", "_field");
@@ -43,7 +42,7 @@ public class GrokProcessorFactoryTests extends ESTestCase {
     }
     public void testCreateWithCustomPatterns() throws Exception {
-        GrokProcessor.Factory factory = new GrokProcessor.Factory();
+        GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap());
         Map<String, Object> config = new HashMap<>();
         config.put("field", "_field");

GrokTests.java

@@ -19,13 +19,10 @@
 package org.elasticsearch.ingest.grok;
-import org.elasticsearch.ingest.grok.Grok;
-import org.elasticsearch.ingest.grok.GrokProcessor;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Before;
 import java.io.IOException;
-import java.io.InputStream;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -39,22 +36,9 @@ import static org.hamcrest.Matchers.nullValue;
 public class GrokTests extends ESTestCase {
     private Map<String, String> basePatterns;
-    private Map<String, String> newBankFromStreams(InputStream... inputStreams) throws IOException {
-        Map<String, String> patternBank = new HashMap<>();
-        for (InputStream is : inputStreams) {
-            GrokProcessor.Factory.loadBankFromStream(patternBank, is);
-        }
-        return patternBank;
-    }
     @Before
     public void setup() throws IOException {
-        basePatterns = newBankFromStreams(
-            getClass().getResourceAsStream("/patterns/grok-patterns"),
-            getClass().getResourceAsStream("/patterns/linux-syslog")
-        );
+        basePatterns = IngestGrokPlugin.loadBuiltinPatterns();
     }
     public void testMatchWithoutCaptures() {