Merge pull request #15166 from talevy/remove_pattern_utils

move PatternUtils#loadBankFromStream into GrokProcessor.Factory
Tal Levy 2015-12-03 08:08:00 -08:00
commit cf1c393d70
3 changed files with 22 additions and 49 deletions

GrokProcessor.java

@@ -23,7 +23,11 @@ import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.processor.ConfigurationUtils;
import org.elasticsearch.ingest.processor.Processor;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -75,6 +79,22 @@ public final class GrokProcessor implements Processor {
            this.grokConfigDirectory = configDirectory.resolve("ingest").resolve("grok");
        }

        static void loadBankFromStream(Map<String, String> patternBank, InputStream inputStream) throws IOException {
            String line;
            BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
            while ((line = br.readLine()) != null) {
                String trimmedLine = line.replaceAll("^\\s+", "");
                if (trimmedLine.startsWith("#") || trimmedLine.length() == 0) {
                    continue;
                }
                String[] parts = trimmedLine.split("\\s+", 2);
                if (parts.length == 2) {
                    patternBank.put(parts[0], parts[1]);
                }
            }
        }

        public GrokProcessor create(Map<String, Object> config) throws Exception {
            String matchField = ConfigurationUtils.readStringProperty(config, "field");
            String matchPattern = ConfigurationUtils.readStringProperty(config, "pattern");
@@ -84,7 +104,7 @@ public final class GrokProcessor implements Processor {
                for (Path patternFilePath : stream) {
                    if (Files.isRegularFile(patternFilePath)) {
                        try(InputStream is = Files.newInputStream(patternFilePath, StandardOpenOption.READ)) {
                            PatternUtils.loadBankFromStream(patternBank, is);
                            loadBankFromStream(patternBank, is);
                        }
                    }
                }
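
For context, a minimal sketch of what the relocated loader does with a pattern-bank stream. It is not part of the commit; the class name PatternBankSketch and the sample bank are made up, and it assumes a caller in the same package, since the relocated method is declared package-private:

    package org.elasticsearch.ingest.processor.grok;

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.HashMap;
    import java.util.Map;

    class PatternBankSketch {
        // Feeds a tiny in-memory pattern bank through the relocated loader.
        // Comment lines and blank lines are skipped; every other line is split
        // once on whitespace into a pattern name and its expression.
        static Map<String, String> example() throws IOException {
            String bank =
                    "# core patterns\n" +
                    "USERNAME [a-zA-Z0-9._-]+\n" +
                    "\n" +
                    "USER %{USERNAME}\n";
            Map<String, String> patternBank = new HashMap<>();
            try (InputStream is = new ByteArrayInputStream(bank.getBytes(StandardCharsets.UTF_8))) {
                GrokProcessor.Factory.loadBankFromStream(patternBank, is);
            }
            // patternBank now contains:
            //   "USERNAME" -> "[a-zA-Z0-9._-]+"
            //   "USER"     -> "%{USERNAME}"
            return patternBank;
        }
    }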

PatternUtils.java (deleted)

@@ -1,47 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.ingest.processor.grok;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Map;

final class PatternUtils {
    private PatternUtils() {}

    public static void loadBankFromStream(Map<String, String> patternBank, InputStream inputStream) throws IOException {
        String line;
        BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
        while ((line = br.readLine()) != null) {
            String trimmedLine = line.replaceAll("^\\s+", "");
            if (trimmedLine.startsWith("#") || trimmedLine.length() == 0) {
                continue;
            }
            String[] parts = trimmedLine.split("\\s+", 2);
            if (parts.length == 2) {
                patternBank.put(parts[0], parts[1]);
            }
        }
    }
}

GrokTests.java

@@ -40,7 +40,7 @@ public class GrokTests extends ESTestCase {
        Map<String, String> patternBank = new HashMap<>();
        for (InputStream is : inputStreams) {
            PatternUtils.loadBankFromStream(patternBank, is);
            GrokProcessor.Factory.loadBankFromStream(patternBank, is);
        }
        return patternBank;
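
The hunk above shows only the loop body of the test helper that builds a pattern bank from several streams. A hedged reconstruction of how the complete helper might read after this change (the method name loadPatternBank and its parameter are illustrative, not taken from the commit):

    // Hypothetical shape of the surrounding GrokTests helper; only its loop body
    // appears in the diff above. The call now targets GrokProcessor.Factory
    // instead of the deleted PatternUtils class.
    private static Map<String, String> loadPatternBank(List<InputStream> inputStreams) throws IOException {
        Map<String, String> patternBank = new HashMap<>();
        for (InputStream is : inputStreams) {
            GrokProcessor.Factory.loadBankFromStream(patternBank, is);
        }
        return patternBank;
    }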