mirror of https://github.com/apache/lucene.git
SOLR-14130: Add postlogs command line tool for indexing Solr logs
This commit is contained in:
parent
d2409fe499
commit
29c00d4fe7
|
@ -0,0 +1,33 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
# contributor license agreements. See the NOTICE file distributed with
|
||||
# this work for additional information regarding copyright ownership.
|
||||
# The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
# (the "License"); you may not use this file except in compliance with
|
||||
# the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
############################################################################################
|
||||
#
|
||||
# A command line tool for indexing Solr logs in the out-of-the-box log format.
|
||||
#
|
||||
# First build the Solr distribution. Then run postlogs from inside the Solr distribution
|
||||
# home directory as described below:
|
||||
#
|
||||
# parameters:
|
||||
#
|
||||
# -- baseUrl: Example http://localhost:8983/solr/collection1
|
||||
# -- rootDir: All files found at or below the root will be indexed
|
||||
#
|
||||
# Sample syntax: ./bin/postlogs http://localhost:8983/solr/collection1 /user/foo/logs
|
||||
#
|
||||
#
|
||||
############################################################################################
|
||||
|
||||
# Launch the indexer: $1 = baseUrl, $2 = rootDir of logs.
# The classpath is quoted so the JVM (not the shell) expands the "dist/*"
# wildcards; the arguments are quoted so paths containing spaces survive
# word splitting.
java -classpath "dist/*:dist/solrj-lib/*:" org.apache.solr.util.SolrLogPostTool "$1" "$2"
|
|
@ -0,0 +1,496 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.solr.util;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.List;
|
||||
import java.util.ArrayList;
|
||||
import java.net.URLDecoder;
|
||||
import java.util.UUID;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.regex.Matcher;
|
||||
import org.apache.solr.client.solrj.impl.HttpSolrClient;
|
||||
import org.apache.solr.client.solrj.SolrClient;
|
||||
import org.apache.solr.client.solrj.request.UpdateRequest;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
|
||||
|
||||
/**
|
||||
* A command line tool for indexing Solr logs in the out-of-the-box log format.
|
||||
**/
|
||||
|
||||
public class SolrLogPostTool {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
if(args.length != 2) {
|
||||
CLIO.out("");
|
||||
CLIO.out("postlogs is a simple tool for indexing Solr logs.");
|
||||
CLIO.out("");
|
||||
CLIO.out("parameters:");
|
||||
CLIO.out("");
|
||||
CLIO.out("-- baseUrl: Example http://localhost:8983/solr/collection1");
|
||||
CLIO.out("-- rootDir: All files found at or below the root will be indexed.");
|
||||
CLIO.out("");
|
||||
CLIO.out("Sample syntax: ./bin/postlogs http://localhost:8983/solr/collection1 /user/foo/logs");
|
||||
CLIO.out("");
|
||||
return;
|
||||
}
|
||||
|
||||
String baseUrl = args[0];
|
||||
String root = args[1];
|
||||
|
||||
HttpSolrClient.Builder builder = new HttpSolrClient.Builder();
|
||||
SolrClient client = null;
|
||||
try {
|
||||
client = builder.withBaseSolrUrl(baseUrl).build();
|
||||
File rf = new File(root);
|
||||
List<File> files = new ArrayList();
|
||||
gatherFiles(rf, files);
|
||||
int rec = 0;
|
||||
UpdateRequest request = new UpdateRequest();
|
||||
|
||||
for (File file : files) {
|
||||
|
||||
LineNumberReader bufferedReader = null;
|
||||
|
||||
try {
|
||||
bufferedReader = new LineNumberReader(new FileReader(file));
|
||||
LogRecordReader recordReader = new LogRecordReader(bufferedReader);
|
||||
SolrInputDocument doc = null;
|
||||
String fileName = file.getName();
|
||||
while (true) {
|
||||
try {
|
||||
doc = recordReader.readRecord();
|
||||
} catch (Throwable t) {
|
||||
CLIO.err("Error reading log record:"+ bufferedReader.getLineNumber() +" from file:"+ fileName);
|
||||
CLIO.err(t.getMessage());
|
||||
continue;
|
||||
}
|
||||
|
||||
if(doc == null) {
|
||||
break;
|
||||
}
|
||||
|
||||
rec++;
|
||||
UUID id = UUID.randomUUID();
|
||||
doc.addField("id", id.toString());
|
||||
doc.addField("file_s", fileName);
|
||||
request.add(doc);
|
||||
if (rec == 300) {
|
||||
CLIO.out("Sending batch of 300 log records...");
|
||||
request.process(client);
|
||||
CLIO.out("Batch sent");
|
||||
request = new UpdateRequest();
|
||||
rec = 0;
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
bufferedReader.close();
|
||||
}
|
||||
}
|
||||
|
||||
if (rec > 0) {
|
||||
//Process last batch
|
||||
CLIO.out("Sending last batch ...");
|
||||
request.process(client);
|
||||
client.commit();
|
||||
CLIO.out("Committed");
|
||||
}
|
||||
} finally {
|
||||
client.close();
|
||||
}
|
||||
}
|
||||
|
||||
static void gatherFiles(File rootFile, List<File> files) {
|
||||
|
||||
if(rootFile.isFile()) {
|
||||
files.add(rootFile);
|
||||
} else {
|
||||
File[] subFiles = rootFile.listFiles();
|
||||
for(File f : subFiles) {
|
||||
if(f.isFile()) {
|
||||
files.add(f);
|
||||
} else {
|
||||
gatherFiles(f, files);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static class LogRecordReader {
|
||||
|
||||
private BufferedReader bufferedReader;
|
||||
private String pushedBack = null;
|
||||
private boolean finished = false;
|
||||
private String cause;
|
||||
Pattern p = Pattern.compile("^\\d\\d\\d\\d\\-\\d\\d\\-\\d\\d");
|
||||
|
||||
public LogRecordReader(BufferedReader bufferedReader) throws IOException {
|
||||
this.bufferedReader = bufferedReader;
|
||||
}
|
||||
|
||||
|
||||
public SolrInputDocument readRecord() throws IOException {
|
||||
while(true) {
|
||||
String line = null;
|
||||
|
||||
if(finished) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if(pushedBack != null) {
|
||||
line = pushedBack;
|
||||
pushedBack = null;
|
||||
} else {
|
||||
line = bufferedReader.readLine();
|
||||
}
|
||||
|
||||
if (line != null) {
|
||||
if (line.contains("QTime=")) {
|
||||
return parseQueryRecord(line);
|
||||
} else if (line.contains("Registered new searcher")) {
|
||||
return parseNewSearch(line);
|
||||
} else if (line.contains("path=/update")) {
|
||||
return parseUpdate(line);
|
||||
} else if (line.contains(" ERROR ")) {
|
||||
this.cause = null;
|
||||
return parseError(line, readTrace());
|
||||
} else if (line.contains("start commit")) {
|
||||
return parseCommit(line);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private String readTrace() throws IOException {
|
||||
StringBuilder buf = new StringBuilder();
|
||||
buf.append("%html ");
|
||||
|
||||
while(true) {
|
||||
String line = bufferedReader.readLine();
|
||||
if (line == null) {
|
||||
finished = true;
|
||||
return buf.toString();
|
||||
} else {
|
||||
//look for a date at the beginning of the line
|
||||
//If it's not there then read into the stack trace buffer
|
||||
Matcher m = p.matcher(line);
|
||||
|
||||
if (!m.find()) {
|
||||
//Line does not start with a timestamp so append to the stack trace
|
||||
buf.append(line.replace("\t", " ") + "<br/>");
|
||||
if(line.startsWith("Caused by:")) {
|
||||
this.cause = line;
|
||||
}
|
||||
} else {
|
||||
pushedBack = line;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return buf.toString();
|
||||
}
|
||||
|
||||
private SolrInputDocument parseError(String line, String trace) throws IOException {
|
||||
String[] parts = line.split("\\s+");
|
||||
SolrInputDocument doc = new SolrInputDocument();
|
||||
doc.addField("date_dt", parts[0]+"T"+parts[1]);
|
||||
doc.addField("type_s", "error");
|
||||
doc.addField("line_t", line);
|
||||
|
||||
if(trace != null) {
|
||||
doc.addField("stack_t", trace);
|
||||
}
|
||||
|
||||
if(this.cause != null) {
|
||||
doc.addField("root_cause_t", cause.replace("Caused by:", "").trim());
|
||||
}
|
||||
|
||||
doc.addField("collection_s", parseCollection(line));
|
||||
doc.addField("core_s", parseCore(line));
|
||||
doc.addField("shard_s", parseShard(line));
|
||||
doc.addField("replica_s", parseReplica(line));
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private SolrInputDocument parseCommit(String line) throws IOException {
|
||||
SolrInputDocument doc = new SolrInputDocument();
|
||||
String[] parts = line.split("\\s+");
|
||||
doc.addField("date_dt", parts[0]+"T"+parts[1]);
|
||||
doc.addField("type_s", "commit");
|
||||
doc.addField("line_t", line);
|
||||
if(line.contains("softCommit=true")) {
|
||||
doc.addField("soft_commit_s", "true");
|
||||
} else {
|
||||
doc.addField("soft_commit_s", "false");
|
||||
}
|
||||
|
||||
if(line.contains("openSearcher=true")) {
|
||||
doc.addField("open_searcher_s", "true");
|
||||
} else {
|
||||
doc.addField("open_searcher_s", "false");
|
||||
}
|
||||
|
||||
doc.addField("collection_s", parseCollection(line));
|
||||
doc.addField("core_s", parseCore(line));
|
||||
doc.addField("shard_s", parseShard(line));
|
||||
doc.addField("replica_s", parseReplica(line));
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private SolrInputDocument parseQueryRecord(String line) {
|
||||
|
||||
String[] parts = line.split("\\s+");
|
||||
SolrInputDocument doc = new SolrInputDocument();
|
||||
doc.addField("date_dt", parts[0]+"T"+parts[1]);
|
||||
doc.addField("qtime_i", parseQTime(line));
|
||||
doc.addField("status_s", parseStatus(line));
|
||||
|
||||
if(line.contains("hits=")) {
|
||||
doc.addField("hits_l", parseHits(line));
|
||||
}
|
||||
|
||||
String params = parseParams(line);
|
||||
doc.addField("params_t", params);
|
||||
addParams(doc, params);
|
||||
|
||||
String ll = parts[2];
|
||||
doc.addField("log_level_s", ll);
|
||||
|
||||
doc.addField("collection_s", parseCollection(line));
|
||||
doc.addField("core_s", parseCore(line));
|
||||
doc.addField("node_s", parseNode(line));
|
||||
doc.addField("shard_s", parseShard(line));
|
||||
doc.addField("replica_s", parseReplica(line));
|
||||
|
||||
String path = parsePath(line);
|
||||
doc.addField("path_s", path);
|
||||
if(path != null && path.contains("/admin")) {
|
||||
doc.addField("type_s", "admin");
|
||||
} else {
|
||||
doc.addField("type_s", "query");
|
||||
}
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private SolrInputDocument parseNewSearch(String line) {
|
||||
|
||||
String[] parts = line.split("\\s+");
|
||||
SolrInputDocument doc = new SolrInputDocument();
|
||||
doc.addField("date_dt", parts[0]+"T"+parts[1]);
|
||||
doc.addField("core_s", parseNewSearcherCore(line));
|
||||
doc.addField("type_s", "newSearcher");
|
||||
doc.addField("line_t", line);
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private String parseCollection(String line) {
|
||||
char[] ca = {' ', ']'};
|
||||
String parts[] = line.split("c:");
|
||||
if(parts.length == 2) {
|
||||
return readUntil(parts[1], ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private SolrInputDocument parseUpdate(String line) {
|
||||
String[] parts = line.split("\\s+");
|
||||
SolrInputDocument doc = new SolrInputDocument();
|
||||
doc.addField("date_dt", parts[0]+"T"+parts[1]);
|
||||
|
||||
if(line.contains("deleteByQuery=")) {
|
||||
doc.addField("type_s", "deleteByQuery");
|
||||
} else if(line.contains("delete=")) {
|
||||
doc.addField("type_s", "delete");
|
||||
} else {
|
||||
doc.addField("type_s", "update");
|
||||
}
|
||||
|
||||
doc.addField("collection_s", parseCollection(line));
|
||||
doc.addField("core_s", parseCore(line));
|
||||
doc.addField("shard_s", parseShard(line));
|
||||
doc.addField("replica_s", parseReplica(line));
|
||||
doc.addField("line_t", line);
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
private String parseNewSearcherCore(String line) {
|
||||
char[] ca = {']'};
|
||||
String parts[] = line.split("\\[");
|
||||
if(parts.length > 3) {
|
||||
return readUntil(parts[2], ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String parseCore(String line) {
|
||||
char[] ca = {' ', ']'};
|
||||
String parts[] = line.split("x:");
|
||||
if(parts.length >= 2) {
|
||||
return readUntil(parts[1], ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String parseShard(String line) {
|
||||
char[] ca = {' ', ']'};
|
||||
String parts[] = line.split("s:");
|
||||
if(parts.length >= 2) {
|
||||
return readUntil(parts[1], ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String parseReplica(String line) {
|
||||
char[] ca = {' ', ']'};
|
||||
String parts[] = line.split("r:");
|
||||
if(parts.length >= 2) {
|
||||
return readUntil(parts[1], ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private String parsePath(String line) {
|
||||
char[] ca = {' '};
|
||||
String parts[] = line.split(" path=");
|
||||
if(parts.length == 2) {
|
||||
return readUntil(parts[1], ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String parseQTime(String line) {
|
||||
char[] ca = {'\n', '\r'};
|
||||
String parts[] = line.split(" QTime=");
|
||||
if(parts.length == 2) {
|
||||
return readUntil(parts[1], ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String parseNode(String line) {
|
||||
char[] ca = {' ', ']'};
|
||||
String parts[] = line.split("n:");
|
||||
if(parts.length == 2) {
|
||||
return readUntil(parts[1], ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String parseStatus(String line) {
|
||||
char[] ca = {' ', '\n', '\r'};
|
||||
String parts[] = line.split(" status=");
|
||||
if(parts.length == 2) {
|
||||
return readUntil(parts[1], ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String parseHits(String line) {
|
||||
char[] ca = {' '};
|
||||
String parts[] = line.split(" hits=");
|
||||
if(parts.length == 2) {
|
||||
return readUntil(parts[1], ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String parseParams(String line) {
|
||||
char[] ca = {'}'};
|
||||
String parts[] = line.split(" params=");
|
||||
if(parts.length == 2) {
|
||||
return readUntil(parts[1].substring(1), ca);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String readUntil(String s, char[] chars) {
|
||||
StringBuilder builder = new StringBuilder();
|
||||
for(int i=0; i<s.length(); i++) {
|
||||
char a = s.charAt(i);
|
||||
for(char c : chars) {
|
||||
if(a == c) {
|
||||
return builder.toString();
|
||||
}
|
||||
}
|
||||
builder.append(a);
|
||||
}
|
||||
|
||||
return builder.toString();
|
||||
}
|
||||
|
||||
private void addParams(SolrInputDocument doc, String params) {
|
||||
String[] pairs = params.split("&");
|
||||
for(String pair : pairs) {
|
||||
String[] parts = pair.split("=");
|
||||
if(parts.length == 2 && parts[0].equals("q")) {
|
||||
String dq = URLDecoder.decode(parts[1]);
|
||||
doc.addField("q_s", dq);
|
||||
doc.addField("q_t", dq);
|
||||
}
|
||||
|
||||
if(parts[0].equals("rows")) {
|
||||
String dr = URLDecoder.decode(parts[1]);
|
||||
doc.addField("rows_i", dr);
|
||||
}
|
||||
|
||||
if(parts[0].equals("distrib")) {
|
||||
String dr = URLDecoder.decode(parts[1]);
|
||||
doc.addField("distrib_s", dr);
|
||||
}
|
||||
|
||||
if(parts[0].equals("isShard")) {
|
||||
String dr = URLDecoder.decode(parts[1]);
|
||||
doc.addField("isShard_s", dr);
|
||||
}
|
||||
|
||||
if(parts[0].equals("wt")) {
|
||||
String dr = URLDecoder.decode(parts[1]);
|
||||
doc.addField("wt_s", dr);
|
||||
}
|
||||
|
||||
if(parts[0].equals("facet")) {
|
||||
String dr = URLDecoder.decode(parts[1]);
|
||||
doc.addField("facet_s", dr);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,266 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
* contributor license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright ownership.
|
||||
* The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
* (the "License"); you may not use this file except in compliance with
|
||||
* the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.solr.util;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.StringReader;
|
||||
import java.util.List;
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.apache.solr.SolrTestCaseJ4;
|
||||
import org.apache.solr.common.SolrInputDocument;
|
||||
import org.apache.solr.common.SolrInputField;
|
||||
import org.apache.solr.util.SolrLogPostTool.LogRecordReader;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
|
||||
public class SolrLogPostToolTest extends SolrTestCaseJ4 {
|
||||
|
||||
@Before
|
||||
public void initVariousPostTools() throws Exception {
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testQueryRecord() throws Exception{
|
||||
String record = "2019-12-09 15:05:01.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&_=1575835181759&isShard=true&wt=javabin&distrib=false} hits=234868 status=0 QTime=8\n";
|
||||
List<SolrInputDocument> docs = readDocs(record);
|
||||
assertEquals(docs.size(), 1);
|
||||
SolrInputDocument doc = docs.get(0);
|
||||
|
||||
SolrInputField query = doc.getField("q_s");
|
||||
SolrInputField date = doc.getField("date_dt");
|
||||
SolrInputField collection = doc.getField("collection_s");
|
||||
SolrInputField path = doc.getField("path_s");
|
||||
SolrInputField hits = doc.getField("hits_l");
|
||||
SolrInputField type = doc.getField("type_s");
|
||||
SolrInputField status = doc.getField("status_s");
|
||||
SolrInputField loglevel = doc.getField("log_level_s");
|
||||
SolrInputField shard = doc.getField("shard_s");
|
||||
SolrInputField replica = doc.getField("replica_s");
|
||||
SolrInputField core = doc.getField("core_s");
|
||||
SolrInputField wt = doc.getField("wt_s");
|
||||
SolrInputField distrib = doc.getField("distrib_s");
|
||||
SolrInputField isShard = doc.getField("isShard_s");
|
||||
|
||||
assertEquals(query.getValue(), "*:*");
|
||||
assertEquals(date.getValue(), "2019-12-09T15:05:01.931");
|
||||
assertEquals(collection.getValue(), "logs4");
|
||||
assertEquals(path.getValue(), "/select");
|
||||
assertEquals(hits.getValue(), "234868");
|
||||
assertEquals(type.getValue(), "query");
|
||||
assertEquals(status.getValue(), "0");
|
||||
assertEquals(loglevel.getValue(), "INFO");
|
||||
assertEquals(shard.getValue(), "shard1");
|
||||
assertEquals(replica.getValue(), "core_node2");
|
||||
assertEquals(core.getValue(), "logs4_shard1_replica_n1");
|
||||
assertEquals(wt.getValue(), "javabin");
|
||||
assertEquals(distrib.getValue(), "false");
|
||||
assertEquals(isShard.getValue(), "true");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUpdateRecords() throws Exception{
|
||||
String record = "2019-12-25 20:38:23.498 INFO (qtp2103763750-126) [c:logs3 s:shard1 r:core_node2 x:logs3_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [logs3_shard1_replica_n1] webapp=/solr path=/update params={commitWithin=1000&overwrite=true&wt=json&_=1577306114481}{deleteByQuery=*:* (-1653925534487281664)} 0 11\n" +
|
||||
"2019-12-25 20:42:13.411 INFO (qtp2103763750-303) [c:logs5 s:shard1 r:core_node2 x:logs5_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [logs5_shard1_replica_n1] webapp=/solr path=/update params={commitWithin=1000&overwrite=true&wt=json&_=1577306114481}{delete=[03bbe975-728a-4df8-aa25-fe25049dc0ef (-1653925775577972736)]} 0 1\n";
|
||||
List<SolrInputDocument> docs = readDocs(record);
|
||||
assertEquals(docs.size(), 2);
|
||||
SolrInputDocument doc = docs.get(0);
|
||||
SolrInputField date = doc.getField("date_dt");
|
||||
SolrInputField type = doc.getField("type_s");
|
||||
SolrInputField core = doc.getField("core_s");
|
||||
SolrInputField collection = doc.getField("collection_s");
|
||||
assertEquals(date.getValue(), "2019-12-25T20:38:23.498");
|
||||
assertEquals(type.getValue(), "deleteByQuery");
|
||||
assertEquals(collection.getValue(), "logs3");
|
||||
assertEquals(core.getValue(), "logs3_shard1_replica_n1");
|
||||
|
||||
SolrInputDocument doc1 = docs.get(1);
|
||||
SolrInputField date1 = doc1.getField("date_dt");
|
||||
SolrInputField type1 = doc1.getField("type_s");
|
||||
SolrInputField core1 = doc1.getField("core_s");
|
||||
SolrInputField collection1= doc1.getField("collection_s");
|
||||
assertEquals(date1.getValue(), "2019-12-25T20:42:13.411");
|
||||
assertEquals(type1.getValue(), "delete");
|
||||
assertEquals(collection1.getValue(), "logs5");
|
||||
assertEquals(core1.getValue(), "logs5_shard1_replica_n1");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testErrorRecord() throws Exception{
|
||||
String record = "2019-12-31 01:49:53.251 ERROR (qtp2103763750-240) [c:logs6 s:shard1 r:core_node2 x:logs6_shard1_replica_n1] o.a.s.h.RequestHandlerBase org.apache.solr.common.SolrException: org.apache.solr.search.SyntaxError: Cannot parse 'id:[* TO *': Encountered \"<EOF>\" at line 1, column 10.\n" +
|
||||
"Was expecting one of:\n" +
|
||||
" \"]\" ...\n" +
|
||||
" \"}\" ...\n" +
|
||||
" \n" +
|
||||
"\tat org.apache.solr.handler.component.QueryComponent.prepare(QueryComponent.java:218)\n" +
|
||||
"\tat org.apache.solr.handler.component.SearchHandler.handleRequestBody(SearchHandler.java:302)\n" +
|
||||
"\tat org.apache.solr.handler.RequestHandlerBase.handleRequest(RequestHandlerBase.java:197)\n" +
|
||||
"\tat org.apache.solr.core.SolrCore.execute(SolrCore.java:2582)\n" +
|
||||
"\tat org.apache.solr.servlet.HttpSolrCall.execute(HttpSolrCall.java:799)\n" +
|
||||
"\tat org.apache.solr.servlet.HttpSolrCall.call(HttpSolrCall.java:578)\n" +
|
||||
"\tat org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:419)\n" +
|
||||
"\tat org.apache.solr.servlet.SolrDispatchFilter.doFilter(SolrDispatchFilter.java:351)\n" +
|
||||
"\tat org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1602)\n" +
|
||||
"\tat org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:540)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:146)\n" +
|
||||
"\tat org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:548)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:257)\n" +
|
||||
"\tat org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1711)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1347)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)\n" +
|
||||
"\tat org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:480)\n" +
|
||||
"\tat org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1678)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1249)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:220)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:152)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n" +
|
||||
"\tat org.eclipse.jetty.rewrite.handler.RewriteHandler.handle(RewriteHandler.java:335)\n" +
|
||||
"\tat org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)\n" +
|
||||
"\tat org.eclipse.jetty.server.Server.handle(Server.java:505)\n" +
|
||||
"\tat org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:370)\n" +
|
||||
"\tat org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:267)\n" +
|
||||
"\tat org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:305)\n" +
|
||||
"\tat org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:103)\n" +
|
||||
"\tat org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:117)\n" +
|
||||
"\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)\n" +
|
||||
"\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)\n" +
|
||||
"\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)\n" +
|
||||
"\tat org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)\n" +
|
||||
"\tat org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)\n" +
|
||||
"\tat org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:781)\n" +
|
||||
"\tat org.eclipse.jetty.util.thread.QueuedThreadPool$Runner.run(QueuedThreadPool.java:917)\n" +
|
||||
"\tat java.base/java.lang.Thread.run(Thread.java:834)\n" +
|
||||
"Caused by: org.apache.solr.search.SyntaxError: Cannot parse 'id:[* TO *': Encountered \"<EOF>\" at line 1, column 10.\n" +
|
||||
"Was expecting one of:\n" +
|
||||
" \"]\" ...\n" +
|
||||
" \"}\" ...\n" +
|
||||
" \n" +
|
||||
"\tat org.apache.solr.parser.SolrQueryParserBase.parse(SolrQueryParserBase.java:266)\n" +
|
||||
"\tat org.apache.solr.search.LuceneQParser.parse(LuceneQParser.java:49)\n" +
|
||||
"\tat org.apache.solr.search.QParser.getQuery(QParser.java:174)\n" +
|
||||
"\tat org.apache.solr.handler.component.QueryComponent.prepare(QueryComponent.java:160)\n" +
|
||||
"\t... 41 more\n" +
|
||||
"Caused by: org.apache.solr.parser.ParseException: Encountered \"<EOF>\" at line 1, column 10.\n" +
|
||||
"Was expecting one of:\n" +
|
||||
" \"]\" ...\n" +
|
||||
" \"}\" ...\n" +
|
||||
" \n" +
|
||||
"\tat org.apache.solr.parser.QueryParser.generateParseException(QueryParser.java:885)\n" +
|
||||
"\tat org.apache.solr.parser.QueryParser.jj_consume_token(QueryParser.java:767)\n" +
|
||||
"\tat org.apache.solr.parser.QueryParser.Term(QueryParser.java:479)\n" +
|
||||
"\tat org.apache.solr.parser.QueryParser.Clause(QueryParser.java:278)\n" +
|
||||
"\tat org.apache.solr.parser.QueryParser.Query(QueryParser.java:162)\n" +
|
||||
"\tat org.apache.solr.parser.QueryParser.TopLevelQuery(QueryParser.java:131)\n" +
|
||||
"\tat org.apache.solr.parser.SolrQueryParserBase.parse(SolrQueryParserBase.java:262)\n" +
|
||||
"\t... 44 more\n" +
|
||||
"\n"+
|
||||
"2019-12-09 15:05:01.931 INFO (qtp2103763750-21) [c:logs4 s:shard1 r:core_node2 x:logs4_shard1_replica_n1] o.a.s.c.S.Request [logs4_shard1_replica_n1] webapp=/solr path=/select params={q=*:*&_=1575835181759&isShard=true&wt=javabin&distrib=false} hits=234868 status=0 QTime=8\n";
|
||||
List<SolrInputDocument> docs = readDocs(record);
|
||||
assertEquals(docs.size(), 2);
|
||||
SolrInputDocument doc = docs.get(0);
|
||||
SolrInputField date = doc.getField("date_dt");
|
||||
SolrInputField type = doc.getField("type_s");
|
||||
SolrInputField shard = doc.getField("shard_s");
|
||||
SolrInputField replica = doc.getField("replica_s");
|
||||
SolrInputField core = doc.getField("core_s");
|
||||
SolrInputField stack = doc.getField("stack_t");
|
||||
SolrInputField root = doc.getField("root_cause_t");
|
||||
SolrInputField collection = doc.getField("collection_s");
|
||||
|
||||
|
||||
assertEquals(date.getValue(), "2019-12-31T01:49:53.251");
|
||||
assertEquals(type.getValue(), "error");
|
||||
assertEquals(collection.getValue(), "logs6");
|
||||
|
||||
|
||||
assertEquals(shard.getValue(), "shard1");
|
||||
assertEquals(replica.getValue(), "core_node2");
|
||||
assertEquals(core.getValue(), "logs6_shard1_replica_n1");
|
||||
assertTrue(stack.getValue().toString().contains(root.getValue().toString()));
|
||||
|
||||
SolrInputDocument doc1 = docs.get(1);
|
||||
SolrInputField date1 = doc1.getField("date_dt");
|
||||
SolrInputField type1 = doc1.getField("type_s");
|
||||
assertEquals(date1.getValue(), "2019-12-09T15:05:01.931");
|
||||
assertEquals(type1.getValue(), "query");
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCommit() throws Exception{
|
||||
String record = "2019-12-16 14:20:19.708 INFO (qtp812143047-22671) [c:production_201912 s:shard128 r:core_node7 x:production_201912_shard128_replica] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1653086376121335808,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}\n";
|
||||
List<SolrInputDocument> docs = readDocs(record);
|
||||
assertEquals(docs.size(), 1);
|
||||
SolrInputDocument doc = docs.get(0);
|
||||
|
||||
SolrInputField date = doc.getField("date_dt");
|
||||
SolrInputField type = doc.getField("type_s");
|
||||
SolrInputField shard = doc.getField("shard_s");
|
||||
SolrInputField replica = doc.getField("replica_s");
|
||||
SolrInputField core = doc.getField("core_s");
|
||||
SolrInputField openSearcher = doc.getField("open_searcher_s");
|
||||
SolrInputField softCommit = doc.getField("soft_commit_s");
|
||||
SolrInputField collection = doc.getField("collection_s");
|
||||
|
||||
assertEquals(date.getValue(), "2019-12-16T14:20:19.708");
|
||||
assertEquals(type.getValue(), "commit");
|
||||
assertEquals(shard.getValue(), "shard128");
|
||||
assertEquals(replica.getValue(), "core_node7");
|
||||
assertEquals(core.getValue(), "production_201912_shard128_replica");
|
||||
assertEquals(openSearcher.getValue(), "true");
|
||||
assertEquals(softCommit.getValue(), "false");
|
||||
assertEquals(collection.getValue(), "production_201912");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNewSearcher() throws Exception{
|
||||
String record = "2019-12-16 19:00:23.931 INFO (searcherExecutor-66-thread-1) [ ] o.a.s.c.SolrCore [production_cv_month_201912_shard35_replica_n1] Registered new searcher Searcher@16ef5fac[production_cv_month_201912_shard35_replica_n1] ...";
|
||||
List<SolrInputDocument> docs = readDocs(record);
|
||||
assertEquals(docs.size(), 1);
|
||||
SolrInputDocument doc = docs.get(0);
|
||||
SolrInputField date = doc.getField("date_dt");
|
||||
SolrInputField type = doc.getField("type_s");
|
||||
SolrInputField core = doc.getField("core_s");
|
||||
assertEquals(date.getValue(), "2019-12-16T19:00:23.931");
|
||||
assertEquals(type.getValue(), "newSearcher");
|
||||
assertEquals(core.getValue(), "production_cv_month_201912_shard35_replica_n1");
|
||||
}
|
||||
|
||||
private List<SolrInputDocument> readDocs(String records) throws Exception {
|
||||
BufferedReader bufferedReader = new BufferedReader(new StringReader(records));
|
||||
ArrayList<SolrInputDocument> list = new ArrayList();
|
||||
|
||||
try {
|
||||
LogRecordReader logRecordReader = new SolrLogPostTool.LogRecordReader(bufferedReader);
|
||||
SolrInputDocument doc = null;
|
||||
while ((doc = logRecordReader.readRecord()) != null) {
|
||||
list.add(doc);
|
||||
}
|
||||
} finally {
|
||||
bufferedReader.close();
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
}
|
Loading…
Reference in New Issue