HDDS-393. Audit Parser tool for processing ozone audit logs.

Contributed by Dinesh Chitlangia.
Anu Engineer committed on 2019-01-02 11:52:12 -08:00
parent 0cb3316cec
commit 996ab4874a
19 changed files with 1109 additions and 0 deletions

View File

@ -0,0 +1,72 @@
---
title: "Audit Parser"
date: 2018-12-17
menu:
   main:
      parent: Tools
---
<!---
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
The Audit Parser tool can be used to query Ozone audit logs.
It creates a SQLite database at the specified path. If the database
already exists, the existing database is reused instead of creating a new one.
The database contains a single table named `audit`, defined as:
{{< highlight sql >}}
CREATE TABLE IF NOT EXISTS audit (
  datetime text,
  level varchar(7),
  logger varchar(7),
  user text,
  ip text,
  op text,
  params text,
  result varchar(7),
  exception text,
  UNIQUE(datetime,level,logger,user,ip,op,params,result))
{{< /highlight >}}
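The database is a plain SQLite file, so it can also be inspected with any SQLite client. For example, assuming the `sqlite3` CLI is installed and the database was created at an illustrative path:
{{< highlight bash >}}
sqlite3 /tmp/audit.db ".schema audit"
{{< /highlight >}}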
Usage:
{{< highlight bash >}}
ozone auditparser <path to db file> [COMMAND] [PARAM]
{{< /highlight >}}
To load an audit log into the database:
{{< highlight bash >}}
ozone auditparser <path to db file> load <path to audit log>
{{< /highlight >}}
The load command creates the audit table described above (if it does not already exist) and populates it with the parsed audit entries.
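For example, to load an Ozone Manager audit log into a new database (both paths below are illustrative):
{{< highlight bash >}}
ozone auditparser /tmp/audit.db load /var/log/ozone/om-audit.log
{{< /highlight >}}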
To run a custom read-only query:
{{< highlight bash >}}
ozone auditparser <path to db file> query <select query enclosed within double quotes>
{{< /highlight >}}
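For example, the following illustrative query counts the operations that failed:
{{< highlight bash >}}
ozone auditparser /tmp/audit.db query "select count(*) from audit where result='FAILURE'"
{{< /highlight >}}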
Audit Parser ships with a set of templates for the most commonly used queries.
To run a template query:
{{< highlight bash >}}
ozone auditparser <path to db file> template <templateName>
{{< /highlight >}}
The following templates are available:
|Template Name|Description|SQL|
|----------------|----------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------|
|top5users|Top 5 users|select user,count(*) as total from audit group by user order by total DESC limit 5|
|top5cmds|Top 5 commands|select op,count(*) as total from audit group by op order by total DESC limit 5|
|top5activetimebyseconds|Top 5 active times, grouped by seconds|select substr(datetime,1,charindex(',',datetime)-1) as dt,count(*) as thecount from audit group by dt order by thecount DESC limit 5|
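For example, to list the five most active users in a previously loaded database (path illustrative):
{{< highlight bash >}}
ozone auditparser /tmp/audit.db template top5users
{{< /highlight >}}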

View File

@ -32,6 +32,7 @@ function hadoop_usage
hadoop_add_option "--loglevel level" "set the log4j level for this command"
hadoop_add_option "--workers" "turn on worker mode"
hadoop_add_subcommand "auditparser" client "runs audit parser tool"
hadoop_add_subcommand "classpath" client "prints the class path needed to get the hadoop jar and the required libraries"
hadoop_add_subcommand "datanode" daemon "run a HDDS datanode"
hadoop_add_subcommand "envvars" client "display computed Hadoop environment variables"
@ -63,6 +64,10 @@ function ozonecmd_case
shift
case ${subcmd} in
auditparser)
HADOOP_CLASSNAME=org.apache.hadoop.ozone.audit.parser.AuditParser
OZONE_RUN_ARTIFACT_NAME="hadoop-ozone-tools"
;;
classpath)
hadoop_do_classpath_subcommand HADOOP_CLASSNAME "$@"
;;

View File

@ -278,6 +278,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<exclude>src/site/resources/images/*</exclude>
<exclude>src/test/all-tests</exclude>
<exclude>src/test/empty-file</exclude>
<exclude>src/test/resources/*.log</exclude>
<exclude>src/test/resources/*.tgz</exclude>
<exclude>src/test/resources/data*</exclude>
<exclude>src/test/resources/empty-file</exclude>

View File

@ -0,0 +1,55 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser;
import org.apache.hadoop.hdds.cli.GenericCli;
import org.apache.hadoop.ozone.audit.parser.handler.LoadCommandHandler;
import org.apache.hadoop.ozone.audit.parser.handler.QueryCommandHandler;
import org.apache.hadoop.ozone.audit.parser.handler.TemplateCommandHandler;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
/**
* Ozone audit parser tool.
*/
@Command(name = "ozone auditparser",
description = "Shell parser for Ozone Audit Logs",
subcommands = {
LoadCommandHandler.class,
TemplateCommandHandler.class,
QueryCommandHandler.class
},
versionProvider = HddsVersionProvider.class,
mixinStandardHelpOptions = true)
public class AuditParser extends GenericCli {
/*
<.db file path> load <file>
<.db file path> template <template name>
<.db file path> query <custom sql>
*/
@Parameters(arity = "1..1", description = "Existing or new .db file")
private String database;
public static void main(String[] argv) throws Exception {
new AuditParser().run(argv);
}
public String getDatabase(){
return database;
}
}

View File

@ -0,0 +1,232 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser.common;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.ozone.audit.parser.model.AuditEntry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.sql.*;
import java.util.ArrayList;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
/**
* Database helper for ozone audit parser tool.
*/
public final class DatabaseHelper {
private DatabaseHelper() {
//Never constructed
}
static {
loadProperties();
}
private static final Logger LOG =
LoggerFactory.getLogger(DatabaseHelper.class);
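// Maps statement names (e.g. createAuditTable, insertAuditEntry) and query
// template names to the SQL loaded from commands.properties on the classpath.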
private static Map<String, String> properties;
public static boolean setup(String dbName, String logs) {
if(createAuditTable(dbName)) {
return insertAudits(dbName, logs);
} else {
return false;
}
}
private static Connection getConnection(String dbName) {
Connection connection = null;
try{
Class.forName(ParserConsts.DRIVER);
connection = DriverManager.getConnection(
ParserConsts.CONNECTION_PREFIX + dbName);
} catch (ClassNotFoundException e) {
LOG.error(e.getMessage());
} catch (SQLException e) {
LOG.error(e.getMessage());
}
return connection;
}
private static void loadProperties() {
Properties props = new Properties();
try{
InputStream inputStream = DatabaseHelper.class.getClassLoader()
.getResourceAsStream(ParserConsts.PROPS_FILE);
if (inputStream != null) {
props.load(inputStream);
properties = props.entrySet().stream().collect(
Collectors.toMap(
e -> e.getKey().toString(),
e -> e.getValue().toString()
)
);
} else {
throw new FileNotFoundException("property file '"
+ ParserConsts.PROPS_FILE + "' not found in the classpath");
}
} catch(Exception e){
LOG.error(e.getMessage());
}
}
private static boolean createAuditTable(String dbName) {
try(Connection connection = getConnection(dbName);
Statement st = connection.createStatement()) {
st.executeUpdate(properties.get(ParserConsts.CREATE_AUDIT_TABLE));
} catch (SQLException e) {
LOG.error(e.getMessage());
return false;
}
return true;
}
private static boolean insertAudits(String dbName, String logs) {
try(Connection connection = getConnection(dbName);
PreparedStatement preparedStatement = connection.prepareStatement(
properties.get(ParserConsts.INSERT_AUDITS))) {
ArrayList<AuditEntry> auditEntries = parseAuditLogs(logs);
//Insert list to db
for(AuditEntry audit : auditEntries) {
preparedStatement.setString(1, audit.getTimestamp());
preparedStatement.setString(2, audit.getLevel());
preparedStatement.setString(3, audit.getLogger());
preparedStatement.setString(4, audit.getUser());
preparedStatement.setString(5, audit.getIp());
preparedStatement.setString(6, audit.getOp());
preparedStatement.setString(7, audit.getParams());
preparedStatement.setString(8, audit.getResult());
preparedStatement.setString(9, audit.getException());
preparedStatement.executeUpdate();
}
} catch (Exception e) {
LOG.error(e.getMessage());
return false;
}
return true;
}
private static ArrayList<AuditEntry> parseAuditLogs(String filePath)
throws Exception {
ArrayList<AuditEntry> listResult = new ArrayList<AuditEntry>();
try(FileInputStream fis = new FileInputStream(filePath);
InputStreamReader isr = new InputStreamReader(fis);
BufferedReader bReader = new BufferedReader(isr)) {
String currentLine = null;
String[] entry = null;
AuditEntry tempEntry = null;
String nextLine = null;
currentLine = bReader.readLine();
nextLine = bReader.readLine();
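// Audit entries can span multiple lines: a line that does not start with a
// timestamp is appended to the previous entry's exception text, and an entry
// is added to the result list once the next timestamped line (or end of
// file) is reached.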
while(true) {
if(tempEntry == null){
tempEntry = new AuditEntry();
}
if(currentLine == null) {
break;
} else {
if(!currentLine.matches(ParserConsts.DATE_REGEX)){
tempEntry.appendException(currentLine);
} else {
entry = StringUtils.stripAll(currentLine.split("\\|"));
tempEntry = new AuditEntry.Builder()
.setTimestamp(entry[0])
.setLevel(entry[1])
.setLogger(entry[2])
.setUser(entry[3].substring(entry[3].indexOf('=') + 1))
.setIp(entry[4].substring(entry[4].indexOf('=') + 1))
.setOp(entry[5].substring(entry[5].indexOf('=') + 1))
.setParams(entry[6])
.setResult(entry[7].substring(entry[7].indexOf('=') + 1))
.build();
if(entry.length == 9){
tempEntry.setException(entry[8]);
}
}
if(nextLine == null || nextLine.matches(ParserConsts.DATE_REGEX)){
listResult.add(tempEntry);
tempEntry = null;
}
currentLine = nextLine;
nextLine = bReader.readLine();
}
}
}
return listResult;
}
public static String executeCustomQuery(String dbName, String query)
throws SQLException {
return executeStatement(dbName, query);
}
public static String executeTemplate(String dbName, String template)
throws SQLException {
return executeStatement(dbName,
properties.get(template));
}
private static String executeStatement(String dbName, String sql)
throws SQLException {
StringBuilder result = new StringBuilder();
// Use try-with-resources so the statement and result set are always closed.
try(Connection connection = getConnection(dbName)) {
if(connection != null) {
try(Statement st = connection.createStatement();
ResultSet rs = st.executeQuery(sql)) {
ResultSetMetaData rsm = rs.getMetaData();
int cols = rsm.getColumnCount();
while(rs.next()){
for(int index = 1; index <= cols; index++){
result.append(rs.getObject(index) + "\t");
}
result.append("\n");
}
}
}
}
return result.toString();
}
public static boolean validateTemplate(String templateName) {
return (properties.get(templateName) != null);
}
}

View File

@ -0,0 +1,35 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser.common;
/**
* Constants used for ozone audit parser.
*/
public final class ParserConsts {
private ParserConsts() {
//Never constructed
}
public static final String DRIVER = "org.sqlite.JDBC";
public static final String CONNECTION_PREFIX = "jdbc:sqlite:";
public static final String DATE_REGEX = "^\\d{4}-\\d{2}-\\d{2}.*$";
public static final String PROPS_FILE = "commands.properties";
public static final String INSERT_AUDITS = "insertAuditEntry";
public static final String CREATE_AUDIT_TABLE = "createAuditTable";
}

View File

@ -0,0 +1,20 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser.common;
/**
* Classes to define constants & helpers for Ozone audit parser tool.
*/

View File

@ -0,0 +1,52 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser.handler;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.ozone.audit.parser.AuditParser;
import org.apache.hadoop.ozone.audit.parser.common.DatabaseHelper;
import picocli.CommandLine.*;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;
import java.util.concurrent.Callable;
/**
* Load command handler for ozone audit parser.
*/
@Command(name = "load",
aliases = "l",
description = "Load ozone audit log files",
mixinStandardHelpOptions = true,
versionProvider = HddsVersionProvider.class)
public class LoadCommandHandler implements Callable<Void> {
@Parameters(arity = "1..1", description = "Audit Log file(s)")
private String logs;
@ParentCommand
private AuditParser auditParser;
public Void call() {
if(DatabaseHelper.setup(auditParser.getDatabase(), logs)) {
System.out.println(logs + " has been loaded successfully");
} else {
System.out.println("Failed to load " + logs);
}
return null;
}
}

View File

@ -0,0 +1,57 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser.handler;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.ozone.audit.parser.AuditParser;
import org.apache.hadoop.ozone.audit.parser.common.DatabaseHelper;
import picocli.CommandLine.*;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;
import java.sql.SQLException;
import java.util.concurrent.Callable;
/**
* Custom query command handler for ozone audit parser.
* The query must be enclosed within double quotes.
*/
@Command(name = "query",
aliases = "q",
description = "Execute custom query",
mixinStandardHelpOptions = true,
versionProvider = HddsVersionProvider.class)
public class QueryCommandHandler implements Callable<Void> {
@Parameters(arity = "1..1", description = "Custom query enclosed within " +
"double quotes.")
private String query;
@ParentCommand
private AuditParser auditParser;
public Void call() {
try {
System.out.println(
DatabaseHelper.executeCustomQuery(auditParser.getDatabase(), query)
);
} catch (SQLException ex) {
System.err.println(ex.getMessage());
}
return null;
}
}

View File

@ -0,0 +1,61 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser.handler;
import org.apache.hadoop.hdds.cli.HddsVersionProvider;
import org.apache.hadoop.ozone.audit.parser.AuditParser;
import org.apache.hadoop.ozone.audit.parser.common.DatabaseHelper;
import picocli.CommandLine.*;
import picocli.CommandLine.Command;
import picocli.CommandLine.Parameters;
import java.sql.SQLException;
import java.util.concurrent.Callable;
/**
* Template command handler for ozone audit parser.
*/
@Command(name = "template",
aliases = "t",
description = "Execute template query",
mixinStandardHelpOptions = true,
versionProvider = HddsVersionProvider.class)
public class TemplateCommandHandler implements Callable<Void> {
@Parameters(arity = "1..1", description = "Template name")
private String template;
@ParentCommand
private AuditParser auditParser;
public Void call() {
try {
if(DatabaseHelper.validateTemplate(template)) {
System.out.println(
DatabaseHelper.executeTemplate(auditParser.getDatabase(),
template)
);
} else {
System.err.println("ERROR: Invalid template name - " + template);
}
} catch (SQLException ex) {
System.err.println(ex.getMessage());
}
return null;
}
}

View File

@ -0,0 +1,20 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser.handler;
/**
* Command handlers used for Ozone audit parser tool.
*/

View File

@ -0,0 +1,188 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser.model;
/**
* POJO used for ozone audit parser tool.
*/
public class AuditEntry {
private String timestamp;
private String level;
private String logger;
private String user;
private String ip;
private String op;
private String params;
private String result;
private String exception;
public AuditEntry(){}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getIp() {
return ip;
}
public void setIp(String ip) {
this.ip = ip;
}
public String getTimestamp() {
return timestamp;
}
public void setTimestamp(String timestamp) {
this.timestamp = timestamp;
}
public String getLevel() {
return level;
}
public void setLevel(String level) {
this.level = level;
}
public String getLogger() {
return logger;
}
public void setLogger(String logger) {
this.logger = logger;
}
public String getOp() {
return op;
}
public void setOp(String op) {
this.op = op;
}
public String getParams() {
return params;
}
public void setParams(String params) {
this.params = params;
}
public String getResult() {
return result;
}
public void setResult(String result) {
this.result = result;
}
public String getException() {
return exception;
}
public void setException(String exception) {
this.exception = exception.trim();
}
public void appendException(String text){
this.exception += "\n" + text.trim();
}
/**
* Builder for AuditEntry.
*/
public static class Builder {
private String timestamp;
private String level;
private String logger;
private String user;
private String ip;
private String op;
private String params;
private String result;
private String exception;
public Builder() {
}
public Builder setTimestamp(String ts){
this.timestamp = ts;
return this;
}
public Builder setLevel(String lvl){
this.level = lvl;
return this;
}
public Builder setLogger(String lgr){
this.logger = lgr;
return this;
}
public Builder setUser(String usr){
this.user = usr;
return this;
}
public Builder setIp(String ipAddress){
this.ip = ipAddress;
return this;
}
public Builder setOp(String operation){
this.op = operation;
return this;
}
public Builder setParams(String prms){
this.params = prms;
return this;
}
public Builder setResult(String res){
this.result = res;
return this;
}
public Builder setException(String exp){
this.exception = exp;
return this;
}
public AuditEntry build() {
AuditEntry aentry = new AuditEntry();
aentry.timestamp = this.timestamp;
aentry.level = this.level;
aentry.logger = this.logger;
aentry.user = this.user;
aentry.ip = this.ip;
aentry.op = this.op;
aentry.params = this.params;
aentry.result = this.result;
aentry.exception = this.exception;
return aentry;
}
}
}

View File

@ -0,0 +1,20 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser.model;
/**
* POJO used for Ozone audit parser tool.
*/

View File

@ -0,0 +1,20 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.ozone.audit.parser;
/**
* Classes used for Ozone audit parser tool.
*/

View File

@ -0,0 +1,22 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
createAuditTable=CREATE TABLE IF NOT EXISTS audit (datetime text,level varchar(7),logger varchar(7),user text,ip text,op text,params text,result varchar(7),exception text,UNIQUE(datetime,level,logger,user,ip,op,params,result))
insertAuditEntry=INSERT INTO AUDIT VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)
top5users=select user,count(*) as total from audit group by user order by total DESC limit 5
top5cmds=select op,count(*) as total from audit group by op order by total DESC limit 5
top5activetimebyseconds=select substr(datetime,1,charindex(',',datetime)-1) as dt,count(*) as thecount from audit group by dt order by thecount DESC limit 5

View File

@ -0,0 +1,191 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.audit.parser;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.ExecutionException;
import picocli.CommandLine.IExceptionHandler2;
import picocli.CommandLine.ParseResult;
import picocli.CommandLine.ParameterException;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Tests AuditParser.
*/
public class TestAuditParser {
private static File outputBaseDir;
private static AuditParser parserTool;
private static final Logger LOG =
LoggerFactory.getLogger(TestAuditParser.class);
private static final ByteArrayOutputStream OUT = new ByteArrayOutputStream();
private final ByteArrayOutputStream err = new ByteArrayOutputStream();
private static final PrintStream OLD_OUT = System.out;
private static final PrintStream OLD_ERR = System.err;
private static String dbName;
private static final String LOGS = TestAuditParser.class
.getClassLoader().getResource("testaudit.log").getPath();
/**
* Creates output directory which will be used by the test-cases.
* If a test-case needs a separate directory, it has to create a random
* directory inside {@code outputBaseDir}.
*
* @throws Exception In case of exception while creating output directory.
*/
@BeforeClass
public static void init() throws Exception {
outputBaseDir = getRandomTempDir();
dbName = getRandomTempDir() + "/testAudit.db";
parserTool = new AuditParser();
String[] args = new String[]{dbName, "load", LOGS};
execute(args, "");
}
@Before
public void setup() {
System.setOut(new PrintStream(OUT));
System.setErr(new PrintStream(err));
}
@After
public void reset() {
// reset stream after each unit test
OUT.reset();
err.reset();
// restore system streams
System.setOut(OLD_OUT);
System.setErr(OLD_ERR);
}
/**
* Cleans up the output base directory.
*/
@AfterClass
public static void cleanup() throws IOException {
FileUtils.deleteDirectory(outputBaseDir);
}
private static void execute(String[] args, String msg) {
List<String> arguments = new ArrayList<>(Arrays.asList(args));
LOG.info("Executing shell command with args {}", arguments);
CommandLine cmd = parserTool.getCmd();
IExceptionHandler2<List<Object>> exceptionHandler =
new IExceptionHandler2<List<Object>>() {
@Override
public List<Object> handleParseException(ParameterException ex,
String[] args) {
throw ex;
}
@Override
public List<Object> handleExecutionException(ExecutionException ex,
ParseResult parseResult) {
throw ex;
}
};
cmd.parseWithHandlers(new CommandLine.RunLast(),
exceptionHandler, args);
Assert.assertTrue(OUT.toString().contains(msg));
}
/**
* Test to find top 5 commands.
*/
@Test
public void testTemplateTop5Cmds() {
String[] args = new String[]{dbName, "template", "top5cmds"};
execute(args,
"DELETE_KEY\t3\t\n" +
"ALLOCATE_KEY\t2\t\n" +
"COMMIT_KEY\t2\t\n" +
"CREATE_BUCKET\t1\t\n" +
"CREATE_VOLUME\t1\t\n\n");
}
/**
* Test to find top 5 users.
*/
@Test
public void testTemplateTop5Users() {
String[] args = new String[]{dbName, "template", "top5users"};
execute(args, "hadoop\t9\t\n");
}
/**
* Test to find top 5 active times, grouped by seconds.
*/
@Test
public void testTemplateTop5ActiveTimeBySeconds() {
String[] args = new String[]{dbName, "template", "top5activetimebyseconds"};
execute(args,
"2018-09-06 01:57:22\t3\t\n" +
"2018-09-06 01:58:08\t1\t\n" +
"2018-09-06 01:58:18\t1\t\n" +
"2018-09-06 01:59:36\t1\t\n" +
"2018-09-06 01:59:41\t1\t\n");
}
/**
* Test to execute custom query.
*/
@Test
public void testQueryCommand() {
String[] args = new String[]{dbName, "query",
"select count(*) from audit"};
execute(args,
"9");
}
/**
* Test to check help message.
* @throws Exception
*/
@Test
public void testHelp() throws Exception {
String[] args = new String[]{"--help"};
execute(args,
"Usage: ozone auditparser [-hV] [--verbose] [-D=<String=String>]... " +
"<database>\n" +
" [COMMAND]");
}
private static File getRandomTempDir() throws IOException {
File tempDir = new File(outputBaseDir,
RandomStringUtils.randomAlphanumeric(5));
FileUtils.forceMkdir(tempDir);
return tempDir;
}
}

View File

@ -0,0 +1,21 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.audit.parser;
/**
* Tests for AuditParser.
*/

View File

@ -0,0 +1,22 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
createAuditTable=CREATE TABLE IF NOT EXISTS audit (datetime text,level varchar(7),logger varchar(7),user text,ip text,op text,params text,result varchar(7),exception text,UNIQUE(datetime,level,logger,user,ip,op,params,result))
insertAuditEntry=INSERT INTO AUDIT VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)
top5users=select user,count(*) as total from audit group by user order by total DESC limit 5
top5cmds=select op,count(*) as total from audit group by op order by total DESC limit 5
top5activetimebyseconds=select substr(datetime,1,charindex(',',datetime)-1) as dt,count(*) as thecount from audit group by dt order by thecount DESC limit 5

View File

@ -0,0 +1,15 @@
2018-09-06 01:57:22,996 | INFO | OMAudit | user=hadoop | ip=172.18.0.4 | op=ALLOCATE_KEY | {volume=vol-7-67105, bucket=bucket-0-68911, key=key-246-29031, dataSize=10240, replicationType=STAND_ALONE, replicationFactor=ONE, keyLocationInfo=null} | ret=SUCCESS |
2018-09-06 01:57:22,997 | INFO | OMAudit | user=hadoop | ip=172.18.0.4 | op=COMMIT_KEY | {volume=vol-4-88912, bucket=bucket-0-27678, key=key-241-42688, dataSize=10240, replicationType=null, replicationFactor=null, keyLocationInfo=[org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo@25bd7387], clientID=61987500296} | ret=SUCCESS |
2018-09-06 01:57:22,997 | INFO | OMAudit | user=hadoop | ip=172.18.0.4 | op=COMMIT_KEY | {volume=vol-1-59303, bucket=bucket-0-47510, key=key-248-17213, dataSize=10240, replicationType=null, replicationFactor=null, keyLocationInfo=[org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo@788f5bea], clientID=61990833797} | ret=SUCCESS |
2018-09-06 01:58:08,035 | ERROR | OMAudit | user=hadoop | ip=172.18.0.4 | op=CREATE_VOLUME | {admin=hadoop, owner=tom, volume=dcv, creationTime=0, quotaInBytes=1152921504606846976} | ret=FAILURE | org.apache.hadoop.ozone.om.exceptions.OMException
at org.apache.hadoop.ozone.om.VolumeManagerImpl.createVolume(VolumeManagerImpl.java:137)
at org.apache.hadoop.ozone.om.OzoneManager.createVolume(OzoneManager.java:469)
2018-09-06 01:58:18,447 | ERROR | OMAudit | user=hadoop | ip=172.18.0.4 | op=CREATE_BUCKET | {volume=dcv, bucket=dcb, acls=[USER:hadoop:rw, GROUP:users:rw], isVersionEnabled=false, storageType=DISK, creationTime=0} | ret=FAILURE | org.apache.hadoop.ozone.om.exceptions.OMException: Bucket already exist
at org.apache.hadoop.ozone.om.BucketManagerImpl.createBucket(BucketManagerImpl.java:98)
at org.apache.hadoop.ozone.om.OzoneManager.createBucket(OzoneManager.java:694)
2018-09-06 01:59:36,686 | INFO | OMAudit | user=hadoop | ip=172.18.0.4 | op=DELETE_KEY | {volume=dcv, bucket=dcb, key=dck1, dataSize=0, replicationType=null, replicationFactor=null, keyLocationInfo=null} | ret=SUCCESS |
2018-09-06 01:59:41,027 | INFO | OMAudit | user=hadoop | ip=172.18.0.4 | op=DELETE_KEY | {volume=dcv, bucket=dcb, key=dck2, dataSize=0, replicationType=null, replicationFactor=null, keyLocationInfo=null} | ret=SUCCESS |
2018-09-06 01:59:47,169 | ERROR | OMAudit | user=hadoop | ip=172.18.0.4 | op=DELETE_KEY | {volume=dcv, bucket=dcb, key=dck2, dataSize=0, replicationType=null, replicationFactor=null, keyLocationInfo=null} | ret=FAILURE | org.apache.hadoop.ozone.om.exceptions.OMException: Key not found
at org.apache.hadoop.ozone.om.KeyManagerImpl.deleteKey(KeyManagerImpl.java:448)
at org.apache.hadoop.ozone.om.OzoneManager.deleteKey(OzoneManager.java:892)
2018-09-06 01:60:22,900 | INFO | OMAudit | user=hadoop | ip=172.18.0.4 | op=ALLOCATE_KEY | {volume=vol-8-67105, bucket=bucket-0-68911, key=key-246-29031, dataSize=10240, replicationType=STAND_ALONE, replicationFactor=ONE, keyLocationInfo=null} | ret=SUCCESS |