monitors) {
- String tmp = getProperty("executor.interval.min");
- if (tmp != null)
- MIN_INTERVAL = Integer.parseInt(tmp);
-
- int[] monIntervals = new int[monitors.size()];
-
- for (int i = 0; i < monitors.size(); i++)
- monIntervals[i] = monitors.get(i).interval;
-
- return Math.max(MIN_INTERVAL, gcd(monIntervals));
- }
-
- /**
- * Checks whether a specific shell command is available
- * in the system.
- *
- * @param cmd the command to check for
- *
- * @return true, if the command is available, false otherwise
- */
- public static boolean checkExistence(String cmd) {
- CharSequence sb = runCommandGeneric("which " + cmd);
- if (sb.length() > 1)
- return true;
-
- return false;
- }
-
- /**
- * Runs a shell command in the system and provides a StringBuffer
- * with the output of the command.
- * This method is deprecated. See the related method that returns a CharSequence as opposed to a StringBuffer.
- *
- * @param cmd an array of strings that form the command to run
- *
- * @return a text that contains the output of the command
- * @see #runCommandGeneric(String[])
- * @deprecated
- */
- public static StringBuffer runCommand(String[] cmd) {
- return new StringBuffer(runCommandGeneric(cmd));
- }
-
- /**
- * Runs a shell command in the system and provides a CharSequence
- * with the output of the command.
- *
- * @param cmd an array of strings that form the command to run
- *
- * @return a text that contains the output of the command
- */
- public static CharSequence runCommandGeneric(String[] cmd) {
- StringBuilder retval = new StringBuilder(MAX_OUTPUT_LENGTH);
- Process p;
- try {
- p = Runtime.getRuntime().exec(cmd);
- InputStream tmp = p.getInputStream();
- p.waitFor();
- int c;
- while ((c = tmp.read()) != -1)
- retval.append((char) c);
- } catch (IOException e) {
- e.printStackTrace();
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
-
- return retval;
- }
-
- /**
- * Runs a shell command in the system and provides a StringBuffer
- * with the output of the command.
- *
- * This method is deprecated. See the related method that returns a CharSequence as opposed to a StringBuffer.
- * @param cmd the command to run
- *
- * @return a text that contains the output of the command
- * @see #runCommandGeneric(String)
- * @deprecated
- */
- public static StringBuffer runCommand(String cmd) {
- return new StringBuffer(runCommandGeneric(cmd));
- }
-
- /**
- * Runs a shell command in the system and provides a CharSequence
- * with the output of the command.
- *
- * @param cmd the command to run
- *
- * @return a text that contains the output of the command
- */
- public static CharSequence runCommandGeneric(String cmd) {
- return runCommandGeneric(cmd.split("\\s+"));
- }
- /**
- * Determines the greatest common divisor (GCD) of two integers.
- *
- * @param m the first integer
- * @param n the second integer
- *
- * @return the greatest common divisor of m and n
- */
- public static int gcd(int m, int n) {
- if (m == 0 && n == 0)
- return 0;
- if (m < n) {
- int t = m;
- m = n;
- n = t;
- }
- if (n == 0) // gcd(m, 0) = m; guards the modulo below against division by zero
- return m;
- int r = m % n;
- if (r == 0) {
- return n;
- } else {
- return gcd(n, r);
- }
- }
-
- /**
- * Determines the greatest common divisor (GCD) of a list
- * of integers.
- *
- * @param numbers the list of integers to process
- *
- * @return the greatest common divisor of all numbers
- */
- public static int gcd(int[] numbers) {
-
- if (numbers.length == 1)
- return numbers[0];
-
- int g = gcd(numbers[0], numbers[1]);
-
- for (int i = 2; i < numbers.length; i++)
- g = gcd(g, numbers[i]);
-
- return g;
- }
-
- private static String [] expandDirs(String [] input, String patternStr) {
-
- ArrayList fnames = new ArrayList();
- Pattern pattern = Pattern.compile(patternStr);
- Matcher matcher;
- File f;
-
- for (String fname : input) {
- f = new File(fname);
- if (f.exists()) {
- if (f.isDirectory()) {
- // add all matching files
- File [] fcs = f.listFiles();
- for (File fc : fcs) {
- matcher = pattern.matcher(fc.getName());
- if (matcher.find() && fc.isFile())
- fnames.add(fc.getAbsolutePath());
- }
- } else {
- // normal file, just add to output
- fnames.add(f.getAbsolutePath());
- }
- }
- }
- return fnames.toArray(input);
- }
-
- private static int setValue(String propname, int defaultValue) {
-
- String v = getProperty(propname);
-
- if (v != null)
- return Integer.parseInt(v);
- else
- return defaultValue;
- }
-
-
- public static void logInfo(String str) {
- LOG.info(str);
- }
-}
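
As a quick illustration of the interval logic removed above: getInterval() takes the
GCD of all monitor intervals and clamps it from below by MIN_INTERVAL, so the Executor
wakes as rarely as possible while still hitting every job's deadline exactly. A minimal
sketch, assuming hypothetical intervals and the failmon classes on the classpath:

    public class IntervalSketch {
        public static void main(String[] args) {
            int[] monIntervals = {60, 90, 150};    // hypothetical, in seconds
            int g = Environment.gcd(monIntervals); // gcd(60, 90) = 30; gcd(30, 150) = 30
            // MIN_INTERVAL defaults to 5, so this prints 30
            System.out.println(Math.max(Executor.MIN_INTERVAL, g));
        }
    }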
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/EventRecord.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/EventRecord.java
deleted file mode 100644
index cf1e0c80e9e..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/EventRecord.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Collections;
-import java.util.HashMap;
-
-/**********************************************************
- * Objects of this class represent metrics collected for
- * a specific hardware source. Each EventRecord contains a HashMap of
- * (key, value) pairs, each of which represents a property of
- * the metered value. For instance, when parsing a log file, an
- * EventRecord is created for each log entry, which contains
- * the hostname and the ip addresses of the node, timestamp of
- * the log entry, the actual message etc. Each and every EventRecord
- * contains the hostname of the machine on which it was collected,
- * its IP address and the time of collection.
- *
- * The main purpose of this class is to provide a uniform format
- * for records collected from various system components (logs,
- * ifconfig, smartmontools, lm-sensors etc). All metric values are
- * converted into this format after they are collected by a
- * Monitored object.
- *
- **********************************************************/
-
-public class EventRecord {
-
- HashMap fields;
-
- /**
- * Create the EventRecord given the most common properties
- * among different metric types.
- */
- public EventRecord(String _hostname, Object [] _ips, Calendar _timestamp,
- String _type, String _logLevel, String _source, String _message) {
- fields = new HashMap();
- fields.clear();
- set("hostname", _hostname);
- set("ips", _ips);
- set("timestamp", _timestamp);
- set("type", _type);
- set("logLevel", _logLevel);
- set("source", _source);
- set("message", _message);
- }
-
- /**
- * Create the EventRecord with no fields other than "invalid" as
- * the hostname. This is only used as a dummy.
- */
- public EventRecord() {
- // creates an invalid record
- fields = new HashMap();
- fields.clear();
- set("hostname", "invalid");
- }
-
- /**
- * Return the HashMap of properties of the EventRecord.
- *
- * @return a HashMap that contains all properties of the record.
- */
- public final HashMap getMap() {
- return fields;
- }
-
- /**
- * Set the value of a property of the EventRecord.
- *
- * @param fieldName the name of the property to set
- * @param fieldValue the value of the property to set
- *
- */
- public void set(String fieldName, Object fieldValue) {
- if (fieldValue != null)
- fields.put(fieldName, fieldValue);
- }
-
- /**
- * Get the value of a property of the EventRecord.
- * If the property with the specific key is not found,
- * null is returned.
- *
- * @param fieldName the name of the property to get.
- */
- public Object get(String fieldName) {
- return fields.get(fieldName);
- }
-
- /**
- * Check if the EventRecord is a valid one, i.e., whether
- * it represents meaningful metric values.
- *
- * @return true if the EventRecord is a valid one, false otherwise.
- */
- public boolean isValid() {
- return !("invalid".equalsIgnoreCase((String) fields.get("hostname")));
- }
-
- /**
- * Creates and returns a string representation of the object.
- *
- * @return a String representation of the object
- */
-
- public String toString() {
- String retval = "";
- ArrayList keys = new ArrayList(fields.keySet());
- Collections.sort(keys);
-
- for (int i = 0; i < keys.size(); i++) {
- Object value = fields.get(keys.get(i));
- if (value == null)
- retval += keys.get(i) + ":\tnull\n";
- else if (value instanceof String)
- retval += keys.get(i) + ":\t" + value + "\n";
- else if (value instanceof Calendar)
- retval += keys.get(i) + ":\t" + ((Calendar) value).getTime() + "\n";
- else if (value instanceof InetAddress[] || value instanceof String []) {
- retval += "Known IPs:\t";
- for (InetAddress ip : ((InetAddress[]) value))
- retval += ip.getHostAddress() + " ";
- retval += "\n";
- } else {
- retval += keys.get(i) + ":\t" + value.toString() + "\n";
- }
- }
- return retval;
- }
-
-}
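
A minimal sketch of building an EventRecord by hand, the way a log parser would
(the type, level, source and message values here are hypothetical):

    import java.net.InetAddress;
    import java.util.Calendar;

    public class EventRecordSketch {
        public static void main(String[] args) throws Exception {
            InetAddress host = InetAddress.getLocalHost();
            EventRecord er = new EventRecord(
                host.getCanonicalHostName(),                  // hostname of the collecting node
                InetAddress.getAllByName(host.getHostName()), // its IP addresses
                Calendar.getInstance(),                       // time of collection
                "HadoopLog", "INFO",                          // type and log level
                "org.apache.hadoop.dfs.DataNode",             // source
                "Registered with namenode");                  // message
            System.out.println(er.isValid()); // true: hostname is not "invalid"
            System.out.print(er);             // one sorted "key:<TAB>value" line per field
        }
    }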
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Executor.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Executor.java
deleted file mode 100644
index fc23d6917c5..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Executor.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.util.ArrayList;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**********************************************************
- * This class executes monitoring jobs on all nodes of the
- * cluster, on which we intend to gather failure metrics.
- * It is basically a thread that sleeps and periodically wakes
- * up to execute monitoring jobs and ship all gathered data to
- * a "safe" location, which in most cases will be the HDFS
- * filesystem of the monitored cluster.
- *
- **********************************************************/
-
-public class Executor implements Runnable {
-
- public static final int DEFAULT_LOG_INTERVAL = 3600;
-
- public static final int DEFAULT_POLL_INTERVAL = 360;
-
- public static int MIN_INTERVAL = 5;
-
- public static int instances = 0;
-
- LocalStore lstore;
-
- ArrayList monitors;
-
- int interval;
-
- int upload_interval;
- int upload_counter;
-
- /**
- * Create an instance of the class and read the configuration
- * file to determine the set of jobs that will be run and the
- * maximum interval for which the thread can sleep before it
- * wakes up to execute a monitoring job on the node.
- *
- */
-
- public Executor(Configuration conf) {
-
- Environment.prepare("conf/failmon.properties");
-
- String localTmpDir;
-
- if (conf == null) {
- // running as a stand-alone application
- localTmpDir = System.getProperty("java.io.tmpdir");
- Environment.setProperty("local.tmp.dir", localTmpDir);
- } else {
- // running from within Hadoop
- localTmpDir = conf.get("hadoop.tmp.dir");
- String hadoopLogPath = System.getProperty("hadoop.log.dir") + "/" + System.getProperty("hadoop.log.file");
- Environment.setProperty("hadoop.log.file", hadoopLogPath);
- Environment.setProperty("local.tmp.dir", localTmpDir);
- }
-
- monitors = Environment.getJobs();
- interval = Environment.getInterval(monitors);
- upload_interval = LocalStore.UPLOAD_INTERVAL;
- lstore = new LocalStore();
-
- if (Environment.getProperty("local.upload.interval") != null)
- upload_interval = Integer.parseInt(Environment.getProperty("local.upload.interval"));
-
- instances++;
- }
-
- public void run() {
- upload_counter = upload_interval;
-
- Environment.logInfo("Failmon Executor thread started successfully.");
- while (true) {
- try {
- Thread.sleep(interval * 1000);
- for (int i = 0; i < monitors.size(); i++) {
- monitors.get(i).counter -= interval;
- if (monitors.get(i).counter <= 0) {
- monitors.get(i).reset();
- Environment.logInfo("Calling " + monitors.get(i).job.getInfo() + "...\t");
- monitors.get(i).job.monitor(lstore);
- }
- }
- upload_counter -= interval;
- if (upload_counter <= 0) {
- lstore.upload();
- upload_counter = upload_interval;
- }
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
- }
- }
-
- public void cleanup() {
- instances--;
- }
-}
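
A sketch of how the deleted daemon was driven in stand-alone mode, assuming it is
launched from the FailMon root so conf/failmon.properties resolves:

    public class ExecutorSketch {
        public static void main(String[] args) {
            // null = stand-alone; a Hadoop Configuration switches it to in-cluster paths
            new Thread(new Executor(null), "failmon-executor").start();
            // The thread now sleeps `interval` seconds per iteration, fires every
            // MonitorJob whose counter has run out, and calls LocalStore.upload()
            // each time upload_counter reaches zero.
        }
    }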
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HDFSMerger.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HDFSMerger.java
deleted file mode 100644
index 7d4230e6ddd..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HDFSMerger.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.io.BufferedOutputStream;
-import java.io.InputStream;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.FileNotFoundException;
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.zip.CRC32;
-import java.util.zip.CheckedOutputStream;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FSDataInputStream;
-
-public class HDFSMerger {
-
- Configuration hadoopConf;
- FileSystem hdfs;
-
- String hdfsDir;
-
- FileStatus [] inputFiles;
-
- Path outputFilePath;
- FSDataOutputStream outputFile;
-
- boolean compress;
-
- FileWriter fw;
-
- BufferedWriter writer;
-
- public HDFSMerger() throws IOException {
-
- String hadoopConfPath;
-
- if (Environment.getProperty("hadoop.conf.path") == null)
- hadoopConfPath = "../../../conf";
- else
- hadoopConfPath = Environment.getProperty("hadoop.conf.path");
-
- // Read the configuration for the Hadoop environment
- Configuration hadoopConf = new Configuration();
- hadoopConf.addResource(new Path(hadoopConfPath + "/hadoop-default.xml"));
- hadoopConf.addResource(new Path(hadoopConfPath + "/hadoop-site.xml"));
-
- // determine the local output file name
- if (Environment.getProperty("local.tmp.filename") == null)
- Environment.setProperty("local.tmp.filename", "failmon.dat");
-
- // determine the upload location
- hdfsDir = Environment.getProperty("hdfs.upload.dir");
- if (hdfsDir == null)
- hdfsDir = "/failmon";
-
- hdfs = FileSystem.get(hadoopConf);
-
- Path hdfsDirPath = new Path(hadoopConf.get("fs.default.name") + hdfsDir);
-
- try {
- if (!hdfs.getFileStatus(hdfsDirPath).isDir()) {
- Environment.logInfo("HDFSMerger: Not an HDFS directory: " + hdfsDirPath.toString());
- System.exit(0);
- }
- } catch (FileNotFoundException e) {
- Environment.logInfo("HDFSMerger: Directory not found: " + hdfsDirPath.toString());
- }
-
- inputFiles = hdfs.listStatus(hdfsDirPath);
-
- outputFilePath = new Path(hdfsDirPath.toString() + "/" + "merge-"
- + Calendar.getInstance().getTimeInMillis() + ".dat");
- outputFile = hdfs.create(outputFilePath);
-
- for (FileStatus fstatus : inputFiles) {
- appendFile(fstatus.getPath());
- hdfs.delete(fstatus.getPath(), true);
- }
-
- outputFile.close();
-
- Environment.logInfo("HDFS file merging complete!");
- }
-
- private void appendFile (Path inputPath) throws IOException {
-
- FSDataInputStream anyInputFile = hdfs.open(inputPath);
- InputStream inputFile;
- byte buffer[] = new byte[4096];
-
- if (inputPath.toString().endsWith(LocalStore.COMPRESSION_SUFFIX)) {
- // the file is compressed
- inputFile = new ZipInputStream(anyInputFile);
- ((ZipInputStream) inputFile).getNextEntry();
- } else {
- inputFile = anyInputFile;
- }
-
- try {
- int bytesRead = 0;
- while ((bytesRead = inputFile.read(buffer)) > 0) {
- outputFile.write(buffer, 0, bytesRead);
- }
- } catch (IOException e) {
- Environment.logInfo("Error while copying file:" + inputPath.toString());
- } finally {
- inputFile.close();
- }
- }
-
-
- public static void main(String [] args) {
-
- Environment.prepare("./conf/failmon.properties");
-
- try {
- new HDFSMerger();
- } catch (IOException e) {
- e.printStackTrace();
- }
-
- }
-}
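
The merger is driven entirely by its constructor; a sketch of invoking it
programmatically, assuming the property file path used by main() above:

    public class MergeSketch {
        public static void main(String[] args) throws java.io.IOException {
            Environment.prepare("./conf/failmon.properties");
            // Concatenates every file under hdfs.upload.dir (default /failmon),
            // inflating .zip uploads, into a single merge-<timestamp>.dat and
            // deletes the inputs.
            new HDFSMerger();
        }
    }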
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HadoopLogParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HadoopLogParser.java
deleted file mode 100644
index e30400338bb..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/HadoopLogParser.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.io.IOException;
-import java.util.Calendar;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**********************************************************
- * An object of this class parses a Hadoop log file to create
- * appropriate EventRecords. The log file can either be the log
- * of a NameNode or JobTracker or DataNode or TaskTracker.
- *
- **********************************************************/
-
-public class HadoopLogParser extends LogParser {
-
- /**
- * Create a new parser object and try to find the hostname
- * of the node that generated the log
- */
- public HadoopLogParser(String fname) {
- super(fname);
- if ((dateformat = Environment.getProperty("log.hadoop.dateformat")) == null)
- dateformat = "\\d{4}-\\d{2}-\\d{2}";
- if ((timeformat = Environment.getProperty("log.hadoop.timeformat")) == null)
- timeformat = "\\d{2}:\\d{2}:\\d{2}";
- findHostname();
- }
-
- /**
- * Parses one line of the log. If the line contains a valid
- * log entry, then an appropriate EventRecord is returned, after all
- * relevant fields have been parsed.
- *
- * @param line the log line to be parsed
- *
- * @return the EventRecord representing the log entry of the line. If
- * the line does not contain a valid log entry, then the EventRecord
- * returned has isValid() = false. When the end-of-file has been reached,
- * null is returned to the caller.
- */
- public EventRecord parseLine(String line) throws IOException {
- EventRecord retval = null;
-
- if (line != null) {
- // process line
- String patternStr = "(" + dateformat + ")";
- patternStr += "\\s+";
- patternStr += "(" + timeformat + ")";
- patternStr += ".{4}\\s(\\w*)\\s"; // for logLevel
- patternStr += "\\s*([\\w+\\.?]+)"; // for source
- patternStr += ":\\s+(.+)"; // for the message
- Pattern pattern = Pattern.compile(patternStr);
- Matcher matcher = pattern.matcher(line);
-
- if (matcher.find(0) && matcher.groupCount() >= 5) {
- retval = new EventRecord(hostname, ips, parseDate(matcher.group(1),
- matcher.group(2)),
- "HadoopLog",
- matcher.group(3), // loglevel
- matcher.group(4), // source
- matcher.group(5)); // message
- } else {
- retval = new EventRecord();
- }
- }
-
- return retval;
- }
-
- /**
- * Parse a date found in the Hadoop log.
- *
- * @return a Calendar representing the date
- */
- protected Calendar parseDate(String strDate, String strTime) {
- Calendar retval = Calendar.getInstance();
- // set date
- String[] fields = strDate.split("-");
- retval.set(Calendar.YEAR, Integer.parseInt(fields[0]));
- retval.set(Calendar.MONTH, Integer.parseInt(fields[1]) - 1); // Calendar.MONTH is zero-based
- retval.set(Calendar.DATE, Integer.parseInt(fields[2]));
- // set time
- fields = strTime.split(":");
- retval.set(Calendar.HOUR_OF_DAY, Integer.parseInt(fields[0]));
- retval.set(Calendar.MINUTE, Integer.parseInt(fields[1]));
- retval.set(Calendar.SECOND, Integer.parseInt(fields[2]));
- return retval;
- }
-
- /**
- * Attempt to determine the hostname of the node that created the
- * log file. This information can be found in the STARTUP_MSG lines
- * of the Hadoop log, which are emitted when the node starts.
- *
- */
- private void findHostname() {
- String startupInfo = Environment.runCommandGeneric(
- "grep --max-count=1 STARTUP_MSG:\\s*host " + file.getName()).toString();
- Pattern pattern = Pattern.compile("\\s+(\\w+/.+)\\s+");
- Matcher matcher = pattern.matcher(startupInfo);
- if (matcher.find(0)) {
- hostname = matcher.group(1).split("/")[0];
- ips = new String[1];
- ips[0] = matcher.group(1).split("/")[1];
- }
- }
-
- /**
- * Return a String with information about this class
- *
- * @return A String describing this class
- */
- public String getInfo() {
- return ("Hadoop Log Parser for file: " + file.getName());
- }
-
-}
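
For reference, here is how parseLine()'s pattern decomposes a typical (here
hypothetical) Hadoop log line:

    // 2008-07-01 14:23:05,972 INFO org.apache.hadoop.dfs.DataNode: Starting DataNode
    //
    // group 1 (dateformat): 2008-07-01
    // group 2 (timeformat): 14:23:05   (the ".{4}" then skips the ",972" millis)
    // group 3 (logLevel):   INFO
    // group 4 (source):     org.apache.hadoop.dfs.DataNode
    // group 5 (message):    Starting DataNode
    //
    // parseDate(group 1, group 2) turns the first two groups into the Calendar
    // stored in the resulting EventRecord.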
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LocalStore.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LocalStore.java
deleted file mode 100644
index 4785874eb07..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LocalStore.java
+++ /dev/null
@@ -1,282 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.io.BufferedOutputStream;
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.zip.CRC32;
-import java.util.zip.CheckedOutputStream;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-
-/**********************************************************
- * This class takes care of the temporary local storage of
- * gathered metrics before they get uploaded into HDFS. It writes
- * Serialized Records as lines in a temporary file and then
- * compresses and uploads it into HDFS.
- *
- **********************************************************/
-
-public class LocalStore {
-
- public final static char FIELD_SEPARATOR = '|';
-
- public final static char RECORD_SEPARATOR = '\n';
-
- public final static String COMPRESSION_SUFFIX = ".zip";
-
- public final static int UPLOAD_INTERVAL = 600;
-
- String filename;
- String hdfsDir;
-
- boolean compress;
-
- FileWriter fw;
-
- BufferedWriter writer;
-
- /**
- * Create an instance of the class and read the configuration
- * file to determine some output parameters. Then, initialize the
- * structures needed for buffered I/O (so that small appends
- * can be handled efficiently).
- *
- */
-
- public LocalStore() {
- // determine the local output file name
- if (Environment.getProperty("local.tmp.filename") == null)
- Environment.setProperty("local.tmp.filename", "failmon.dat");
-
- // local.tmp.dir has been set by the Executor
- if (Environment.getProperty("local.tmp.dir") == null)
- Environment.setProperty("local.tmp.dir", System.getProperty("java.io.tmpdir"));
-
- filename = Environment.getProperty("local.tmp.dir") + "/" +
- Environment.getProperty("local.tmp.filename");
-
- // determine the upload location
- hdfsDir = Environment.getProperty("hdfs.upload.dir");
- if (hdfsDir == null)
- hdfsDir = "/failmon";
-
- // determine if compression is enabled
- compress = true;
- if ("false".equalsIgnoreCase(Environment
- .getProperty("local.tmp.compression")))
- compress = false;
-
- try {
- fw = new FileWriter(filename, true);
- writer = new BufferedWriter(fw);
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- /**
- * Insert an EventRecord to the local storage, after it
- * gets serialized and anonymized.
- *
- * @param er the EventRecord to be inserted
- */
-
- public void insert(EventRecord er) {
- SerializedRecord sr = new SerializedRecord(er);
- try {
- Anonymizer.anonymize(sr);
- } catch (Exception e) {
- e.printStackTrace();
- }
- append(sr);
- }
-
- /**
- * Insert an array of EventRecords to the local storage, after they
- * get serialized and anonymized.
- *
- * @param ers the array of EventRecords to be inserted
- */
- public void insert(EventRecord[] ers) {
- for (EventRecord er : ers)
- insert(er);
- }
-
- private void append(SerializedRecord sr) {
- try {
- writer.write(pack(sr).toString());
- writer.write(RECORD_SEPARATOR);
- // writer.flush();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- /**
- * Pack a SerializedRecord into a single line of text
- *
- * This method is deprecated.
- * @param sr the SerializedRecord to be packed
- * @return Packed representation of the SerializedRecord
- * @see #packConcurrent(SerializedRecord)
- * @deprecated
- */
- public static StringBuffer pack(SerializedRecord sr) {
- return new StringBuffer(packConcurrent(sr));
- }
-
- /**
- * Pack a SerializedRecord into a single line of text
- *
- * @param sr the SerializedRecord to be packed
- * @return Packed representation of the SerializedRecord
- */
- public static CharSequence packConcurrent(SerializedRecord sr) {
- StringBuilder sb = new StringBuilder();
-
- ArrayList keys = new ArrayList(sr.fields.keySet());
-
- if (sr.isValid())
- SerializedRecord.arrangeKeys(keys);
-
- for (int i = 0; i < keys.size(); i++) {
- String value = sr.fields.get(keys.get(i));
- sb.append(keys.get(i) + ":" + value);
- sb.append(FIELD_SEPARATOR);
- }
- return sb;
- }
-
- /**
- * Upload the local file store into HDFS, after
- * compressing it. Then a new local file is created
- * as a temporary record store.
- *
- */
- public void upload() {
- try {
- writer.flush();
- if (compress)
- zipCompress(filename);
- String remoteName = "failmon-";
- if ("true".equalsIgnoreCase(Environment.getProperty("anonymizer.hash.hostnames")))
- remoteName += Anonymizer.getMD5Hash(InetAddress.getLocalHost().getCanonicalHostName()) + "-";
- else
- remoteName += InetAddress.getLocalHost().getCanonicalHostName() + "-";
- remoteName += Calendar.getInstance().getTimeInMillis();//.toString();
- if (compress)
- copyToHDFS(filename + COMPRESSION_SUFFIX, hdfsDir + "/" + remoteName + COMPRESSION_SUFFIX);
- else
- copyToHDFS(filename, hdfsDir + "/" + remoteName);
- } catch (IOException e) {
- e.printStackTrace();
- }
-
- // delete and re-open
- try {
- fw.close();
- fw = new FileWriter(filename);
- writer = new BufferedWriter(fw);
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- /**
- * Compress a text file using the ZIP compressing algorithm.
- *
- * @param filename the path to the file to be compressed
- */
- public static void zipCompress(String filename) throws IOException {
- FileOutputStream fos = new FileOutputStream(filename + COMPRESSION_SUFFIX);
- CheckedOutputStream csum = new CheckedOutputStream(fos, new CRC32());
- ZipOutputStream out = new ZipOutputStream(new BufferedOutputStream(csum));
- out.setComment("Failmon records.");
-
- BufferedReader in = new BufferedReader(new FileReader(filename));
- out.putNextEntry(new ZipEntry(new File(filename).getName()));
- int c;
- while ((c = in.read()) != -1)
- out.write(c);
- in.close();
-
- out.finish();
- out.close();
- }
-
- /**
- * Copy a local file to HDFS
- *
- * @param localFile the filename of the local file
- * @param hdfsFile the HDFS filename to copy to
- */
- public static void copyToHDFS(String localFile, String hdfsFile) throws IOException {
-
- String hadoopConfPath;
-
- if (Environment.getProperty("hadoop.conf.path") == null)
- hadoopConfPath = "../../../conf";
- else
- hadoopConfPath = Environment.getProperty("hadoop.conf.path");
-
- // Read the configuration for the Hadoop environment
- Configuration hadoopConf = new Configuration();
- hadoopConf.addResource(new Path(hadoopConfPath + "/hadoop-default.xml"));
- hadoopConf.addResource(new Path(hadoopConfPath + "/hadoop-site.xml"));
-
- // System.out.println(hadoopConf.get("hadoop.tmp.dir"));
- // System.out.println(hadoopConf.get("fs.default.name"));
- FileSystem fs = FileSystem.get(hadoopConf);
-
- // HadoopDFS deals with Path
- Path inFile = new Path("file://" + localFile);
- Path outFile = new Path(hadoopConf.get("fs.default.name") + hdfsFile);
-
- // Read from and write to new file
- Environment.logInfo("Uploading to HDFS (file " + outFile + ") ...");
- fs.copyFromLocalFile(false, inFile, outFile);
- }
-
- /**
- * Close the temporary local file
- *
- */
- public void close() {
- try {
- writer.flush();
- writer.close();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-}
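
A sketch of the LocalStore life cycle; the NICParser source is a stand-in (any
Monitored implementation works) and assumes nic.list is set in the property file:

    public class LocalStoreSketch {
        public static void main(String[] args) {
            Environment.prepare("conf/failmon.properties");
            Environment.setProperty("local.tmp.dir", System.getProperty("java.io.tmpdir"));
            LocalStore store = new LocalStore();
            store.insert(new NICParser().monitor()); // serialize, anonymize, buffer locally
            store.upload(); // flush, zip if enabled, copy to hdfs.upload.dir, re-open
            store.close();
        }
    }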
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LogParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LogParser.java
deleted file mode 100644
index c92911cd730..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/LogParser.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.Calendar;
-
-/**********************************************************
- * This class represents objects that provide log parsing
- * functionality. Typically, such objects read log files line
- * by line and for each log entry they identify, they create a
- * corresponding EventRecord. In this way, disparate log files
- * can be merged using the uniform format of EventRecords and can,
- * thus, be processed in a uniform way.
- *
- **********************************************************/
-
-public abstract class LogParser implements Monitored {
-
- File file;
-
- BufferedReader reader;
-
- String hostname;
-
- Object [] ips;
-
- String dateformat;
-
- String timeformat;
-
- private String firstLine;
- private long offset;
-
- /**
- * Create a parser that will read from the specified log file.
- *
- * @param fname the filename of the log file to be read
- */
- public LogParser(String fname) {
- file = new File(fname);
-
- ParseState ps = PersistentState.getState(file.getAbsolutePath());
- firstLine = ps.firstLine;
- offset = ps.offset;
-
- try {
- reader = new BufferedReader(new FileReader(file));
- checkForRotation();
- Environment.logInfo("Checked for rotation...");
- reader.skip(offset);
- } catch (FileNotFoundException e) {
- System.err.println(e.getMessage());
- e.printStackTrace();
- } catch (IOException e) {
- System.err.println(e.getMessage());
- e.printStackTrace();
- }
-
- setNetworkProperties();
- }
-
- protected void setNetworkProperties() {
- // determine hostname and ip addresses for the node
- try {
- // Get hostname
- hostname = InetAddress.getLocalHost().getCanonicalHostName();
- // Get all associated ip addresses
- ips = InetAddress.getAllByName(hostname);
-
- } catch (UnknownHostException e) {
- e.printStackTrace();
- }
- }
-
- /**
- * Insert all EventRecords that can be extracted for
- * the represented hardware component into a LocalStore.
- *
- * @param ls the LocalStore into which the EventRecords
- * are to be stored.
- */
- public void monitor(LocalStore ls) {
- int in = 0;
- EventRecord er = null;
- Environment.logInfo("Started processing log...");
-
- while ((er = getNext()) != null) {
- // Environment.logInfo("Processing log line:\t" + in++);
- if (er.isValid()) {
- ls.insert(er);
- }
- }
-
- PersistentState.updateState(file.getAbsolutePath(), firstLine, offset);
- PersistentState.writeState("conf/parsing.state");
- }
-
- /**
- * Get an array of all EventRecords that can be extracted for
- * the represented hardware component.
- *
- * @return The array of EventRecords
- */
- public EventRecord[] monitor() {
-
- ArrayList recs = new ArrayList();
- EventRecord er;
-
- while ((er = getNext()) != null)
- recs.add(er);
-
- EventRecord[] T = new EventRecord[recs.size()];
-
- return recs.toArray(T);
- }
-
- /**
- * Continue parsing the log file until a valid log entry is identified.
- * When one such entry is found, parse it and return a corresponding EventRecord.
- *
- *
- * @return The EventRecord corresponding to the next log entry
- */
- public EventRecord getNext() {
- try {
- String line = reader.readLine();
- if (line != null) {
- if (firstLine == null)
- firstLine = new String(line);
- offset += line.length() + 1;
- return parseLine(line);
- }
- } catch (IOException e) {
- e.printStackTrace();
- }
- return null;
- }
-
- /**
- * Return the BufferedReader, that reads the log file
- *
- * @return The BufferedReader that reads the log file
- */
- public BufferedReader getReader() {
- return reader;
- }
-
- /**
- * Check whether the log file has been rotated. If so,
- * start reading the file from the beginning.
- *
- */
- public void checkForRotation() {
- try {
- BufferedReader probe = new BufferedReader(new FileReader(file.getAbsoluteFile()));
- if (firstLine == null || (!firstLine.equals(probe.readLine()))) {
- probe.close();
- // start reading the file from the beginning
- reader.close();
- reader = new BufferedReader(new FileReader(file.getAbsoluteFile()));
- firstLine = null;
- offset = 0;
- }
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- /**
- * Parses one line of the log. If the line contains a valid
- * log entry, then an appropriate EventRecord is returned, after all
- * relevant fields have been parsed.
- *
- * @param line the log line to be parsed
- *
- * @return the EventRecord representing the log entry of the line. If
- * the line does not contain a valid log entry, then the EventRecord
- * returned has isValid() = false. When the end-of-file has been reached,
- * null is returned to the caller.
- */
- abstract public EventRecord parseLine(String line) throws IOException;
-
- /**
- * Parse a date found in Hadoop log file.
- *
- * @return a Calendar representing the date
- */
- abstract protected Calendar parseDate(String strDate, String strTime);
-
-}
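
To make the abstract contract concrete, a minimal hypothetical subclass for
one-line "EPOCHSECONDS LEVEL MESSAGE" records (it assumes same-package access to
the hostname, ips and file fields, as the real subclasses have):

    import java.util.Calendar;

    public class EpochLogParser extends LogParser {
        public EpochLogParser(String fname) { super(fname); }

        public EventRecord parseLine(String line) {
            if (line == null)
                return null; // end of file
            String[] f = line.split("\\s+", 3);
            if (f.length < 3)
                return new EventRecord(); // not a valid entry
            return new EventRecord(hostname, ips, parseDate(f[0], ""),
                                   "EpochLog", f[1], "epochlog", f[2]);
        }

        protected Calendar parseDate(String strDate, String strTime) {
            Calendar c = Calendar.getInstance();
            c.setTimeInMillis(Long.parseLong(strDate) * 1000L); // seconds since epoch
            return c;
        }

        public String getInfo() {
            return "Epoch log parser for file: " + file.getName();
        }
    }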
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/MonitorJob.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/MonitorJob.java
deleted file mode 100644
index 5efd4f66a26..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/MonitorJob.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-/**********************************************************
- * This class is a wrapper for a monitoring job.
- *
- **********************************************************/
-
-public class MonitorJob {
- Monitored job;
-
- String type;
- int interval;
- int counter;
-
- public MonitorJob(Monitored _job, String _type, int _interval) {
- job = _job;
- type = _type;
- interval = _interval;
- counter = _interval;
- }
-
- public void reset() {
- counter = interval;
- }
-}
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Monitored.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Monitored.java
deleted file mode 100644
index 559053035dd..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/Monitored.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-/**********************************************************
- * Represents objects that monitor specific hardware resources and
- * can query them to get EventRecords describing the state of these
- * resources.
- *
- **********************************************************/
-
-public interface Monitored {
- /**
- * Get an array of all EventRecords that can be extracted for
- * the represented hardware component.
- *
- * @return The array of EventRecords
- */
- public EventRecord[] monitor();
-
- /**
- * Inserts all EventRecords that can be extracted for
- * the represented hardware component into a LocalStore.
- *
- * @param ls the LocalStore into which the EventRecords
- * are to be stored.
- */
- public void monitor(LocalStore ls);
-
- /**
- * Return a String with information about the implementing
- * class
- *
- * @return A String describing the implementing class
- */
- public String getInfo();
-}
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java
deleted file mode 100644
index 90b4c2f7c5c..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/NICParser.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.ArrayList;
-import java.util.Calendar;
-
-/**********************************************************
- * Objects of this class parse the output of ifconfig to
- * gather information about present Network Interface Cards
- * in the system. The list of NICs to poll is specified in the
- * configuration file.
- *
- **********************************************************/
-
-
-public class NICParser extends ShellParser {
-
- String[] nics;
-
- /**
- * Constructs a NICParser and reads the list of NICs to query
- */
- public NICParser() {
- super();
- nics = Environment.getProperty("nic.list").split(",\\s*");
- }
-
- /**
- * Reads and parses the output of ifconfig for a specified NIC and
- * creates an appropriate EventRecord that holds the desirable
- * information for it.
- *
- * @param device the NIC device name to query
- *
- * @return the EventRecord created
- */
- public EventRecord query(String device) throws UnknownHostException {
- CharSequence sb = Environment.runCommandGeneric("/sbin/ifconfig " + device);
- EventRecord retval = new EventRecord(InetAddress.getLocalHost()
- .getCanonicalHostName(), InetAddress.getAllByName(InetAddress.getLocalHost()
- .getHostName()), Calendar.getInstance(), "NIC", "Unknown", device, "-");
-
- retval.set("hwAddress", findPattern("HWaddr\\s*([\\S{2}:]{17})", sb
- .toString(), 1));
-
- retval.set("ipAddress", findPattern("inet\\s+addr:\\s*([\\w.?]*)", sb
- .toString(), 1));
-
- String tmp = findPattern("inet\\s+addr:\\s*([\\w.?]*)", sb.toString(), 1);
- retval.set("status", (tmp == null) ? "DOWN" : "UP");
- if (tmp != null)
- retval.set("ipAddress", tmp);
-
- retval.set("rxPackets", findPattern("RX\\s*packets\\s*:\\s*(\\d+)", sb
- .toString(), 1));
- retval.set("rxErrors", findPattern("RX.+errors\\s*:\\s*(\\d+)", sb
- .toString(), 1));
- retval.set("rxDropped", findPattern("RX.+dropped\\s*:\\s*(\\d+)", sb
- .toString(), 1));
- retval.set("rxOverruns", findPattern("RX.+overruns\\s*:\\s*(\\d+)", sb
- .toString(), 1));
- retval.set("rxFrame", findPattern("RX.+frame\\s*:\\s*(\\d+)",
- sb.toString(), 1));
-
- retval.set("txPackets", findPattern("TX\\s*packets\\s*:\\s*(\\d+)", sb
- .toString(), 1));
- retval.set("txErrors", findPattern("TX.+errors\\s*:\\s*(\\d+)", sb
- .toString(), 1));
- retval.set("txDropped", findPattern("TX.+dropped\\s*:\\s*(\\d+)", sb
- .toString(), 1));
- retval.set("txOverruns", findPattern("TX.+overruns\\s*:\\s*(\\d+)", sb
- .toString(), 1));
- retval.set("txCarrier", findPattern("TX.+carrier\\s*:\\s*(\\d+)", sb
- .toString(), 1));
-
- retval.set("collisions", findPattern("\\s+collisions\\s*:\\s*(\\d+)", sb
- .toString(), 1));
-
- retval.set("rxBytes", findPattern("RX\\s*bytes\\s*:\\s*(\\d+)", sb
- .toString(), 1));
- retval.set("txBytes", findPattern("TX\\s*bytes\\s*:\\s*(\\d+)", sb
- .toString(), 1));
-
- return retval;
- }
-
- /**
- * Invokes query() to do the parsing and handles parsing errors for
- * each one of the NICs specified in the configuration.
- *
- * @return an array of EventRecords that holds one element that represents
- * the current state of network interfaces.
- */
- public EventRecord[] monitor() {
- ArrayList recs = new ArrayList();
-
- for (String nic : nics) {
- try {
- recs.add(query(nic));
- } catch (UnknownHostException e) {
- e.printStackTrace();
- }
- }
-
- EventRecord[] T = new EventRecord[recs.size()];
-
- return recs.toArray(T);
- }
-
- /**
- * Return a String with information about this class
- *
- * @return A String describing this class
- */
- public String getInfo() {
- String retval = "ifconfig parser for interfaces: ";
- for (String nic : nics)
- retval += nic + " ";
- return retval;
- }
-}
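
For orientation, the (hypothetical) ifconfig fragments that query() mines, and
the EventRecord keys they feed:

    // eth0  Link encap:Ethernet  HWaddr 00:1A:2B:3C:4D:5E      -> hwAddress
    //       inet addr:10.0.0.12  Bcast:10.0.0.255  Mask:...    -> ipAddress, status = "UP"
    //       RX packets:123456 errors:0 dropped:0 overruns:0 frame:0
    //       TX packets:654321 errors:0 dropped:0 overruns:0 carrier:0
    //       collisions:0
    //       RX bytes:17825792  TX bytes:35651584               -> rxBytes, txBytes
    //
    // When no "inet addr:" line matches, status is set to "DOWN" instead.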
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/OfflineAnonymizer.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/OfflineAnonymizer.java
deleted file mode 100644
index aaa6b17a9bb..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/OfflineAnonymizer.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileWriter;
-
-/**********************************************************
- * This class can be used to anonymize logs independently of
- * Hadoop and the Executor. It parses the specified log file to
- * create log records for it and then passes them to the Anonymizer.
- * After they are anonymized, they are written to a local file,
- * which is then compressed and stored locally.
- *
- **********************************************************/
-
-public class OfflineAnonymizer {
-
- public enum LogType {
- HADOOP, SYSTEM
- };
-
- LogType logtype;
-
- File logfile;
-
- LogParser parser;
-
- /**
- * Creates an OfflineAnonymizer for a specific log file.
- *
- * @param logtype the type of the log file. This can either be
- * LogType.HADOOP or LogType.SYSTEM
- * @param filename the path to the log file
- *
- */
- public OfflineAnonymizer(LogType logtype, String filename) {
-
- logfile = new File(filename);
-
- if (!logfile.exists()) {
- System.err.println("Input file does not exist!");
- System.exit(0);
- }
-
- if (logtype == LogType.HADOOP)
- parser = new HadoopLogParser(filename);
- else
- parser = new SystemLogParser(filename);
- }
-
- /**
- * Performs anonymization for the log file. Log entries are
- * read one by one and EventRecords are created, which are then
- * anonymized and written to the output.
- *
- */
- public void anonymize() throws Exception {
- EventRecord er = null;
- SerializedRecord sr = null;
-
- BufferedWriter bfw = new BufferedWriter(new FileWriter(logfile.getName()
- + ".anonymized"));
-
- System.out.println("Anonymizing log records...");
- while ((er = parser.getNext()) != null) {
- if (er.isValid()) {
- sr = new SerializedRecord(er);
- Anonymizer.anonymize(sr);
- bfw.write(LocalStore.pack(sr).toString());
- bfw.write(LocalStore.RECORD_SEPARATOR);
- }
- }
- bfw.flush();
- bfw.close();
- System.out.println("Anonymized log records written to " + logfile.getName()
- + ".anonymized");
-
- System.out.println("Compressing output file...");
- LocalStore.zipCompress(logfile.getName() + ".anonymized");
- System.out.println("Compressed output file written to " + logfile.getName()
- + ".anonymized" + LocalStore.COMPRESSION_SUFFIX);
- }
-
- public static void main(String[] args) {
-
- if (args.length < 2) {
- System.out.println("Usage: OfflineAnonymizer ");
- System.out
- .println("where is either \"hadoop\" or \"system\" and is the path to the log file");
- System.exit(0);
- }
-
- LogType logtype = null;
-
- if (args[0].equalsIgnoreCase("-hadoop"))
- logtype = LogType.HADOOP;
- else if (args[0].equalsIgnoreCase("-system"))
- logtype = LogType.SYSTEM;
- else {
- System.err.println("Invalid first argument.");
- System.exit(0);
- }
-
- OfflineAnonymizer oa = new OfflineAnonymizer(logtype, args[1]);
-
- try {
- oa.anonymize();
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- return;
- }
-}
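
A sketch of the intended invocation (paths hypothetical; note that main() expects
the flags -hadoop or -system, not the bare words shown in the usage text):

    // java org.apache.hadoop.contrib.failmon.OfflineAnonymizer -system /var/log/messages
    //
    // writes ./messages.anonymized, then compresses it to
    // ./messages.anonymized.zip via LocalStore.zipCompress().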
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/PersistentState.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/PersistentState.java
deleted file mode 100644
index 022de0b0765..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/PersistentState.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.util.Properties;
-import java.util.Calendar;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-
-/**********************************************************
- * This class takes care of the information that needs to be
- * persistently stored locally on nodes. Bookkeeping is done for the
- * state of parsing of log files, so that the portion of the file that
- * has already been parsed in previous calls will not be parsed again.
- * For each log file, we maintain the byte offset of the last
- * character parsed in previous passes. Also, the first entry in the
- * log file is stored, so that FailMon can determine when a log file
- * has been rotated (and thus parsing needs to start from the
- * beginning of the file). We use a property file to store that
- * information. For each log file we create a property keyed by the
- * filename, the value of which contains the byte offset and first log
- * entry separated by a SEPARATOR.
- *
- **********************************************************/
-
-public class PersistentState {
-
- private final static String SEPARATOR = "###";
-
- static String filename;
- static Properties persData = new Properties();
-
- /**
- * Read the state of parsing for all open log files from a property
- * file.
- *
- * @param fname the filename of the property file to be read
- */
-
- public static void readState(String fname) {
-
- filename = fname;
-
- try {
- persData.load(new FileInputStream(filename));
- } catch (FileNotFoundException e1) {
- // ignore
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
- /**
- * Read and return the state of parsing for a particular log file.
- *
- * @param fname the log file for which to read the state
- */
- public static ParseState getState(String fname) {
- String [] fields = persData.getProperty(fname, "null" + SEPARATOR + "0").split(SEPARATOR, 2);
- String firstLine;
- long offset;
-
- if (fields.length < 2) {
- System.err.println("Malformed persistent state data found");
- Environment.logInfo("Malformed persistent state data found");
- firstLine = null;
- offset = 0;
- } else {
- firstLine = (fields[0].equals("null") ? null : fields[0]);
- offset = Long.parseLong(fields[1]);
- }
-
- return new ParseState(fname, firstLine, offset);
- }
-
- /**
- * Set the state of parsing for a particular log file.
- *
- * @param state the ParseState to set
- */
- public static void setState(ParseState state) {
-
- if (state == null) {
- System.err.println("Null state found");
- Environment.logInfo("Null state found");
- }
-
- persData.setProperty(state.filename, state.firstLine + SEPARATOR + state.offset);
- }
-
- /**
- * Update the state of parsing for a particular log file.
- *
- * @param filename the log file for which to update the state
- * @param firstLine the first line of the log file currently
- * @param offset the byte offset of the last character parsed
- */
- public static void updateState(String filename, String firstLine, long offset) {
-
- ParseState ps = getState(filename);
-
- if (firstLine != null)
- ps.firstLine = firstLine;
-
- ps.offset = offset;
-
- setState(ps);
- }
-
- /**
- * Write the state of parsing for all open log files to a property
- * file on disk.
- *
- * @param fname the filename of the property file to write to
- */
- public static void writeState(String fname) {
- try {
- persData.store(new FileOutputStream(fname), Calendar.getInstance().getTime().toString());
- } catch (FileNotFoundException e1) {
- e1.printStackTrace();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
-
-}
-
-/**********************************************************
- * This class represents the state of parsing for a particular log
- * file.
- *
- **********************************************************/
-
-class ParseState {
-
- public String filename;
- public String firstLine;
- public long offset;
-
- public ParseState(String _filename, String _firstLine, long _offset) {
- this.filename = _filename;
- this.firstLine = _firstLine;
- this.offset = _offset;
- }
-}
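
A sketch of one (hypothetical) entry in the resulting property file, e.g.
conf/parsing.state. The key is the log file's absolute path; the value is the
file's first line and the parsed byte offset, joined by the "###" SEPARATOR
(java.util.Properties escapes the colons on store):

    /var/log/hadoop/hadoop.log=2008-07-01 14\:23\:05,972 INFO ...###81920

If the first line on disk stops matching the stored one, LogParser's
checkForRotation() treats the file as rotated and restarts from offset 0.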
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/RunOnce.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/RunOnce.java
deleted file mode 100644
index 38b1af6363c..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/RunOnce.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.util.ArrayList;
-
-/**********************************************************
-* Runs a set of monitoring jobs once for the local node. The set of
-* jobs to be run is the intersection of the jobs specified in the
-* configuration file and the jobs specified in the --only
-* command-line argument.
- **********************************************************/
-
-public class RunOnce {
-
- LocalStore lstore;
-
- ArrayList<MonitorJob> monitors;
-
- boolean uploading = true;
-
- public RunOnce(String confFile) {
-
- Environment.prepare(confFile);
-
- String localTmpDir;
-
- // running as a stand-alone application
- localTmpDir = System.getProperty("java.io.tmpdir");
- Environment.setProperty("local.tmp.dir", localTmpDir);
-
- monitors = Environment.getJobs();
- lstore = new LocalStore();
- uploading = true;
- }
-
- private void filter (String [] ftypes) {
- ArrayList<MonitorJob> filtered = new ArrayList<MonitorJob>();
- boolean found;
-
- // filter out unwanted monitor jobs
- for (MonitorJob job : monitors) {
- found = false;
- for (String ftype : ftypes)
- if (job.type.equalsIgnoreCase(ftype))
- found = true;
- if (found)
- filtered.add(job);
- }
-
- // disable uploading if not requested
- found = false;
- for (String ftype : ftypes)
- if (ftype.equalsIgnoreCase("upload"))
- found = true;
-
- if (!found)
- uploading = false;
-
- monitors = filtered;
- }
-
- private void run() {
-
- Environment.logInfo("Failmon started successfully.");
-
- for (int i = 0; i < monitors.size(); i++) {
- Environment.logInfo("Calling " + monitors.get(i).job.getInfo() + "...\t");
- monitors.get(i).job.monitor(lstore);
- }
-
- if (uploading)
- lstore.upload();
-
- lstore.close();
- }
-
- public void cleanup() {
- // nothing to be done
- }
-
-
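- /**
- * Entry point. Example invocation (paths and job types are illustrative):
- *
- *   java org.apache.hadoop.contrib.failmon.RunOnce --config ./conf/failmon.properties --only smart,sensors,upload
- */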
- public static void main (String [] args) {
-
- String configFilePath = "./conf/failmon.properties";
- String [] onlyList = null;
-
- // Parse command-line parameters
- for (int i = 0; i < args.length - 1; i++) {
- if (args[i].equalsIgnoreCase("--config"))
- configFilePath = args[i + 1];
- else if (args[i].equalsIgnoreCase("--only"))
- onlyList = args[i + 1].split(",");
- }
-
- RunOnce ro = new RunOnce(configFilePath);
- // only keep the requested types of jobs
- if (onlyList != null)
- ro.filter(onlyList);
- // run once only
- ro.run();
- }
-
-}
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java
deleted file mode 100644
index 82e970c7b4e..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SMARTParser.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**********************************************************
- * Objects of this class parse the output of smartmontools to
- * gather information about the state of disks in the system. The
- * smartmontools utility reads the S.M.A.R.T. attributes from
- * the disk devices and reports them to the user. Note that since
- * running smartctl requires superuser privileges, one should
- * grant the running user sudo privileges for the smartctl command
- * (without a password). Alternatively, one can set up a cron job that
- * periodically dumps the output of smartctl into a user-readable file.
- * See the configuration file for details.
- *
- **********************************************************/
-
-public class SMARTParser extends ShellParser {
-
- String[] devices;
-
- /**
- * Constructs a SMARTParser and reads the list of disk
- * devices to query
- */
- public SMARTParser() {
- super();
- String devicesStr = Environment.getProperty("disks.list");
- System.out.println("skato " + devicesStr);
- if (devicesStr != null)
- devices = devicesStr.split(",\\s*");
- }
-
- /**
- * Reads and parses the output of smartctl for a specified disk and
- * creates an appropriate EventRecord that holds the desirable
- * information for it. Since the output of smartctl is different for
- * different kinds of disks, we try to identify as many attributes as
- * possible for all known output formats.
- *
- * @param device the disk device name to query
- *
- * @return the EventRecord created
- */
- public EventRecord query(String device) throws Exception {
- String conf = Environment.getProperty("disks." + device + ".source");
- CharSequence sb;
-
- if (conf == null)
- sb = Environment.runCommandGeneric("sudo smartctl --all " + device);
- else
- sb = Environment.runCommandGeneric("cat " + conf);
-
- EventRecord retval = new EventRecord(InetAddress.getLocalHost()
- .getCanonicalHostName(), InetAddress.getAllByName(InetAddress.getLocalHost()
- .getHostName()), Calendar.getInstance(), "SMART", "Unknown",
- (conf == null ? "sudo smartctl --all " + device : "file " + conf), "-");
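- // The patterns below target smartctl output lines such as (illustrative sample):
- //   Device: IBM IC35L073UCDY10-0
- //   Serial Number: XXXXXXXXXXXX
- //   SMART Health Status: OK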
- // IBM SCSI disks
- retval.set("model", findPattern("Device\\s*:\\s*(.*)", sb.toString(), 1));
- retval.set("serial", findPattern("Serial\\s+Number\\s*:\\s*(.*)", sb
- .toString(), 1));
- retval.set("firmware", findPattern("Firmware\\s+Version\\s*:\\s*(.*)", sb
- .toString(), 1));
- retval.set("capacity", findPattern("User\\s+Capacity\\s*:\\s*(.*)", sb
- .toString(), 1));
- retval.set("status", findPattern("SMART\\s*Health\\s*Status:\\s*(.*)", sb
- .toString(), 1));
- retval.set("current_temperature", findPattern(
- "Current\\s+Drive\\s+Temperature\\s*:\\s*(.*)", sb.toString(), 1));
- retval.set("trip_temperature", findPattern(
- "Drive\\s+Trip\\s+Temperature\\s*:\\s*(.*)", sb.toString(), 1));
- retval.set("start_stop_count", findPattern(
- "start\\s+stop\\s+count\\s*:\\s*(\\d*)", sb.toString(), 1));
-
- String[] var = { "read", "write", "verify" };
- for (String s : var) {
- retval.set(s + "_ecc_fast", findPattern(s + "\\s*:\\s*(\\d*)", sb
- .toString(), 1));
- retval.set(s + "_ecc_delayed", findPattern(s
- + "\\s*:\\s*(\\d+\\s+){1}(\\d+)", sb.toString(), 2));
- retval.set(s + "_rereads", findPattern(
- s + "\\s*:\\s*(\\d+\\s+){2}(\\d+)", sb.toString(), 2));
- retval.set(s + "_GBs", findPattern(s
- + "\\s*:\\s*(\\d+\\s+){5}(\\d+.?\\d*)", sb.toString(), 2));
- retval.set(s + "_uncorrected",
- findPattern(s + "\\s*:\\s*(\\d+\\s+){5}(\\d+.?\\d*){1}\\s+(\\d+)", sb
- .toString(), 3));
- }
-
- // Hitachi IDE, SATA
- retval.set("model", findPattern("Device\\s*Model\\s*:\\s*(.*)", sb
- .toString(), 1));
- retval.set("serial", findPattern("Serial\\s+number\\s*:\\s*(.*)", sb
- .toString(), 1));
- retval.set("protocol", findPattern("Transport\\s+protocol\\s*:\\s*(.*)", sb
- .toString(), 1));
- retval.set("status", "PASSED".equalsIgnoreCase(findPattern(
- "test\\s*result\\s*:\\s*(.*)", sb.toString(), 1)) ? "OK" : "FAILED");
-
- readColumns(retval, sb);
-
- return retval;
- }
-
- /**
- * Reads attributes in the following format:
- *
- * ID# ATTRIBUTE_NAME FLAG VALUE WORST THRESH TYPE UPDATED WHEN_FAILED RAW_VALUE
- * 3 Spin_Up_Time 0x0027 180 177 063 Pre-fail Always - 10265
- * 4 Start_Stop_Count 0x0032 253 253 000 Old_age Always - 34
- * 5 Reallocated_Sector_Ct 0x0033 253 253 063 Pre-fail Always - 0
- * 6 Read_Channel_Margin 0x0001 253 253 100 Pre-fail Offline - 0
- * 7 Seek_Error_Rate 0x000a 253 252 000 Old_age Always - 0
- * 8 Seek_Time_Performance 0x0027 250 224 187 Pre-fail Always - 53894
- * 9 Power_On_Minutes 0x0032 210 210 000 Old_age Always - 878h+00m
- * 10 Spin_Retry_Count 0x002b 253 252 157 Pre-fail Always - 0
- * 11 Calibration_Retry_Count 0x002b 253 252 223 Pre-fail Always - 0
- * 12 Power_Cycle_Count 0x0032 253 253 000 Old_age Always - 49
- * 192 PowerOff_Retract_Count 0x0032 253 253 000 Old_age Always - 0
- * 193 Load_Cycle_Count 0x0032 253 253 000 Old_age Always - 0
- * 194 Temperature_Celsius 0x0032 037 253 000 Old_age Always - 37
- * 195 Hardware_ECC_Recovered 0x000a 253 252 000 Old_age Always - 2645
- *
- * This format is mostly found in IDE and SATA disks.
- *
- * @param er the EventRecord in which to store attributes found
- * @param sb the text to parse
- *
- * @return the EventRecord in which new attributes are stored.
- */
- private EventRecord readColumns(EventRecord er, CharSequence sb) {
-
- Pattern pattern = Pattern.compile("^\\s{0,2}(\\d{1,3}\\s+.*)$",
- Pattern.MULTILINE);
- Matcher matcher = pattern.matcher(sb);
-
- while (matcher.find()) {
- String[] tokens = matcher.group(1).split("\\s+");
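- // Column layout (see the javadoc above): tokens[8] = WHEN_FAILED, tokens[9] = RAW_VALUE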
- boolean failed = false;
- // check if this attribute is a failed one
- if (!tokens[8].equals("-"))
- failed = true;
- er.set(tokens[1].toLowerCase(), (failed ? "FAILED:" : "") + tokens[9]);
- }
-
- return er;
- }
-
- /**
- * Invokes query() to do the parsing and handles parsing errors for
- * each one of the disks specified in the configuration.
- *
- * @return an array of EventRecords that holds one element that represents
- * the current state of the disk devices.
- */
- public EventRecord[] monitor() {
- ArrayList<EventRecord> recs = new ArrayList<EventRecord>();
-
- for (String device : devices) {
- try {
- recs.add(query(device));
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- return recs.toArray(new EventRecord[recs.size()]);
- }
-
- /**
- * Return a String with information about this class
- *
- * @return A String describing this class
- */
- public String getInfo() {
- String retval = "S.M.A.R.T. disk attributes parser for disks ";
- for (String device : devices)
- retval += device + " ";
- return retval;
- }
-
-}
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java
deleted file mode 100644
index 0d0498bc00c..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SensorsParser.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.net.InetAddress;
-import java.util.Calendar;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**********************************************************
- * Objects of this class parse the output of the lm-sensors utility
- * to gather information about fan speed, temperatures for cpus
- * and motherboard etc.
- *
- **********************************************************/
-
-public class SensorsParser extends ShellParser {
-
- /**
- * Reads and parses the output of the 'sensors' command
- * and creates an appropriate EventRecord that holds
- * the desirable information.
- *
- * @param s unused parameter
- *
- * @return the EventRecord created
- */
- public EventRecord query(String s) throws Exception {
- CharSequence sb;
-
- sb = Environment.runCommandGeneric("sensors -A");
-
- EventRecord retval = new EventRecord(InetAddress.getLocalHost()
- .getCanonicalHostName(), InetAddress.getAllByName(InetAddress.getLocalHost()
- .getHostName()), Calendar.getInstance(), "lm-sensors", "Unknown",
- "sensors -A", "-");
- readGroup(retval, sb, "fan");
- readGroup(retval, sb, "in");
- readGroup(retval, sb, "temp");
- readGroup(retval, sb, "Core");
-
- return retval;
- }
-
- /**
- * Reads and parses lines that provide the output
- * of a group of sensors with the same functionality.
- *
- * @param er the EventRecord to which the new attributes are added
- * @param sb the text to parse
- * @param prefix a String prefix specifying the common prefix of the
- * sensors' names in the group (e.g. "fan", "in", "temp")
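- *
- * Lines matched look like the following (illustrative lm-sensors output):
- *   fan1:     2667 RPM
- *   temp1:    +36 C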
- *
- * @return the EventRecord, with the newly-found attributes added
- */
- private EventRecord readGroup(EventRecord er, CharSequence sb, String prefix) {
-
- Pattern pattern = Pattern.compile(".*(" + prefix
- + "\\s*\\d*)\\s*:\\s*(\\+?\\d+)", Pattern.MULTILINE);
- Matcher matcher = pattern.matcher(sb);
-
- while (matcher.find())
- er.set(matcher.group(1), matcher.group(2));
-
- return er;
- }
-
- /**
- * Invokes query() to do the parsing and handles parsing errors.
- *
- * @return an array of EventRecords that holds one element that represents
- * the current state of the hardware sensors
- */
- public EventRecord[] monitor() {
- EventRecord[] recs = new EventRecord[1];
-
- try {
- recs[0] = query(null);
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- return recs;
- }
-
- /**
- * Return a String with information about this class
- *
- * @return A String describing this class
- */
- public String getInfo() {
- return ("lm-sensors parser");
- }
-
-}
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SerializedRecord.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SerializedRecord.java
deleted file mode 100644
index b39cd78be8e..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SerializedRecord.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.net.InetAddress;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.HashMap;
-import java.text.DateFormat;
-
-/**********************************************************
- * Objects of this class hold the serialized representations
- * of EventRecords. A SerializedRecord is essentially an EventRecord
- * with all its property values converted to strings. It also provides
- * some convenience methods for printing the property fields in a
- * more readable way.
- *
- **********************************************************/
-
-public class SerializedRecord {
-
- HashMap<String, String> fields;
- private static DateFormat dateFormatter =
- DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.LONG);
-
- /**
- * Create the SerializedRecord given an EventRecord.
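- * Multi-valued properties (e.g., an InetAddress[]) are stored under
- * keys suffixed with "#0", "#1", etc.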
- */
-
- public SerializedRecord(EventRecord source) {
- fields = new HashMap<String, String>();
-
- for (String k : source.getMap().keySet()) {
- ArrayList<String> strs = getStrings(source.getMap().get(k));
- if (strs.size() == 1)
- fields.put(k, strs.get(0));
- else
- for (int i = 0; i < strs.size(); i++)
- fields.put(k + "#" + i, strs.get(i));
- }
-
- }
-
- /**
- * Extract String representations from an Object.
- *
- * @param o the input object
- *
- * @return an ArrayList that contains Strings found in o
- */
- private ArrayList<String> getStrings(Object o) {
- ArrayList<String> retval = new ArrayList<String>();
- if (o == null)
- retval.add("null");
- else if (o instanceof String)
- retval.add((String) o);
- else if (o instanceof Calendar)
- retval.add(dateFormatter.format(((Calendar) o).getTime()));
- else if (o instanceof InetAddress[])
- for (InetAddress ip : ((InetAddress[]) o))
- retval.add(ip.getHostAddress());
- else if (o instanceof String[])
- for (String s : (String []) o)
- retval.add(s);
- else
- retval.add(o.toString());
-
- return retval;
- }
-
- /**
- * Set the value of a property of the EventRecord.
- *
- * @param fieldName the name of the property to set
- * @param fieldValue the value of the property to set
- *
- */
- public void set(String fieldName, String fieldValue) {
- fields.put(fieldName, fieldValue);
- }
-
- /**
- * Get the value of a property of the EventRecord.
- * If the property with the specific key is not found,
- * null is returned.
- *
- * @param fieldName the name of the property to get.
- */
- public String get(String fieldName) {
- return fields.get(fieldName);
- }
-
- /**
- * Arrange the keys to provide a more readable printing order:
- * first goes the timestamp, then the hostname and then the type, followed
- * by all other keys found.
- *
- * @param keys The input ArrayList of keys to re-arrange.
- */
- public static void arrangeKeys(ArrayList<String> keys) {
- move(keys, "timestamp", 0);
- move(keys, "hostname", 1);
- move(keys, "type", 2);
- }
-
- private static void move(ArrayList<String> keys, String key, int position) {
- int cur = keys.indexOf(key);
- if (cur == -1)
- return;
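- // swap the key into the requested position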
- keys.set(cur, keys.get(position));
- keys.set(position, key);
- }
-
- /**
- * Check if the SerializedRecord is a valid one, i.e., whether
- * it represents meaningful metric values.
- *
- * @return true if the EventRecord is a valid one, false otherwise.
- */
- public boolean isValid() {
- return !("invalid".equalsIgnoreCase(fields.get("hostname")));
- }
-
-
- /**
- * Creates and returns a string representation of the object.
- *
- * @return a String representing the object
- */
-
- public String toString() {
- StringBuilder retval = new StringBuilder();
- ArrayList<String> keys = new ArrayList<String>(fields.keySet());
- arrangeKeys(keys);
-
- for (int i = 0; i < keys.size(); i++) {
- String value = fields.get(keys.get(i));
- retval.append(keys.get(i)).append(":\t").append(value == null ? "null" : value).append("\n");
- }
- return retval.toString();
- }
-}
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/ShellParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/ShellParser.java
deleted file mode 100644
index 4a07418ad90..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/ShellParser.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**********************************************************
- * Objects of this class parse the output of system command-line
- * utilities that can give information about the state of
- * various hardware components in the system. Typically, each such
- * object either invokes a command and reads its output or reads the
- * output of one such command from a file on the disk. Currently
- * supported sources include ifconfig, smartmontools, lm-sensors,
- * and /proc/cpuinfo.
- *
- **********************************************************/
-
-public abstract class ShellParser implements Monitored {
-
- /**
- * Find the first occurrence of a pattern in a piece of text
- * and return a specific group.
- *
- * @param strPattern the regular expression to match
- * @param text the text to search
- * @param grp the number of the matching group to return
- *
- * @return a String containing the matched group of the regular expression, or null if no match is found
- */
- protected String findPattern(String strPattern, String text, int grp) {
-
- Pattern pattern = Pattern.compile(strPattern, Pattern.MULTILINE);
- Matcher matcher = pattern.matcher(text);
-
- if (matcher.find(0))
- return matcher.group(grp);
-
- return null;
- }
-
- /**
- * Finds all occurrences of a pattern in a piece of text and returns
- * the matching groups.
- *
- * @param strPattern the regular expression to match
- * @param text the text to search
- * @param grp the number of the matching group to return
- * @param separator the string that separates occurrences in the returned value
- *
- * @return a String that contains all occurrences of strPattern in text,
- * separated by separator
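- *
- * For example (illustrative): findAll("(\\d+)", "a1 b22", 1, ",") returns "1,22".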
- */
- protected String findAll(String strPattern, String text, int grp,
- String separator) {
-
- String retval = "";
- boolean firstTime = true;
-
- Pattern pattern = Pattern.compile(strPattern);
- Matcher matcher = pattern.matcher(text);
-
- while (matcher.find()) {
- retval += (firstTime ? "" : separator) + matcher.group(grp);
- firstTime = false;
- }
-
- return retval;
- }
-
- /**
- * Insert all EventRecords that can be extracted for
- * the represented hardware component into a LocalStore.
- *
- * @param ls the LocalStore into which the EventRecords
- * are to be stored.
- */
- public void monitor(LocalStore ls) {
- ls.insert(monitor());
- }
-
- abstract public EventRecord[] monitor();
-
- abstract public EventRecord query(String s) throws Exception;
-
-}
diff --git a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SystemLogParser.java b/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SystemLogParser.java
deleted file mode 100644
index 0ef7ba0e382..00000000000
--- a/src/contrib/failmon/src/java/org/apache/hadoop/contrib/failmon/SystemLogParser.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.contrib.failmon;
-
-import java.io.IOException;
-import java.util.Calendar;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**********************************************************
- * An object of this class parses a Unix system log file to create
- * appropriate EventRecords. Currently, only the syslogd logging
- * daemon is supported.
- *
- **********************************************************/
-
-public class SystemLogParser extends LogParser {
-
- static String[] months = { "January", "February", "March", "April", "May",
- "June", "July", "August", "September", "October", "November", "December" };
- /**
- * Create a new parser object.
- *
- * @param fname the name of the log file to parse
- */
- public SystemLogParser(String fname) {
- super(fname);
- if ((dateformat = Environment.getProperty("log.system.dateformat")) == null)
- dateformat = "(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\\s+(\\d+)";
- if ((timeformat = Environment.getProperty("log.system.timeformat")) == null)
- timeformat = "\\d{2}:\\d{2}:\\d{2}";
- }
-
- /**
- * Parses one line of the log. If the line contains a valid
- * log entry, then an appropriate EventRecord is returned, after all
- * relevant fields have been parsed.
- *
- * @param line the log line to be parsed
- *
- * @return the EventRecord representing the log entry of the line. If
- * the line does not contain a valid log entry, then the EventRecord
- * returned has isValid() = false. When the end-of-file has been reached,
- * null is returned to the caller.
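- *
- * A line that parses successfully looks like (illustrative):
- *   Jun 12 03:04:05 host1 kernel: journal commit I/O error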
- */
- public EventRecord parseLine(String line) throws IOException {
-
- EventRecord retval = null;
-
- if (line != null) {
- // process line
- String patternStr = "(" + dateformat + ")";
- patternStr += "\\s+";
- patternStr += "(" + timeformat + ")";
- patternStr += "\\s+(\\S*)\\s"; // for hostname
-// patternStr += "\\s*([\\w+\\.?]+)"; // for source
- patternStr += ":?\\s*(.+)"; // for the message
- Pattern pattern = Pattern.compile(patternStr);
- Matcher matcher = pattern.matcher(line);
- if (matcher.find()) {
- retval = new EventRecord(hostname, ips, parseDate(matcher.group(1),
- matcher.group(4)), "SystemLog", "Unknown", // loglevel
- "Unknown", // source
- matcher.group(6)); // message
- } else {
- retval = new EventRecord();
- }
- }
-
- return retval;
- }
-
- /**
- * Parse a date found in the system log.
- *
- * @return a Calendar representing the date
- */
- protected Calendar parseDate(String strDate, String strTime) {
- Calendar retval = Calendar.getInstance();
- // set date
- String[] fields = strDate.split("\\s+");
- retval.set(Calendar.MONTH, parseMonth(fields[0]));
- retval.set(Calendar.DATE, Integer.parseInt(fields[1]));
- // set time
- fields = strTime.split(":");
- retval.set(Calendar.HOUR_OF_DAY, Integer.parseInt(fields[0]));
- retval.set(Calendar.MINUTE, Integer.parseInt(fields[1]));
- retval.set(Calendar.SECOND, Integer.parseInt(fields[2]));
- return retval;
- }
-
- /**
- * Convert the name of a month to the corresponding int value.
- *
- * @return the int representation of the month.
- */
- private int parseMonth(String month) {
- for (int i = 0; i < months.length; i++)
- if (months[i].startsWith(month))
- return i;
- return -1;
- }
-
- /**
- * Return a String with information about this class
- *
- * @return A String describing this class
- */
- public String getInfo() {
- return ("System Log Parser for file : " + file.getAbsoluteFile());
- }
-}