HBASE-1021 hbase metrics FileContext not working

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@720286 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2008-11-24 20:47:58 +00:00
parent 69be09bed5
commit c370e1cab2
3 changed files with 111 additions and 4 deletions

@@ -81,6 +81,7 @@ Release 0.19.0 - Unreleased
during a memcache flush
HBASE-1009 Master stuck in loop wanting to assign but regions are closing
HBASE-1016 Fix example in javadoc overview
HBASE-1021 hbase metrics FileContext not working
IMPROVEMENTS
HBASE-901 Add a limit to key length, check key and value length on client side

@@ -8,7 +8,7 @@
hbase.class=org.apache.hadoop.metrics.spi.NullContext
# Configuration of the "hbase" context for file
# hbase.class=org.apache.hadoop.metrics.file.FileContext
# hbase.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
# hbase.period=10
# hbase.fileName=/tmp/metrics_hbase.log
@@ -21,7 +21,7 @@ hbase.class=org.apache.hadoop.metrics.spi.NullContext
jvm.class=org.apache.hadoop.metrics.spi.NullContext
# Configuration of the "jvm" context for file
# jvm.class=org.apache.hadoop.metrics.file.FileContext
# jvm.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
# jvm.period=10
# jvm.fileName=/tmp/metrics_jvm.log
@@ -31,10 +31,10 @@ jvm.class=org.apache.hadoop.metrics.spi.NullContext
# jvm.servers=GMETADHOST_IP:8649
# Configuration of the "rpc" context for null
hbase.class=org.apache.hadoop.metrics.spi.NullContext
rpc.class=org.apache.hadoop.metrics.spi.NullContext
# Configuration of the "rpc" context for file
# rpc.class=org.apache.hadoop.metrics.file.FileContext
# rpc.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
# rpc.period=10
# rpc.fileName=/tmp/metrics_rpc.log
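To have HBase actually write timestamped metrics, the commented "file" lines above would be switched to the new class. A minimal sketch for the "hbase" context, reusing the period and file name already shown (the NullContext line for that context would be commented out):

hbase.class=org.apache.hadoop.hbase.metrics.file.TimeStampingFileContext
hbase.period=10
hbase.fileName=/tmp/metrics_hbase.log

The jvm and rpc contexts follow the same pattern with their own period and fileName properties.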

@@ -0,0 +1,106 @@
/**
* Copyright 2008 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.metrics.file;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.file.FileContext;
import org.apache.hadoop.metrics.spi.OutputRecord;

/**
* Add timestamp to {@link org.apache.hadoop.metrics.file.FileContext#emitRecord(String, String, OutputRecord)}.
*/
public class TimeStampingFileContext extends FileContext {
  // Copies of FileContext's file and writer are kept here because those
  // fields are private in the superclass.
  private File file = null;
  private PrintWriter writer = null;
  private final SimpleDateFormat sdf;

  public TimeStampingFileContext() {
    super();
    // Timestamp pattern, e.g. 2008-11-24T20:47:58, formatted in the JVM's
    // default time zone.
    this.sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
  }

  @Override
  public void init(String contextName, ContextFactory factory) {
    super.init(contextName, factory);
    String fileName = getAttribute(FILE_NAME_PROPERTY);
    if (fileName != null) {
      file = new File(fileName);
    }
  }

  @Override
  public void startMonitoring() throws IOException {
    // Fall back to standard output when no file name has been configured.
    if (file == null) {
      writer = new PrintWriter(new BufferedOutputStream(System.out));
    } else {
      writer = new PrintWriter(new FileWriter(file, true));
    }
    super.startMonitoring();
  }

  @Override
  public void stopMonitoring() {
    super.stopMonitoring();
    if (writer != null) {
      writer.close();
      writer = null;
    }
  }

  // SimpleDateFormat is not thread-safe, so formatting is synchronized.
  private synchronized String iso8601() {
    return this.sdf.format(new Date());
  }

  @Override
  public void emitRecord(String contextName, String recordName,
      OutputRecord outRec) {
    // One line per record: timestamp, context.record, then comma-separated
    // tag and metric name=value pairs.
    writer.print(iso8601());
    writer.print(" ");
    writer.print(contextName);
    writer.print(".");
    writer.print(recordName);
    String separator = ": ";
    for (String tagName : outRec.getTagNames()) {
      writer.print(separator);
      separator = ", ";
      writer.print(tagName);
      writer.print("=");
      writer.print(outRec.getTag(tagName));
    }
    for (String metricName : outRec.getMetricNames()) {
      writer.print(separator);
      separator = ", ";
      writer.print(metricName);
      writer.print("=");
      writer.print(outRec.getMetric(metricName));
    }
    writer.println();
  }

  @Override
  public void flush() {
    writer.flush();
  }
}
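For reference, every record emitted through this context becomes one line starting with the formatted timestamp, then context.record, then comma-separated name=value pairs for tags and metrics. A hypothetical line for an "hbase" context record (the record, tag, and metric names are illustrative, not taken from the patch):

2008-11-24T20:47:58 hbase.regionserver: hostName=example-host, requests=0, storefiles=1

Because the yyyy-MM-dd'T'HH:mm:ss pattern is formatted in the JVM's default time zone, these lines sort and correlate naturally with ordinary local-time log files on the same host.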