HADOOP-12504. Remove metrics v1. (aajisaka)
parent 355325bcc7
commit 36972d61d1
@@ -1,75 +0,0 @@
# Configuration of the "dfs" context for null
dfs.class=org.apache.hadoop.metrics.spi.NullContext

# Configuration of the "dfs" context for file
#dfs.class=org.apache.hadoop.metrics.file.FileContext
#dfs.period=10
#dfs.fileName=/tmp/dfsmetrics.log

# Configuration of the "dfs" context for ganglia
# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
# dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# dfs.period=10
# dfs.servers=localhost:8649


# Configuration of the "mapred" context for null
mapred.class=org.apache.hadoop.metrics.spi.NullContext

# Configuration of the "mapred" context for file
#mapred.class=org.apache.hadoop.metrics.file.FileContext
#mapred.period=10
#mapred.fileName=/tmp/mrmetrics.log

# Configuration of the "mapred" context for ganglia
# Pick one: Ganglia 3.0 (former) or Ganglia 3.1 (latter)
# mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# mapred.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# mapred.period=10
# mapred.servers=localhost:8649


# Configuration of the "jvm" context for null
#jvm.class=org.apache.hadoop.metrics.spi.NullContext

# Configuration of the "jvm" context for file
#jvm.class=org.apache.hadoop.metrics.file.FileContext
#jvm.period=10
#jvm.fileName=/tmp/jvmmetrics.log

# Configuration of the "jvm" context for ganglia
# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# jvm.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# jvm.period=10
# jvm.servers=localhost:8649

# Configuration of the "rpc" context for null
rpc.class=org.apache.hadoop.metrics.spi.NullContext

# Configuration of the "rpc" context for file
#rpc.class=org.apache.hadoop.metrics.file.FileContext
#rpc.period=10
#rpc.fileName=/tmp/rpcmetrics.log

# Configuration of the "rpc" context for ganglia
# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# rpc.period=10
# rpc.servers=localhost:8649


# Configuration of the "ugi" context for null
ugi.class=org.apache.hadoop.metrics.spi.NullContext

# Configuration of the "ugi" context for file
#ugi.class=org.apache.hadoop.metrics.file.FileContext
#ugi.period=10
#ugi.fileName=/tmp/ugimetrics.log

# Configuration of the "ugi" context for ganglia
# ugi.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# ugi.class=org.apache.hadoop.metrics.ganglia.GangliaContext31
# ugi.period=10
# ugi.servers=localhost:8649
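With the v1 framework removed, the equivalent configuration lives in hadoop-metrics2.properties, where sinks are attached to metric sources rather than to per-daemon contexts. A minimal sketch of a file-based setup is below; the sink name "file" and the output file names are illustrative choices, not values required by this commit. Ganglia reporting has a similar sink configuration, sketched after the GangliaContext diff further down.

# Sketch of an equivalent metrics2 setup (hadoop-metrics2.properties).
# "file" is an arbitrary sink name chosen for this example.
*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
# Default polling period for all sinks, in seconds.
*.period=10
# Per-daemon output files (file names are illustrative).
namenode.sink.file.filename=namenode-metrics.out
datanode.sink.file.filename=datanode-metrics.out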
@@ -59,7 +59,6 @@ import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
import org.apache.hadoop.security.ssl.SslSocketConnectorSecure;
import org.apache.hadoop.jmx.JMXJsonServlet;
import org.apache.hadoop.log.LogLevel;
import org.apache.hadoop.metrics.MetricsServlet;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;

@@ -572,14 +571,11 @@ public final class HttpServer2 implements FilterContainer {

  /**
   * Add default servlets.
   * Note: /metrics servlet will be removed in 3.X release.
   */
  @SuppressWarnings("deprecation")
  protected void addDefaultServlets() {
    // set up default servlets
    addServlet("stacks", "/stacks", StackServlet.class);
    addServlet("logLevel", "/logLevel", LogLevel.Servlet.class);
    addServlet("metrics", "/metrics", MetricsServlet.class);
    addServlet("jmx", "/jmx", JMXJsonServlet.class);
    addServlet("conf", "/conf", ConfServlet.class);
  }
@@ -1,214 +0,0 @@
|
|||
/*
|
||||
* ContextFactory.java
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.metrics;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.metrics.spi.NullContext;
|
||||
|
||||
/**
|
||||
* Factory class for creating MetricsContext objects. To obtain an instance
|
||||
* of this class, use the static <code>getFactory()</code> method.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
||||
@InterfaceStability.Evolving
|
||||
public class ContextFactory {
|
||||
|
||||
private static final String PROPERTIES_FILE =
|
||||
"/hadoop-metrics.properties";
|
||||
private static final String CONTEXT_CLASS_SUFFIX =
|
||||
".class";
|
||||
private static final String DEFAULT_CONTEXT_CLASSNAME =
|
||||
"org.apache.hadoop.metrics.spi.NullContext";
|
||||
|
||||
private static ContextFactory theFactory = null;
|
||||
|
||||
private Map<String,Object> attributeMap = new HashMap<String,Object>();
|
||||
private Map<String,MetricsContext> contextMap =
|
||||
new HashMap<String,MetricsContext>();
|
||||
|
||||
// Used only when contexts, or the ContextFactory itself, cannot be
|
||||
// created.
|
||||
private static Map<String,MetricsContext> nullContextMap =
|
||||
new HashMap<String,MetricsContext>();
|
||||
|
||||
/** Creates a new instance of ContextFactory */
|
||||
protected ContextFactory() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the value of the named attribute, or null if there is no
|
||||
* attribute of that name.
|
||||
*
|
||||
* @param attributeName the attribute name
|
||||
* @return the attribute value
|
||||
*/
|
||||
public Object getAttribute(String attributeName) {
|
||||
return attributeMap.get(attributeName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the names of all the factory's attributes.
|
||||
*
|
||||
* @return the attribute names
|
||||
*/
|
||||
public String[] getAttributeNames() {
|
||||
String[] result = new String[attributeMap.size()];
|
||||
int i = 0;
|
||||
// for (String attributeName : attributeMap.keySet()) {
|
||||
Iterator it = attributeMap.keySet().iterator();
|
||||
while (it.hasNext()) {
|
||||
result[i++] = (String) it.next();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named factory attribute to the specified value, creating it
|
||||
* if it did not already exist. If the value is null, this is the same as
|
||||
* calling removeAttribute.
|
||||
*
|
||||
* @param attributeName the attribute name
|
||||
* @param value the new attribute value
|
||||
*/
|
||||
public void setAttribute(String attributeName, Object value) {
|
||||
attributeMap.put(attributeName, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the named attribute if it exists.
|
||||
*
|
||||
* @param attributeName the attribute name
|
||||
*/
|
||||
public void removeAttribute(String attributeName) {
|
||||
attributeMap.remove(attributeName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the named MetricsContext instance, constructing it if necessary
|
||||
* using the factory's current configuration attributes. <p/>
|
||||
*
|
||||
* When constructing the instance, if the factory property
|
||||
* <code><i>contextName</i>.class</code> exists,
|
||||
* its value is taken to be the name of the class to instantiate. Otherwise,
|
||||
* the default is to create an instance of
|
||||
* <code>org.apache.hadoop.metrics.spi.NullContext</code>, which is a
|
||||
* dummy "no-op" context which will cause all metric data to be discarded.
|
||||
*
|
||||
* @param contextName the name of the context
|
||||
* @return the named MetricsContext
|
||||
*/
|
||||
public synchronized MetricsContext getContext(String refName, String contextName)
|
||||
throws IOException, ClassNotFoundException,
|
||||
InstantiationException, IllegalAccessException {
|
||||
MetricsContext metricsContext = contextMap.get(refName);
|
||||
if (metricsContext == null) {
|
||||
String classNameAttribute = refName + CONTEXT_CLASS_SUFFIX;
|
||||
String className = (String) getAttribute(classNameAttribute);
|
||||
if (className == null) {
|
||||
className = DEFAULT_CONTEXT_CLASSNAME;
|
||||
}
|
||||
Class contextClass = Class.forName(className);
|
||||
metricsContext = (MetricsContext) contextClass.newInstance();
|
||||
metricsContext.init(contextName, this);
|
||||
contextMap.put(contextName, metricsContext);
|
||||
}
|
||||
return metricsContext;
|
||||
}
|
||||
|
||||
public synchronized MetricsContext getContext(String contextName)
|
||||
throws IOException, ClassNotFoundException, InstantiationException,
|
||||
IllegalAccessException {
|
||||
return getContext(contextName, contextName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all MetricsContexts built by this factory.
|
||||
*/
|
||||
public synchronized Collection<MetricsContext> getAllContexts() {
|
||||
// Make a copy to avoid race conditions with creating new contexts.
|
||||
return new ArrayList<MetricsContext>(contextMap.values());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a "null" context - one which does nothing.
|
||||
*/
|
||||
public static synchronized MetricsContext getNullContext(String contextName) {
|
||||
MetricsContext nullContext = nullContextMap.get(contextName);
|
||||
if (nullContext == null) {
|
||||
nullContext = new NullContext();
|
||||
nullContextMap.put(contextName, nullContext);
|
||||
}
|
||||
return nullContext;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the singleton ContextFactory instance, constructing it if
|
||||
* necessary. <p/>
|
||||
*
|
||||
* When the instance is constructed, this method checks if the file
|
||||
* <code>hadoop-metrics.properties</code> exists on the class path. If it
|
||||
* exists, it must be in the format defined by java.util.Properties, and all
|
||||
* the properties in the file are set as attributes on the newly created
|
||||
* ContextFactory instance.
|
||||
*
|
||||
* @return the singleton ContextFactory instance
|
||||
*/
|
||||
public static synchronized ContextFactory getFactory() throws IOException {
|
||||
if (theFactory == null) {
|
||||
theFactory = new ContextFactory();
|
||||
theFactory.setAttributes();
|
||||
}
|
||||
return theFactory;
|
||||
}
|
||||
|
||||
private void setAttributes() throws IOException {
|
||||
InputStream is = getClass().getResourceAsStream(PROPERTIES_FILE);
|
||||
if (is != null) {
|
||||
try {
|
||||
Properties properties = new Properties();
|
||||
properties.load(is);
|
||||
//for (Object propertyNameObj : properties.keySet()) {
|
||||
Iterator it = properties.keySet().iterator();
|
||||
while (it.hasNext()) {
|
||||
String propertyName = (String) it.next();
|
||||
String propertyValue = properties.getProperty(propertyName);
|
||||
setAttribute(propertyName, propertyValue);
|
||||
}
|
||||
} finally {
|
||||
is.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
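ContextFactory's job above, loading /hadoop-metrics.properties from the classpath and handing out context instances, is taken over in metrics2 by DefaultMetricsSystem, which reads hadoop-metrics2.properties (or hadoop-metrics2-<prefix>.properties). A minimal bootstrap sketch, with an illustrative prefix:

// Hedged sketch: bootstrapping metrics2 instead of ContextFactory.getFactory().
// The prefix "example" is an arbitrary choice; Hadoop daemons pass their own
// process name (e.g. "NameNode").
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;

public class MetricsBootstrapSketch {
  public static void main(String[] args) {
    // Reads hadoop-metrics2.properties from the classpath and starts the
    // configured sinks.
    MetricsSystem ms = DefaultMetricsSystem.initialize("example");
    // ... register sources with ms and run the daemon ...
    DefaultMetricsSystem.shutdown();
  }
}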
@@ -1,125 +0,0 @@
/*
 * MetricsContext.java
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.metrics;

import java.io.IOException;
import java.util.Collection;
import java.util.Map;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics.spi.OutputRecord;

/**
 * The main interface to the metrics package.
 *
 * @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
@Deprecated
@InterfaceAudience.Private
@InterfaceStability.Evolving
public interface MetricsContext {

  /**
   * Default period in seconds at which data is sent to the metrics system.
   */
  public static final int DEFAULT_PERIOD = 5;

  /**
   * Initialize this context.
   * @param contextName The given name for this context
   * @param factory The creator of this context
   */
  public void init(String contextName, ContextFactory factory);

  /**
   * Returns the context name.
   *
   * @return the context name
   */
  public abstract String getContextName();

  /**
   * Starts or restarts monitoring, the emitting of metrics records as they are
   * updated.
   */
  public abstract void startMonitoring()
      throws IOException;

  /**
   * Stops monitoring. This does not free any data that the implementation
   * may have buffered for sending at the next timer event. It
   * is OK to call <code>startMonitoring()</code> again after calling
   * this.
   * @see #close()
   */
  public abstract void stopMonitoring();

  /**
   * Returns true if monitoring is currently in progress.
   */
  public abstract boolean isMonitoring();

  /**
   * Stops monitoring and also frees any buffered data, returning this
   * object to its initial state.
   */
  public abstract void close();

  /**
   * Creates a new MetricsRecord instance with the given <code>recordName</code>.
   * Throws an exception if the metrics implementation is configured with a fixed
   * set of record names and <code>recordName</code> is not in that set.
   *
   * @param recordName the name of the record
   * @throws MetricsException if recordName conflicts with configuration data
   */
  public abstract MetricsRecord createRecord(String recordName);

  /**
   * Registers a callback to be called at regular time intervals, as
   * determined by the implementation-class specific configuration.
   *
   * @param updater object to be run periodically; it should update
   * some metrics records and then return
   */
  public abstract void registerUpdater(Updater updater);

  /**
   * Removes a callback, if it exists.
   *
   * @param updater object to be removed from the callback list
   */
  public abstract void unregisterUpdater(Updater updater);

  /**
   * Returns the timer period.
   */
  public abstract int getPeriod();

  /**
   * Retrieves all the records managed by this MetricsContext.
   * Useful for monitoring systems that are polling-based.
   *
   * @return A non-null map from all record names to the records managed.
   */
  Map<String, Collection<OutputRecord>> getAllRecords();
}
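The registerUpdater()/Updater callback pattern above maps onto metrics2's pull model: a MetricsSource is asked for a snapshot on each period via getMetrics(). A minimal sketch, with illustrative record and metric names:

// Hedged sketch: the metrics2 analogue of registerUpdater()/Updater is a
// MetricsSource whose getMetrics() is invoked on each snapshot.
import org.apache.hadoop.metrics2.MetricsCollector;
import org.apache.hadoop.metrics2.MetricsSource;
import org.apache.hadoop.metrics2.lib.Interns;

public class PollingSourceSketch implements MetricsSource {
  private long requests; // updated elsewhere by the application

  @Override
  public void getMetrics(MetricsCollector collector, boolean all) {
    collector.addRecord("exampleRecord")        // record name is illustrative
        .setContext("example")                  // context name is illustrative
        .addCounter(Interns.info("requests", "Requests seen so far"), requests);
  }
}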
@@ -1,49 +0,0 @@
/*
 * MetricsException.java
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.metrics;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/**
 * General-purpose, unchecked metrics exception.
 * @deprecated Use {@link org.apache.hadoop.metrics2.MetricsException} instead.
 */
@Deprecated
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public class MetricsException extends RuntimeException {

  private static final long serialVersionUID = -1643257498540498497L;

  /** Creates a new instance of MetricsException */
  public MetricsException() {
  }

  /** Creates a new instance of MetricsException
   *
   * @param message an error message
   */
  public MetricsException(String message) {
    super(message);
  }

}
@@ -1,254 +0,0 @@
|
|||
/*
|
||||
* MetricsRecord.java
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.metrics;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
|
||||
/**
|
||||
* A named and optionally tagged set of records to be sent to the metrics
|
||||
* system. <p/>
|
||||
*
|
||||
* A record name identifies the kind of data to be reported. For example, a
|
||||
* program reporting statistics relating to the disks on a computer might use
|
||||
* a record name "diskStats".<p/>
|
||||
*
|
||||
* A record has zero or more <i>tags</i>. A tag has a name and a value. To
|
||||
* continue the example, the "diskStats" record might use a tag named
|
||||
* "diskName" to identify a particular disk. Sometimes it is useful to have
|
||||
* more than one tag, so there might also be a "diskType" with value "ide" or
|
||||
* "scsi" or whatever.<p/>
|
||||
*
|
||||
* A record also has zero or more <i>metrics</i>. These are the named
|
||||
* values that are to be reported to the metrics system. In the "diskStats"
|
||||
* example, possible metric names would be "diskPercentFull", "diskPercentBusy",
|
||||
* "kbReadPerSecond", etc.<p/>
|
||||
*
|
||||
* The general procedure for using a MetricsRecord is to fill in its tag and
|
||||
* metric values, and then call <code>update()</code> to pass the record to the
|
||||
* client library.
|
||||
* Metric data is not immediately sent to the metrics system
|
||||
* each time that <code>update()</code> is called.
|
||||
* An internal table is maintained, identified by the record name. This
|
||||
* table has columns
|
||||
* corresponding to the tag and the metric names, and rows
|
||||
* corresponding to each unique set of tag values. An update
|
||||
* either modifies an existing row in the table, or adds a new row with a set of
|
||||
* tag values that are different from all the other rows. Note that if there
|
||||
* are no tags, then there can be at most one row in the table. <p/>
|
||||
*
|
||||
* Once a row is added to the table, its data will be sent to the metrics system
|
||||
* on every timer period, whether or not it has been updated since the previous
|
||||
* timer period. If this is inappropriate, for example if metrics were being
|
||||
* reported by some transient object in an application, the <code>remove()</code>
|
||||
* method can be used to remove the row and thus stop the data from being
|
||||
* sent.<p/>
|
||||
*
|
||||
* Note that the <code>update()</code> method is atomic. This means that it is
|
||||
* safe for different threads to be updating the same metric. More precisely,
|
||||
* it is OK for different threads to call <code>update()</code> on MetricsRecord instances
|
||||
* with the same set of tag names and tag values. Different threads should
|
||||
* <b>not</b> use the same MetricsRecord instance at the same time.
|
||||
*
|
||||
* @deprecated Use {@link org.apache.hadoop.metrics2.MetricsRecord} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
public interface MetricsRecord {
|
||||
|
||||
/**
|
||||
* Returns the record name.
|
||||
*
|
||||
* @return the record name
|
||||
*/
|
||||
public abstract String getRecordName();
|
||||
|
||||
/**
|
||||
* Sets the named tag to the specified value. The tagValue may be null,
|
||||
* which is treated the same as an empty String.
|
||||
*
|
||||
* @param tagName name of the tag
|
||||
* @param tagValue new value of the tag
|
||||
* @throws MetricsException if the tagName conflicts with the configuration
|
||||
*/
|
||||
public abstract void setTag(String tagName, String tagValue);
|
||||
|
||||
/**
|
||||
* Sets the named tag to the specified value.
|
||||
*
|
||||
* @param tagName name of the tag
|
||||
* @param tagValue new value of the tag
|
||||
* @throws MetricsException if the tagName conflicts with the configuration
|
||||
*/
|
||||
public abstract void setTag(String tagName, int tagValue);
|
||||
|
||||
/**
|
||||
* Sets the named tag to the specified value.
|
||||
*
|
||||
* @param tagName name of the tag
|
||||
* @param tagValue new value of the tag
|
||||
* @throws MetricsException if the tagName conflicts with the configuration
|
||||
*/
|
||||
public abstract void setTag(String tagName, long tagValue);
|
||||
|
||||
/**
|
||||
* Sets the named tag to the specified value.
|
||||
*
|
||||
* @param tagName name of the tag
|
||||
* @param tagValue new value of the tag
|
||||
* @throws MetricsException if the tagName conflicts with the configuration
|
||||
*/
|
||||
public abstract void setTag(String tagName, short tagValue);
|
||||
|
||||
/**
|
||||
* Sets the named tag to the specified value.
|
||||
*
|
||||
* @param tagName name of the tag
|
||||
* @param tagValue new value of the tag
|
||||
* @throws MetricsException if the tagName conflicts with the configuration
|
||||
*/
|
||||
public abstract void setTag(String tagName, byte tagValue);
|
||||
|
||||
/**
|
||||
* Removes any tag of the specified name.
|
||||
*
|
||||
* @param tagName name of a tag
|
||||
*/
|
||||
public abstract void removeTag(String tagName);
|
||||
|
||||
/**
|
||||
* Sets the named metric to the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue new value of the metric
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
public abstract void setMetric(String metricName, int metricValue);
|
||||
|
||||
/**
|
||||
* Sets the named metric to the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue new value of the metric
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
public abstract void setMetric(String metricName, long metricValue);
|
||||
|
||||
/**
|
||||
* Sets the named metric to the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue new value of the metric
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
public abstract void setMetric(String metricName, short metricValue);
|
||||
|
||||
/**
|
||||
* Sets the named metric to the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue new value of the metric
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
public abstract void setMetric(String metricName, byte metricValue);
|
||||
|
||||
/**
|
||||
* Sets the named metric to the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue new value of the metric
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
public abstract void setMetric(String metricName, float metricValue);
|
||||
|
||||
/**
|
||||
* Increments the named metric by the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue incremental value
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
public abstract void incrMetric(String metricName, int metricValue);
|
||||
|
||||
/**
|
||||
* Increments the named metric by the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue incremental value
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
public abstract void incrMetric(String metricName, long metricValue);
|
||||
|
||||
/**
|
||||
* Increments the named metric by the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue incremental value
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
public abstract void incrMetric(String metricName, short metricValue);
|
||||
|
||||
/**
|
||||
* Increments the named metric by the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue incremental value
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
public abstract void incrMetric(String metricName, byte metricValue);
|
||||
|
||||
/**
|
||||
* Increments the named metric by the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue incremental value
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
public abstract void incrMetric(String metricName, float metricValue);
|
||||
|
||||
/**
|
||||
* Updates the table of buffered data which is to be sent periodically.
|
||||
* If the tag values match an existing row, that row is updated;
|
||||
* otherwise, a new row is added.
|
||||
*/
|
||||
public abstract void update();
|
||||
|
||||
/**
|
||||
* Removes, from the buffered data table, all rows having tags
|
||||
* that equal the tags that have been set on this record. For example,
|
||||
* if there are no tags on this record, all rows for this record name
|
||||
* would be removed. Or, if there is a single tag on this record, then
|
||||
* just rows containing a tag with the same name and value would be removed.
|
||||
*/
|
||||
public abstract void remove();
|
||||
|
||||
}
|
|
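The tag-and-update cycle described in the Javadoc above is expressed in metrics2 with annotated mutable metrics that the metrics system snapshots on its own schedule. A sketch of the "diskStats" example in that style; class, record, and metric names are illustrative:

// Hedged sketch of a metrics2 source replacing the MetricsRecord usage above.
import org.apache.hadoop.metrics2.MetricsSystem;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
import org.apache.hadoop.metrics2.lib.MutableGaugeInt;

@Metrics(name = "DiskStats", context = "example")
public class DiskStatsSource {
  @Metric("Percentage of the disk that is full")
  MutableGaugeInt diskPercentFull;
  @Metric("Kilobytes read since startup")
  MutableCounterLong kbRead;

  public static void main(String[] args) {
    MetricsSystem ms = DefaultMetricsSystem.initialize("example");
    DiskStatsSource stats =
        ms.register("DiskStats", "Disk statistics", new DiskStatsSource());
    // Values are pushed into the mutable metrics; the metrics system snapshots
    // them on its configured period, so there is no explicit update() call.
    stats.diskPercentFull.set(42);
    stats.kbRead.incr(128);
  }
}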
@@ -1,188 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServlet;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.http.HttpServer2;
|
||||
import org.apache.hadoop.metrics.spi.OutputRecord;
|
||||
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
|
||||
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
|
||||
import org.mortbay.util.ajax.JSON;
|
||||
import org.mortbay.util.ajax.JSON.Output;
|
||||
|
||||
/**
|
||||
* A servlet to print out metrics data. By default, the servlet returns a
|
||||
* textual representation (no promises are made for parseability), and
|
||||
* users can use "?format=json" for parseable output.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
public class MetricsServlet extends HttpServlet {
|
||||
|
||||
/**
|
||||
* A helper class to hold a TagMap and MetricMap.
|
||||
*/
|
||||
static class TagsMetricsPair implements JSON.Convertible {
|
||||
final TagMap tagMap;
|
||||
final MetricMap metricMap;
|
||||
|
||||
public TagsMetricsPair(TagMap tagMap, MetricMap metricMap) {
|
||||
this.tagMap = tagMap;
|
||||
this.metricMap = metricMap;
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public void fromJSON(Map map) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
/** Converts to JSON by providing an array. */
|
||||
@Override
|
||||
public void toJSON(Output out) {
|
||||
out.add(new Object[] { tagMap, metricMap });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Collects all metric data, and returns a map:
|
||||
* contextName -> recordName -> [ (tag->tagValue), (metric->metricValue) ].
|
||||
* The values are either String or Number. The final value is implemented
|
||||
* as a list of TagsMetricsPair.
|
||||
*/
|
||||
Map<String, Map<String, List<TagsMetricsPair>>> makeMap(
|
||||
Collection<MetricsContext> contexts) throws IOException {
|
||||
Map<String, Map<String, List<TagsMetricsPair>>> map =
|
||||
new TreeMap<String, Map<String, List<TagsMetricsPair>>>();
|
||||
|
||||
for (MetricsContext context : contexts) {
|
||||
Map<String, List<TagsMetricsPair>> records =
|
||||
new TreeMap<String, List<TagsMetricsPair>>();
|
||||
map.put(context.getContextName(), records);
|
||||
|
||||
for (Map.Entry<String, Collection<OutputRecord>> r :
|
||||
context.getAllRecords().entrySet()) {
|
||||
List<TagsMetricsPair> metricsAndTags =
|
||||
new ArrayList<TagsMetricsPair>();
|
||||
records.put(r.getKey(), metricsAndTags);
|
||||
for (OutputRecord outputRecord : r.getValue()) {
|
||||
TagMap tagMap = outputRecord.getTagsCopy();
|
||||
MetricMap metricMap = outputRecord.getMetricsCopy();
|
||||
metricsAndTags.add(new TagsMetricsPair(tagMap, metricMap));
|
||||
}
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doGet(HttpServletRequest request, HttpServletResponse response)
|
||||
throws ServletException, IOException {
|
||||
|
||||
if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(),
|
||||
request, response)) {
|
||||
return;
|
||||
}
|
||||
|
||||
String format = request.getParameter("format");
|
||||
Collection<MetricsContext> allContexts =
|
||||
ContextFactory.getFactory().getAllContexts();
|
||||
if ("json".equals(format)) {
|
||||
response.setContentType("application/json; charset=utf-8");
|
||||
PrintWriter out = response.getWriter();
|
||||
try {
|
||||
// Uses Jetty's built-in JSON support to convert the map into JSON.
|
||||
out.print(new JSON().toJSON(makeMap(allContexts)));
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
} else {
|
||||
PrintWriter out = response.getWriter();
|
||||
try {
|
||||
printMap(out, makeMap(allContexts));
|
||||
} finally {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Prints metrics data in a multi-line text form.
|
||||
*/
|
||||
void printMap(PrintWriter out, Map<String, Map<String, List<TagsMetricsPair>>> map) {
|
||||
for (Map.Entry<String, Map<String, List<TagsMetricsPair>>> context : map.entrySet()) {
|
||||
out.print(context.getKey());
|
||||
out.print("\n");
|
||||
for (Map.Entry<String, List<TagsMetricsPair>> record : context.getValue().entrySet()) {
|
||||
indent(out, 1);
|
||||
out.print(record.getKey());
|
||||
out.print("\n");
|
||||
for (TagsMetricsPair pair : record.getValue()) {
|
||||
indent(out, 2);
|
||||
// Prints tag values in the form "{key=value,key=value}:"
|
||||
out.print("{");
|
||||
boolean first = true;
|
||||
for (Map.Entry<String, Object> tagValue : pair.tagMap.entrySet()) {
|
||||
if (first) {
|
||||
first = false;
|
||||
} else {
|
||||
out.print(",");
|
||||
}
|
||||
out.print(tagValue.getKey());
|
||||
out.print("=");
|
||||
out.print(tagValue.getValue().toString());
|
||||
}
|
||||
out.print("}:\n");
|
||||
|
||||
// Now print metric values, one per line
|
||||
for (Map.Entry<String, Number> metricValue :
|
||||
pair.metricMap.entrySet()) {
|
||||
indent(out, 3);
|
||||
out.print(metricValue.getKey());
|
||||
out.print("=");
|
||||
out.print(metricValue.getValue().toString());
|
||||
out.print("\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void indent(PrintWriter out, int indent) {
|
||||
for (int i = 0; i < indent; ++i) {
|
||||
out.append(" ");
|
||||
}
|
||||
}
|
||||
}
|
|
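With the /metrics servlet gone, JSON metrics remain available over HTTP from the JMXJsonServlet at /jmx, since metrics2 sources are exposed as MBeans. A sketch of fetching it; host, port, and the qry filter are illustrative:

// Hedged sketch: reading JSON metrics from /jmx, the endpoint that remains
// after /metrics?format=json is removed.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class JmxFetchSketch {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:50070/jmx?qry=Hadoop:*");
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line); // print the JSON response as-is
      }
    }
  }
}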
@@ -1,104 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metrics;

import java.net.InetAddress;
import java.net.UnknownHostException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/**
 * Utility class to simplify creation and reporting of hadoop metrics.
 *
 * For examples of usage, see NameNodeMetrics.
 * @see org.apache.hadoop.metrics.MetricsRecord
 * @see org.apache.hadoop.metrics.MetricsContext
 * @see org.apache.hadoop.metrics.ContextFactory
 * @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
@Deprecated
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public class MetricsUtil {

  public static final Log LOG =
      LogFactory.getLog(MetricsUtil.class);

  /**
   * Don't allow creation of a new instance of Metrics
   */
  private MetricsUtil() {}

  public static MetricsContext getContext(String contextName) {
    return getContext(contextName, contextName);
  }

  /**
   * Utility method to return the named context.
   * If the desired context cannot be created for any reason, the exception
   * is logged, and a null context is returned.
   */
  public static MetricsContext getContext(String refName, String contextName) {
    MetricsContext metricsContext;
    try {
      metricsContext =
          ContextFactory.getFactory().getContext(refName, contextName);
      if (!metricsContext.isMonitoring()) {
        metricsContext.startMonitoring();
      }
    } catch (Exception ex) {
      LOG.error("Unable to create metrics context " + contextName, ex);
      metricsContext = ContextFactory.getNullContext(contextName);
    }
    return metricsContext;
  }

  /**
   * Utility method to create and return new metrics record instance within the
   * given context. This record is tagged with the host name.
   *
   * @param context the context
   * @param recordName name of the record
   * @return newly created metrics record
   */
  public static MetricsRecord createRecord(MetricsContext context,
                                           String recordName)
  {
    MetricsRecord metricsRecord = context.createRecord(recordName);
    metricsRecord.setTag("hostName", getHostName());
    return metricsRecord;
  }

  /**
   * Returns the host name. If the host name is unobtainable, logs the
   * exception and returns "unknown".
   */
  private static String getHostName() {
    String hostName = null;
    try {
      hostName = InetAddress.getLocalHost().getHostName();
    } catch (UnknownHostException ex) {
      LOG.info("Unable to obtain hostName", ex);
      hostName = "unknown";
    }
    return hostName;
  }
}
@@ -1,41 +0,0 @@
/*
 * Updater.java
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.metrics;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/**
 * Call-back interface. See <code>MetricsContext.registerUpdater()</code>.
 *
 * @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
@Deprecated
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public interface Updater {

  /**
   * Timer-based call-back from the metric library.
   */
  public abstract void doUpdates(MetricsContext context);

}
@@ -1,43 +0,0 @@
<html>

<!--
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License. You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
-->

<body>
Implementation of the metrics package that writes the metrics to a file.
Programmers should not normally need to use this package directly. Instead
they should use org.apache.hadoop.metrics.

<p/>
These are the implementation specific factory attributes
(See ContextFactory.getFactory()):

<dl>
    <dt><i>contextName</i>.fileName</dt>
    <dd>The path of the file to which metrics in context <i>contextName</i>
    are to be appended. If this attribute is not specified, the metrics
    are written to standard output by default.</dd>

    <dt><i>contextName</i>.period</dt>
    <dd>The period in seconds on which the metric data is written to the
    file.</dd>

</dl>


</body>
</html>
@@ -1,276 +0,0 @@
|
|||
/*
|
||||
* GangliaContext.java
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.metrics.ganglia;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.*;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.io.Charsets;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.metrics.ContextFactory;
|
||||
import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
|
||||
import org.apache.hadoop.metrics.spi.OutputRecord;
|
||||
import org.apache.hadoop.metrics.spi.Util;
|
||||
|
||||
/**
|
||||
* Context for sending metrics to Ganglia.
|
||||
*
|
||||
* @deprecated Use {@link org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30}
|
||||
* instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public class GangliaContext extends AbstractMetricsContext {
|
||||
|
||||
private static final String PERIOD_PROPERTY = "period";
|
||||
private static final String SERVERS_PROPERTY = "servers";
|
||||
private static final String UNITS_PROPERTY = "units";
|
||||
private static final String SLOPE_PROPERTY = "slope";
|
||||
private static final String TMAX_PROPERTY = "tmax";
|
||||
private static final String DMAX_PROPERTY = "dmax";
|
||||
private static final String MULTICAST_PROPERTY = "multicast";
|
||||
private static final String MULTICAST_TTL_PROPERTY = "multicast.ttl";
|
||||
|
||||
private static final String DEFAULT_UNITS = "";
|
||||
private static final String DEFAULT_SLOPE = "both";
|
||||
private static final int DEFAULT_TMAX = 60;
|
||||
private static final int DEFAULT_DMAX = 0;
|
||||
private static final int DEFAULT_PORT = 8649;
|
||||
private static final int BUFFER_SIZE = 1500; // as per libgmond.c
|
||||
private static final int DEFAULT_MULTICAST_TTL = 1;
|
||||
|
||||
private final Log LOG = LogFactory.getLog(this.getClass());
|
||||
|
||||
private static final Map<Class,String> typeTable = new HashMap<Class,String>(5);
|
||||
|
||||
static {
|
||||
typeTable.put(String.class, "string");
|
||||
typeTable.put(Byte.class, "int8");
|
||||
typeTable.put(Short.class, "int16");
|
||||
typeTable.put(Integer.class, "int32");
|
||||
typeTable.put(Long.class, "float");
|
||||
typeTable.put(Float.class, "float");
|
||||
}
|
||||
|
||||
protected byte[] buffer = new byte[BUFFER_SIZE];
|
||||
protected int offset;
|
||||
|
||||
protected List<? extends SocketAddress> metricsServers;
|
||||
private Map<String,String> unitsTable;
|
||||
private Map<String,String> slopeTable;
|
||||
private Map<String,String> tmaxTable;
|
||||
private Map<String,String> dmaxTable;
|
||||
private boolean multicastEnabled;
|
||||
private int multicastTtl;
|
||||
|
||||
protected DatagramSocket datagramSocket;
|
||||
|
||||
/** Creates a new instance of GangliaContext */
|
||||
@InterfaceAudience.Private
|
||||
public GangliaContext() {
|
||||
}
|
||||
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
public void init(String contextName, ContextFactory factory) {
|
||||
super.init(contextName, factory);
|
||||
parseAndSetPeriod(PERIOD_PROPERTY);
|
||||
|
||||
metricsServers =
|
||||
Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);
|
||||
|
||||
unitsTable = getAttributeTable(UNITS_PROPERTY);
|
||||
slopeTable = getAttributeTable(SLOPE_PROPERTY);
|
||||
tmaxTable = getAttributeTable(TMAX_PROPERTY);
|
||||
dmaxTable = getAttributeTable(DMAX_PROPERTY);
|
||||
multicastEnabled = Boolean.parseBoolean(getAttribute(MULTICAST_PROPERTY));
|
||||
String multicastTtlValue = getAttribute(MULTICAST_TTL_PROPERTY);
|
||||
if (multicastEnabled) {
|
||||
if (multicastTtlValue == null) {
|
||||
multicastTtl = DEFAULT_MULTICAST_TTL;
|
||||
} else {
|
||||
multicastTtl = Integer.parseInt(multicastTtlValue);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
if (multicastEnabled) {
|
||||
LOG.info("Enabling multicast for Ganglia with TTL " + multicastTtl);
|
||||
datagramSocket = new MulticastSocket();
|
||||
((MulticastSocket) datagramSocket).setTimeToLive(multicastTtl);
|
||||
} else {
|
||||
datagramSocket = new DatagramSocket();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
LOG.error(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* method to close the datagram socket
|
||||
*/
|
||||
@Override
|
||||
public void close() {
|
||||
super.close();
|
||||
if (datagramSocket != null) {
|
||||
datagramSocket.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
public void emitRecord(String contextName, String recordName,
|
||||
OutputRecord outRec)
|
||||
throws IOException {
|
||||
// Setup so that the records have the proper leader names so they are
|
||||
// unambiguous at the ganglia level, and this prevents a lot of rework
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(contextName);
|
||||
sb.append('.');
|
||||
|
||||
if (contextName.equals("jvm") && outRec.getTag("processName") != null) {
|
||||
sb.append(outRec.getTag("processName"));
|
||||
sb.append('.');
|
||||
}
|
||||
|
||||
sb.append(recordName);
|
||||
sb.append('.');
|
||||
int sbBaseLen = sb.length();
|
||||
|
||||
// emit each metric in turn
|
||||
for (String metricName : outRec.getMetricNames()) {
|
||||
Object metric = outRec.getMetric(metricName);
|
||||
String type = typeTable.get(metric.getClass());
|
||||
if (type != null) {
|
||||
sb.append(metricName);
|
||||
emitMetric(sb.toString(), type, metric.toString());
|
||||
sb.setLength(sbBaseLen);
|
||||
} else {
|
||||
LOG.warn("Unknown metrics type: " + metric.getClass());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void emitMetric(String name, String type, String value)
|
||||
throws IOException {
|
||||
String units = getUnits(name);
|
||||
int slope = getSlope(name);
|
||||
int tmax = getTmax(name);
|
||||
int dmax = getDmax(name);
|
||||
|
||||
offset = 0;
|
||||
xdr_int(0); // metric_user_defined
|
||||
xdr_string(type);
|
||||
xdr_string(name);
|
||||
xdr_string(value);
|
||||
xdr_string(units);
|
||||
xdr_int(slope);
|
||||
xdr_int(tmax);
|
||||
xdr_int(dmax);
|
||||
|
||||
for (SocketAddress socketAddress : metricsServers) {
|
||||
DatagramPacket packet =
|
||||
new DatagramPacket(buffer, offset, socketAddress);
|
||||
datagramSocket.send(packet);
|
||||
}
|
||||
}
|
||||
|
||||
protected String getUnits(String metricName) {
|
||||
String result = unitsTable.get(metricName);
|
||||
if (result == null) {
|
||||
result = DEFAULT_UNITS;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
protected int getSlope(String metricName) {
|
||||
String slopeString = slopeTable.get(metricName);
|
||||
if (slopeString == null) {
|
||||
slopeString = DEFAULT_SLOPE;
|
||||
}
|
||||
return ("zero".equals(slopeString) ? 0 : 3); // see gmetric.c
|
||||
}
|
||||
|
||||
protected int getTmax(String metricName) {
|
||||
if (tmaxTable == null) {
|
||||
return DEFAULT_TMAX;
|
||||
}
|
||||
String tmaxString = tmaxTable.get(metricName);
|
||||
if (tmaxString == null) {
|
||||
return DEFAULT_TMAX;
|
||||
}
|
||||
else {
|
||||
return Integer.parseInt(tmaxString);
|
||||
}
|
||||
}
|
||||
|
||||
protected int getDmax(String metricName) {
|
||||
String dmaxString = dmaxTable.get(metricName);
|
||||
if (dmaxString == null) {
|
||||
return DEFAULT_DMAX;
|
||||
}
|
||||
else {
|
||||
return Integer.parseInt(dmaxString);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Puts a string into the buffer by first writing the size of the string
|
||||
* as an int, followed by the bytes of the string, padded if necessary to
|
||||
* a multiple of 4.
|
||||
*/
|
||||
protected void xdr_string(String s) {
|
||||
byte[] bytes = s.getBytes(Charsets.UTF_8);
|
||||
int len = bytes.length;
|
||||
xdr_int(len);
|
||||
System.arraycopy(bytes, 0, buffer, offset, len);
|
||||
offset += len;
|
||||
pad();
|
||||
}
|
||||
|
||||
/**
|
||||
* Pads the buffer with zero bytes up to the nearest multiple of 4.
|
||||
*/
|
||||
private void pad() {
|
||||
int newOffset = ((offset + 3) / 4) * 4;
|
||||
while (offset < newOffset) {
|
||||
buffer[offset++] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Puts an integer into the buffer as 4 bytes, big-endian.
|
||||
*/
|
||||
protected void xdr_int(int i) {
|
||||
buffer[offset++] = (byte)((i >> 24) & 0xff);
|
||||
buffer[offset++] = (byte)((i >> 16) & 0xff);
|
||||
buffer[offset++] = (byte)((i >> 8) & 0xff);
|
||||
buffer[offset++] = (byte)(i & 0xff);
|
||||
}
|
||||
}
|
|
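The Ganglia wire-format handling removed here lives on in the metrics2 sinks GangliaSink30 and GangliaSink31, configured through hadoop-metrics2.properties. A sketch follows; the sink name "ganglia" and the per-metric overrides are illustrative:

# Hedged sketch: Ganglia reporting via a metrics2 sink instead of GangliaContext.
# Pick the sink class matching the Ganglia wire format in use (3.0.x vs 3.1.x),
# as before.
*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
*.sink.ganglia.period=10
*.sink.ganglia.servers=localhost:8649
# Optional slope/dmax overrides keep their Ganglia meaning, e.g.:
*.sink.ganglia.slope=jvm.metrics.gcCount=zero,jvm.metrics.memHeapUsedM=both
*.sink.ganglia.dmax=jvm.metrics.threadsBlocked=70,jvm.metrics.memHeapUsedM=40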
@@ -1,147 +0,0 @@
|
|||
/*
|
||||
* GangliaContext.java
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.metrics.ganglia;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.DatagramPacket;
|
||||
import java.net.SocketAddress;
|
||||
import java.net.UnknownHostException;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.metrics.ContextFactory;
|
||||
import org.apache.hadoop.net.DNS;
|
||||
|
||||
/**
|
||||
* Context for sending metrics to Ganglia version 3.1.x.
|
||||
*
|
||||
* 3.1.1 has a slightly different wire protocol compared to 3.0.x.
|
||||
*
|
||||
* @deprecated Use {@link org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31}
|
||||
* instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public class GangliaContext31 extends GangliaContext {
|
||||
|
||||
String hostName = "UNKNOWN.example.com";
|
||||
|
||||
private static final Log LOG =
|
||||
LogFactory.getLog("org.apache.hadoop.util.GangliaContext31");
|
||||
|
||||
@Override
|
||||
public void init(String contextName, ContextFactory factory) {
|
||||
super.init(contextName, factory);
|
||||
|
||||
LOG.debug("Initializing the GangliaContext31 for Ganglia 3.1 metrics.");
|
||||
|
||||
// Take the hostname from the DNS class.
|
||||
|
||||
Configuration conf = new Configuration();
|
||||
|
||||
if (conf.get("slave.host.name") != null) {
|
||||
hostName = conf.get("slave.host.name");
|
||||
} else {
|
||||
try {
|
||||
hostName = DNS.getDefaultHost(
|
||||
conf.get("dfs.datanode.dns.interface","default"),
|
||||
conf.get("dfs.datanode.dns.nameserver","default"));
|
||||
} catch (UnknownHostException uhe) {
|
||||
LOG.error(uhe);
|
||||
hostName = "UNKNOWN.example.com";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void emitMetric(String name, String type, String value)
|
||||
throws IOException
|
||||
{
|
||||
if (name == null) {
|
||||
LOG.warn("Metric was emitted with no name.");
|
||||
return;
|
||||
} else if (value == null) {
|
||||
LOG.warn("Metric name " + name +" was emitted with a null value.");
|
||||
return;
|
||||
} else if (type == null) {
|
||||
LOG.warn("Metric name " + name + ", value " + value + " has no type.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (LOG.isDebugEnabled()) {
|
||||
LOG.debug("Emitting metric " + name + ", type " + type + ", value " +
|
||||
value + " from hostname" + hostName);
|
||||
}
|
||||
|
||||
String units = getUnits(name);
|
||||
int slope = getSlope(name);
|
||||
int tmax = getTmax(name);
|
||||
int dmax = getDmax(name);
|
||||
offset = 0;
|
||||
String groupName = name.substring(0,name.lastIndexOf("."));
|
||||
|
||||
// The following XDR recipe was done through a careful reading of
|
||||
// gm_protocol.x in Ganglia 3.1 and carefully examining the output of
|
||||
// the gmetric utility with strace.
|
||||
|
||||
// First we send out a metadata message
|
||||
xdr_int(128); // metric_id = metadata_msg
|
||||
xdr_string(hostName); // hostname
|
||||
xdr_string(name); // metric name
|
||||
xdr_int(0); // spoof = False
|
||||
xdr_string(type); // metric type
|
||||
xdr_string(name); // metric name
|
||||
xdr_string(units); // units
|
||||
xdr_int(slope); // slope
|
||||
xdr_int(tmax); // tmax, the maximum time between metrics
|
||||
xdr_int(dmax); // dmax, the lifetime in seconds before the metric expires
|
||||
|
||||
xdr_int(1); /*Num of the entries in extra_value field for
|
||||
Ganglia 3.1.x*/
|
||||
xdr_string("GROUP"); /*Group attribute*/
|
||||
xdr_string(groupName); /*Group value*/
|
||||
|
||||
for (SocketAddress socketAddress : metricsServers) {
|
||||
DatagramPacket packet =
|
||||
new DatagramPacket(buffer, offset, socketAddress);
|
||||
datagramSocket.send(packet);
|
||||
}
|
||||
|
||||
// Now we send out a message with the actual value.
|
||||
// Technically, we only need to send out the metadata message once for
|
||||
// each metric, but I don't want to have to record which metrics we did and
|
||||
// did not send.
|
||||
offset = 0;
|
||||
xdr_int(133); // we are sending a string value
|
||||
xdr_string(hostName); // hostName
|
||||
xdr_string(name); // metric name
|
||||
xdr_int(0); // spoof = False
|
||||
xdr_string("%s"); // format field
|
||||
xdr_string(value); // metric value
|
||||
|
||||
for (SocketAddress socketAddress : metricsServers) {
|
||||
DatagramPacket packet =
|
||||
new DatagramPacket(buffer, offset, socketAddress);
|
||||
datagramSocket.send(packet);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,80 +0,0 @@
|
|||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
|
||||
<html>
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<body>
|
||||
<!--
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
-->
|
||||
|
||||
Implementation of the metrics package that sends metric data to
|
||||
<a href="http://ganglia.sourceforge.net/">Ganglia</a>.
|
||||
Programmers should not normally need to use this package directly. Instead
|
||||
they should use org.apache.hadoop.metrics.
|
||||
|
||||
<p/>
|
||||
These are the implementation specific factory attributes
|
||||
(See ContextFactory.getFactory()):
|
||||
|
||||
<dl>
|
||||
<dt><i>contextName</i>.servers</dt>
|
||||
<dd>Space and/or comma separated sequence of servers to which UDP
|
||||
messages should be sent.</dd>
|
||||
|
||||
<dt><i>contextName</i>.period</dt>
|
||||
<dd>The period in seconds on which the metric data is sent to the
|
||||
server(s).</dd>
|
||||
|
||||
<dt><i>contextName</i>.multicast</dt>
|
||||
<dd>Enable multicast for Ganglia</dd>
|
||||
|
||||
<dt><i>contextName</i>.multicast.ttl</dt>
|
||||
<dd>TTL for multicast packets</dd>
|
||||
|
||||
<dt><i>contextName</i>.units.<i>recordName</i>.<i>metricName</i></dt>
|
||||
<dd>The units for the specified metric in the specified record.</dd>
|
||||
|
||||
<dt><i>contextName</i>.slope.<i>recordName</i>.<i>metricName</i></dt>
|
||||
<dd>The slope for the specified metric in the specified record.</dd>
|
||||
|
||||
<dt><i>contextName</i>.tmax.<i>recordName</i>.<i>metricName</i></dt>
|
||||
<dd>The tmax for the specified metric in the specified record.</dd>
|
||||
|
||||
<dt><i>contextName</i>.dmax.<i>recordName</i>.<i>metricName</i></dt>
|
||||
<dd>The dmax for the specified metric in the specified record.</dd>
|
||||
|
||||
</dl>
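<p/>
As a minimal, hedged sketch (the context name <code>myContext</code>, the record
name <code>myRecord</code>, the metric name <code>myMetric</code> and all of the
attribute values are illustrative placeholders, not names defined by this
package), these attributes can also be set programmatically through
<code>ContextFactory.setAttribute()</code> before the context is created:
<pre>
ContextFactory factory = ContextFactory.getFactory();
// Send UDP packets to two collectors every 10 seconds.
factory.setAttribute("myContext.servers",
    "gmond1.example.com:8649, gmond2.example.com:8649");
factory.setAttribute("myContext.period", "10");
// Per-metric tuning for myRecord.myMetric.
factory.setAttribute("myContext.units.myRecord.myMetric", "bytes");
factory.setAttribute("myContext.slope.myRecord.myMetric", "positive");
factory.setAttribute("myContext.tmax.myRecord.myMetric", "60");
factory.setAttribute("myContext.dmax.myRecord.myMetric", "0");
</pre>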
|
||||
|
||||
|
||||
</body>
|
||||
</html>
|
|
@@ -1,36 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.jvm;
|
||||
|
||||
/**
|
||||
* A log4J Appender that simply counts logging events in four levels:
|
||||
* fatal, error, warn and info.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public class EventCounter extends org.apache.hadoop.log.metrics.EventCounter {
|
||||
|
||||
static {
|
||||
// The logging system is not started yet.
|
||||
System.err.println("WARNING: "+ EventCounter.class.getName() +
|
||||
" is deprecated. Please use "+
|
||||
org.apache.hadoop.log.metrics.EventCounter.class.getName() +
|
||||
" in all the log4j.properties files.");
|
||||
}
|
||||
}
|
|
@@ -1,203 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.jvm;
|
||||
|
||||
import java.lang.management.ManagementFactory;
|
||||
import java.lang.management.MemoryMXBean;
|
||||
import java.lang.management.MemoryUsage;
|
||||
import java.lang.management.ThreadInfo;
|
||||
import java.lang.management.ThreadMXBean;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.metrics.MetricsContext;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
import org.apache.hadoop.metrics.MetricsUtil;
|
||||
import org.apache.hadoop.metrics.Updater;
|
||||
|
||||
import static java.lang.Thread.State.*;
|
||||
import java.lang.management.GarbageCollectorMXBean;
|
||||
import java.util.List;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
/**
|
||||
* Singleton class which reports Java Virtual Machine metrics to the metrics API.
|
||||
* Any application can create an instance of this class in order to emit
|
||||
* Java VM metrics.
|
||||
*
|
||||
* @deprecated Use {@link org.apache.hadoop.metrics2.source.JvmMetrics} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
public class JvmMetrics implements Updater {
|
||||
|
||||
private static final float M = 1024*1024;
|
||||
private static JvmMetrics theInstance = null;
|
||||
private static Log log = LogFactory.getLog(JvmMetrics.class);
|
||||
|
||||
private MetricsRecord metrics;
|
||||
|
||||
// garbage collection counters
|
||||
private long gcCount = 0;
|
||||
private long gcTimeMillis = 0;
|
||||
|
||||
// logging event counters
|
||||
private long fatalCount = 0;
|
||||
private long errorCount = 0;
|
||||
private long warnCount = 0;
|
||||
private long infoCount = 0;
|
||||
|
||||
public synchronized static JvmMetrics init(String processName, String sessionId) {
|
||||
return init(processName, sessionId, "metrics");
|
||||
}
|
||||
|
||||
public synchronized static JvmMetrics init(String processName, String sessionId,
|
||||
String recordName) {
|
||||
if (theInstance != null) {
|
||||
log.info("Cannot initialize JVM Metrics with processName=" +
|
||||
processName + ", sessionId=" + sessionId +
|
||||
" - already initialized");
|
||||
}
|
||||
else {
|
||||
log.info("Initializing JVM Metrics with processName="
|
||||
+ processName + ", sessionId=" + sessionId);
|
||||
theInstance = new JvmMetrics(processName, sessionId, recordName);
|
||||
}
|
||||
return theInstance;
|
||||
}
|
||||
|
||||
/** Creates a new instance of JvmMetrics */
|
||||
private JvmMetrics(String processName, String sessionId,
|
||||
String recordName) {
|
||||
MetricsContext context = MetricsUtil.getContext("jvm");
|
||||
metrics = MetricsUtil.createRecord(context, recordName);
|
||||
metrics.setTag("processName", processName);
|
||||
metrics.setTag("sessionId", sessionId);
|
||||
context.registerUpdater(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* This will be called periodically (with the period being configuration
|
||||
* dependent).
|
||||
*/
|
||||
@Override
|
||||
public void doUpdates(MetricsContext context) {
|
||||
doMemoryUpdates();
|
||||
doGarbageCollectionUpdates();
|
||||
doThreadUpdates();
|
||||
doEventCountUpdates();
|
||||
metrics.update();
|
||||
}
|
||||
|
||||
private void doMemoryUpdates() {
|
||||
MemoryMXBean memoryMXBean =
|
||||
ManagementFactory.getMemoryMXBean();
|
||||
MemoryUsage memNonHeap =
|
||||
memoryMXBean.getNonHeapMemoryUsage();
|
||||
MemoryUsage memHeap =
|
||||
memoryMXBean.getHeapMemoryUsage();
|
||||
Runtime runtime = Runtime.getRuntime();
|
||||
|
||||
metrics.setMetric("memNonHeapUsedM", memNonHeap.getUsed()/M);
|
||||
metrics.setMetric("memNonHeapCommittedM", memNonHeap.getCommitted()/M);
|
||||
metrics.setMetric("memHeapUsedM", memHeap.getUsed()/M);
|
||||
metrics.setMetric("memHeapCommittedM", memHeap.getCommitted()/M);
|
||||
metrics.setMetric("maxMemoryM", runtime.maxMemory()/M);
|
||||
}
|
||||
|
||||
private void doGarbageCollectionUpdates() {
|
||||
List<GarbageCollectorMXBean> gcBeans =
|
||||
ManagementFactory.getGarbageCollectorMXBeans();
|
||||
long count = 0;
|
||||
long timeMillis = 0;
|
||||
for (GarbageCollectorMXBean gcBean : gcBeans) {
|
||||
count += gcBean.getCollectionCount();
|
||||
timeMillis += gcBean.getCollectionTime();
|
||||
}
|
||||
metrics.incrMetric("gcCount", (int)(count - gcCount));
|
||||
metrics.incrMetric("gcTimeMillis", (int)(timeMillis - gcTimeMillis));
|
||||
|
||||
gcCount = count;
|
||||
gcTimeMillis = timeMillis;
|
||||
}
|
||||
|
||||
private void doThreadUpdates() {
|
||||
ThreadMXBean threadMXBean =
|
||||
ManagementFactory.getThreadMXBean();
|
||||
long threadIds[] =
|
||||
threadMXBean.getAllThreadIds();
|
||||
ThreadInfo[] threadInfos =
|
||||
threadMXBean.getThreadInfo(threadIds, 0);
|
||||
|
||||
int threadsNew = 0;
|
||||
int threadsRunnable = 0;
|
||||
int threadsBlocked = 0;
|
||||
int threadsWaiting = 0;
|
||||
int threadsTimedWaiting = 0;
|
||||
int threadsTerminated = 0;
|
||||
|
||||
for (ThreadInfo threadInfo : threadInfos) {
|
||||
// threadInfo is null if the thread is not alive or doesn't exist
|
||||
if (threadInfo == null) continue;
|
||||
Thread.State state = threadInfo.getThreadState();
|
||||
if (state == NEW) {
|
||||
threadsNew++;
|
||||
}
|
||||
else if (state == RUNNABLE) {
|
||||
threadsRunnable++;
|
||||
}
|
||||
else if (state == BLOCKED) {
|
||||
threadsBlocked++;
|
||||
}
|
||||
else if (state == WAITING) {
|
||||
threadsWaiting++;
|
||||
}
|
||||
else if (state == TIMED_WAITING) {
|
||||
threadsTimedWaiting++;
|
||||
}
|
||||
else if (state == TERMINATED) {
|
||||
threadsTerminated++;
|
||||
}
|
||||
}
|
||||
metrics.setMetric("threadsNew", threadsNew);
|
||||
metrics.setMetric("threadsRunnable", threadsRunnable);
|
||||
metrics.setMetric("threadsBlocked", threadsBlocked);
|
||||
metrics.setMetric("threadsWaiting", threadsWaiting);
|
||||
metrics.setMetric("threadsTimedWaiting", threadsTimedWaiting);
|
||||
metrics.setMetric("threadsTerminated", threadsTerminated);
|
||||
}
|
||||
|
||||
private void doEventCountUpdates() {
|
||||
long newFatal = EventCounter.getFatal();
|
||||
long newError = EventCounter.getError();
|
||||
long newWarn = EventCounter.getWarn();
|
||||
long newInfo = EventCounter.getInfo();
|
||||
|
||||
metrics.incrMetric("logFatal", (int)(newFatal - fatalCount));
|
||||
metrics.incrMetric("logError", (int)(newError - errorCount));
|
||||
metrics.incrMetric("logWarn", (int)(newWarn - warnCount));
|
||||
metrics.incrMetric("logInfo", (int)(newInfo - infoCount));
|
||||
|
||||
fatalCount = newFatal;
|
||||
errorCount = newError;
|
||||
warnCount = newWarn;
|
||||
infoCount = newInfo;
|
||||
}
|
||||
}
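/*
 * Editor's sketch (not part of the removed file): the smallest possible use
 * of the deprecated JvmMetrics singleton. The process name "MyDaemon" and the
 * empty session id are illustrative placeholders.
 */
class JvmMetricsExample {
  public static void main(String[] args) {
    // init() registers the singleton with the "jvm" metrics context, after
    // which memory, GC, thread and logging counters are emitted every period.
    JvmMetrics jvmMetrics = JvmMetrics.init("MyDaemon", "");
  }
}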
|
|
@@ -1,22 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Evolving
|
||||
package org.apache.hadoop.metrics.jvm;
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
|
@@ -1,159 +0,0 @@
|
|||
<html>
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<head>
|
||||
<title>org.apache.hadoop.metrics</title>
|
||||
</head>
|
||||
<body>
|
||||
This package defines an API for reporting performance metric information.
|
||||
<p/>
|
||||
The API is abstract so that it can be implemented on top of
|
||||
a variety of metrics client libraries. The choice of
|
||||
client library is a configuration option, and different
|
||||
modules within the same application can use
|
||||
different metrics implementation libraries.
|
||||
<p/>
|
||||
Sub-packages:
|
||||
<dl>
|
||||
<dt><code>org.apache.hadoop.metrics.spi</code></dt>
|
||||
<dd>The abstract Service Provider Interface package. Those wishing to
|
||||
integrate the metrics API with a particular metrics client library should
|
||||
extend this package.</dd>
|
||||
|
||||
<dt><code>org.apache.hadoop.metrics.file</code></dt>
|
||||
<dd>An implementation package which writes the metric data to
|
||||
a file, or sends it to the standard output stream.</dd>
|
||||
|
||||
<dt> <code>org.apache.hadoop.metrics.ganglia</code></dt>
|
||||
<dd>An implementation package which sends metric data to
|
||||
<a href="http://ganglia.sourceforge.net/">Ganglia</a>.</dd>
|
||||
</dl>
|
||||
|
||||
<h3>Introduction to the Metrics API</h3>
|
||||
|
||||
Here is a simple example of how to use this package to report a single
|
||||
metric value:
|
||||
<pre>
|
||||
private ContextFactory contextFactory = ContextFactory.getFactory();
|
||||
|
||||
void reportMyMetric(float myMetric) {
|
||||
MetricsContext myContext = contextFactory.getContext("myContext");
|
||||
MetricsRecord myRecord = myContext.getRecord("myRecord");
|
||||
myRecord.setMetric("myMetric", myMetric);
|
||||
myRecord.update();
|
||||
}
|
||||
</pre>
|
||||
|
||||
In this example there are three names:
|
||||
<dl>
|
||||
<dt><i>myContext</i></dt>
|
||||
<dd>The context name will typically identify either the application, or else a
|
||||
module within an application or library.</dd>
|
||||
|
||||
<dt><i>myRecord</i></dt>
|
||||
<dd>The record name generally identifies some entity for which a set of
|
||||
metrics are to be reported. For example, you could have a record named
|
||||
"cacheStats" for reporting a number of statistics relating to the usage of
|
||||
some cache in your application.</dd>
|
||||
|
||||
<dt><i>myMetric</i></dt>
|
||||
<dd>This identifies a particular metric. For example, you might have metrics
|
||||
named "cache_hits" and "cache_misses".
|
||||
</dd>
|
||||
</dl>
|
||||
|
||||
<h3>Tags</h3>
|
||||
|
||||
In some cases it is useful to have multiple records with the same name. For
|
||||
example, suppose that you want to report statistics about each disk on a computer.
|
||||
In this case, the record name would be something like "diskStats", but you also
|
||||
need to identify the disk, which is done by adding a <i>tag</i> to the record.
|
||||
The code could look something like this:
|
||||
<pre>
|
||||
private MetricsRecord diskStats =
|
||||
contextFactory.getContext("myContext").getRecord("diskStats");
|
||||
|
||||
void reportDiskMetrics(String diskName, float diskBusy, float diskUsed) {
|
||||
diskStats.setTag("diskName", diskName);
|
||||
diskStats.setMetric("diskBusy", diskBusy);
|
||||
diskStats.setMetric("diskUsed", diskUsed);
|
||||
diskStats.update();
|
||||
}
|
||||
</pre>
|
||||
|
||||
<h3>Buffering and Callbacks</h3>
|
||||
|
||||
Data is not sent immediately to the metrics system when
|
||||
<code>MetricsRecord.update()</code> is called. Instead it is stored in an
|
||||
internal table, and the contents of the table are sent periodically.
|
||||
This can be important for two reasons:
|
||||
<ol>
|
||||
<li>It means that a programmer is free to put calls to this API in an
|
||||
inner loop, since updates can be very frequent without slowing down
|
||||
the application significantly.</li>
|
||||
<li>Some implementations can gain efficiency by combining many metrics
|
||||
into a single UDP message.</li>
|
||||
</ol>
|
||||
|
||||
The API provides a timer-based callback via the
|
||||
<code>registerUpdater()</code> method. The benefit of this
|
||||
versus using <code>java.util.Timer</code> is that the callbacks will be done
|
||||
immediately before sending the data, making the data as current as possible.
|
||||
|
||||
<h3>Configuration</h3>
|
||||
|
||||
It is possible to programmatically examine and modify configuration data
|
||||
before creating a context, like this:
|
||||
<pre>
|
||||
ContextFactory factory = ContextFactory.getFactory();
|
||||
... examine and/or modify factory attributes ...
|
||||
MetricsContext context = factory.getContext("myContext");
|
||||
</pre>
|
||||
The factory attributes can be examined and modified using the following
|
||||
<code>ContextFactory</code> methods:
|
||||
<ul>
|
||||
<li><code>Object getAttribute(String attributeName)</code></li>
|
||||
<li><code>String[] getAttributeNames()</code></li>
|
||||
<li><code>void setAttribute(String name, Object value)</code></li>
|
||||
<li><code>void removeAttribute(String attributeName)</code></li>
|
||||
</ul>
|
||||
|
||||
<p/>
|
||||
<code>ContextFactory.getFactory()</code> initializes the factory attributes by
|
||||
reading the properties file <code>hadoop-metrics.properties</code> if it exists
|
||||
on the class path.
|
||||
|
||||
<p/>
|
||||
A factory attribute named:
|
||||
<pre>
|
||||
<i>contextName</i>.class
|
||||
</pre>
|
||||
should have as its value the fully qualified name of the class to be
|
||||
instantiated by a call to the <code>ContextFactory</code> method
|
||||
<code>getContext(<i>contextName</i>)</code>. If this factory attribute is not
|
||||
specified, the default is to instantiate
|
||||
<code>org.apache.hadoop.metrics.file.FileContext</code>.
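<p/>
For example (a hedged sketch; <code>myContext</code> is an arbitrary name and
the Ganglia class is just one of the shipped implementations):
<pre>
ContextFactory factory = ContextFactory.getFactory();
// Select an implementation for "myContext" unless the properties file
// already defines myContext.class.
if (factory.getAttribute("myContext.class") == null) {
    factory.setAttribute("myContext.class",
        "org.apache.hadoop.metrics.ganglia.GangliaContext31");
}
MetricsContext context = factory.getContext("myContext");
</pre>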
|
||||
|
||||
<p/>
|
||||
Other factory attributes are specific to a particular implementation of this
|
||||
API and are documented elsewhere. For example, configuration attributes for
|
||||
the file and Ganglia implementations can be found in the javadoc for
|
||||
their respective packages.
|
||||
</body>
|
||||
</html>
|
|
@@ -1,494 +0,0 @@
|
|||
/*
|
||||
* AbstractMetricsContext.java
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.metrics.spi;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.Timer;
|
||||
import java.util.TimerTask;
|
||||
import java.util.TreeMap;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.metrics.ContextFactory;
|
||||
import org.apache.hadoop.metrics.MetricsContext;
|
||||
import org.apache.hadoop.metrics.MetricsException;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
import org.apache.hadoop.metrics.Updater;
|
||||
|
||||
/**
|
||||
* The main class of the Service Provider Interface. This class should be
|
||||
* extended in order to integrate the Metrics API with a specific metrics
|
||||
* client library. <p/>
|
||||
*
|
||||
* This class implements the internal table of metric data, and the timer
|
||||
* on which data is to be sent to the metrics system. Subclasses must
|
||||
* override the abstract <code>emitRecord</code> method in order to transmit
|
||||
* the data. <p/>
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public abstract class AbstractMetricsContext implements MetricsContext {
|
||||
|
||||
private int period = MetricsContext.DEFAULT_PERIOD;
|
||||
private Timer timer = null;
|
||||
|
||||
private Set<Updater> updaters = new HashSet<Updater>(1);
|
||||
private volatile boolean isMonitoring = false;
|
||||
|
||||
private ContextFactory factory = null;
|
||||
private String contextName = null;
|
||||
|
||||
@InterfaceAudience.Private
|
||||
public static class TagMap extends TreeMap<String,Object> {
|
||||
private static final long serialVersionUID = 3546309335061952993L;
|
||||
TagMap() {
|
||||
super();
|
||||
}
|
||||
TagMap(TagMap orig) {
|
||||
super(orig);
|
||||
}
|
||||
/**
|
||||
* Returns true if this tagmap contains every tag in other.
|
||||
*/
|
||||
public boolean containsAll(TagMap other) {
|
||||
for (Map.Entry<String,Object> entry : other.entrySet()) {
|
||||
Object value = get(entry.getKey());
|
||||
if (value == null || !value.equals(entry.getValue())) {
|
||||
// either key does not exist here, or the value is different
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
@InterfaceAudience.Private
|
||||
public static class MetricMap extends TreeMap<String,Number> {
|
||||
private static final long serialVersionUID = -7495051861141631609L;
|
||||
MetricMap() {
|
||||
super();
|
||||
}
|
||||
MetricMap(MetricMap orig) {
|
||||
super(orig);
|
||||
}
|
||||
}
|
||||
|
||||
static class RecordMap extends HashMap<TagMap,MetricMap> {
|
||||
private static final long serialVersionUID = 259835619700264611L;
|
||||
}
|
||||
|
||||
private Map<String,RecordMap> bufferedData = new HashMap<String,RecordMap>();
|
||||
|
||||
|
||||
/**
|
||||
* Creates a new instance of AbstractMetricsContext
|
||||
*/
|
||||
protected AbstractMetricsContext() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the context.
|
||||
*/
|
||||
@Override
|
||||
public void init(String contextName, ContextFactory factory)
|
||||
{
|
||||
this.contextName = contextName;
|
||||
this.factory = factory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convenience method for subclasses to access factory attributes.
|
||||
*/
|
||||
protected String getAttribute(String attributeName) {
|
||||
String factoryAttribute = contextName + "." + attributeName;
|
||||
return (String) factory.getAttribute(factoryAttribute);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an attribute-value map derived from the factory attributes
|
||||
* by finding all factory attributes that begin with
|
||||
* <i>contextName</i>.<i>tableName</i>. The returned map consists of
|
||||
* those attributes with the contextName and tableName stripped off.
|
||||
*/
|
||||
protected Map<String,String> getAttributeTable(String tableName) {
|
||||
String prefix = contextName + "." + tableName + ".";
|
||||
Map<String,String> result = new HashMap<String,String>();
|
||||
for (String attributeName : factory.getAttributeNames()) {
|
||||
if (attributeName.startsWith(prefix)) {
|
||||
String name = attributeName.substring(prefix.length());
|
||||
String value = (String) factory.getAttribute(attributeName);
|
||||
result.put(name, value);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the context name.
|
||||
*/
|
||||
@Override
|
||||
public String getContextName() {
|
||||
return contextName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the factory by which this context was created.
|
||||
*/
|
||||
public ContextFactory getContextFactory() {
|
||||
return factory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts or restarts monitoring, the emitting of metrics records.
|
||||
*/
|
||||
@Override
|
||||
public synchronized void startMonitoring()
|
||||
throws IOException {
|
||||
if (!isMonitoring) {
|
||||
startTimer();
|
||||
isMonitoring = true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops monitoring. This does not free buffered data.
|
||||
* @see #close()
|
||||
*/
|
||||
@Override
|
||||
public synchronized void stopMonitoring() {
|
||||
if (isMonitoring) {
|
||||
stopTimer();
|
||||
isMonitoring = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if monitoring is currently in progress.
|
||||
*/
|
||||
@Override
|
||||
public boolean isMonitoring() {
|
||||
return isMonitoring;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops monitoring and frees buffered data, returning this
|
||||
* object to its initial state.
|
||||
*/
|
||||
@Override
|
||||
public synchronized void close() {
|
||||
stopMonitoring();
|
||||
clearUpdaters();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new MetricsRecord instance with the given <code>recordName</code>.
|
||||
* Throws an exception if the metrics implementation is configured with a fixed
|
||||
* set of record names and <code>recordName</code> is not in that set.
|
||||
*
|
||||
* @param recordName the name of the record
|
||||
* @throws MetricsException if recordName conflicts with configuration data
|
||||
*/
|
||||
@Override
|
||||
public final synchronized MetricsRecord createRecord(String recordName) {
|
||||
if (bufferedData.get(recordName) == null) {
|
||||
bufferedData.put(recordName, new RecordMap());
|
||||
}
|
||||
return newRecord(recordName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Subclasses should override this if they subclass MetricsRecordImpl.
|
||||
* @param recordName the name of the record
|
||||
* @return newly created instance of MetricsRecordImpl or subclass
|
||||
*/
|
||||
protected MetricsRecord newRecord(String recordName) {
|
||||
return new MetricsRecordImpl(recordName, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a callback to be called at time intervals determined by
|
||||
* the configuration.
|
||||
*
|
||||
* @param updater object to be run periodically; it should update
|
||||
* some metrics records
|
||||
*/
|
||||
@Override
|
||||
public synchronized void registerUpdater(final Updater updater) {
|
||||
if (!updaters.contains(updater)) {
|
||||
updaters.add(updater);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a callback, if it exists.
|
||||
*
|
||||
* @param updater object to be removed from the callback list
|
||||
*/
|
||||
@Override
|
||||
public synchronized void unregisterUpdater(Updater updater) {
|
||||
updaters.remove(updater);
|
||||
}
|
||||
|
||||
private synchronized void clearUpdaters() {
|
||||
updaters.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts timer if it is not already started
|
||||
*/
|
||||
private synchronized void startTimer() {
|
||||
if (timer == null) {
|
||||
timer = new Timer("Timer thread for monitoring " + getContextName(),
|
||||
true);
|
||||
TimerTask task = new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
timerEvent();
|
||||
} catch (IOException ioe) {
|
||||
ioe.printStackTrace();
|
||||
}
|
||||
}
|
||||
};
|
||||
long millis = period * 1000;
|
||||
timer.scheduleAtFixedRate(task, millis, millis);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops timer if it is running
|
||||
*/
|
||||
private synchronized void stopTimer() {
|
||||
if (timer != null) {
|
||||
timer.cancel();
|
||||
timer = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Timer callback.
|
||||
*/
|
||||
private void timerEvent() throws IOException {
|
||||
if (isMonitoring) {
|
||||
Collection<Updater> myUpdaters;
|
||||
synchronized (this) {
|
||||
myUpdaters = new ArrayList<Updater>(updaters);
|
||||
}
|
||||
// Run all the registered updates without holding a lock
|
||||
// on this context
|
||||
for (Updater updater : myUpdaters) {
|
||||
try {
|
||||
updater.doUpdates(this);
|
||||
} catch (Throwable throwable) {
|
||||
throwable.printStackTrace();
|
||||
}
|
||||
}
|
||||
emitRecords();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Emits the records.
|
||||
*/
|
||||
private synchronized void emitRecords() throws IOException {
|
||||
for (Map.Entry<String,RecordMap> recordEntry : bufferedData.entrySet()) {
|
||||
RecordMap recordMap = recordEntry.getValue();
|
||||
synchronized (recordMap) {
|
||||
Set<Entry<TagMap, MetricMap>> entrySet = recordMap.entrySet ();
|
||||
for (Entry<TagMap, MetricMap> entry : entrySet) {
|
||||
OutputRecord outRec = new OutputRecord(entry.getKey(), entry.getValue());
|
||||
emitRecord(contextName, recordEntry.getKey(), outRec);
|
||||
}
|
||||
}
|
||||
}
|
||||
flush();
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves all the records managed by this MetricsContext.
|
||||
* Useful for monitoring systems that are polling-based.
|
||||
* @return A non-null collection of all monitoring records.
|
||||
*/
|
||||
@Override
|
||||
public synchronized Map<String, Collection<OutputRecord>> getAllRecords() {
|
||||
Map<String, Collection<OutputRecord>> out = new TreeMap<String, Collection<OutputRecord>>();
|
||||
for (Map.Entry<String,RecordMap> recordEntry : bufferedData.entrySet()) {
|
||||
RecordMap recordMap = recordEntry.getValue();
|
||||
synchronized (recordMap) {
|
||||
List<OutputRecord> records = new ArrayList<OutputRecord>();
|
||||
Set<Entry<TagMap, MetricMap>> entrySet = recordMap.entrySet();
|
||||
for (Entry<TagMap, MetricMap> entry : entrySet) {
|
||||
OutputRecord outRec = new OutputRecord(entry.getKey(), entry.getValue());
|
||||
records.add(outRec);
|
||||
}
|
||||
out.put(recordEntry.getKey(), records);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sends a record to the metrics system.
|
||||
*/
|
||||
protected abstract void emitRecord(String contextName, String recordName,
|
||||
OutputRecord outRec) throws IOException;
|
||||
|
||||
/**
|
||||
* Called each period after all records have been emitted, this method does nothing.
|
||||
* Subclasses may override it in order to perform some kind of flush.
|
||||
*/
|
||||
protected void flush() throws IOException {
|
||||
}
|
||||
|
||||
/**
|
||||
* Called by MetricsRecordImpl.update(). Creates or updates a row in
|
||||
* the internal table of metric data.
|
||||
*/
|
||||
protected void update(MetricsRecordImpl record) {
|
||||
String recordName = record.getRecordName();
|
||||
TagMap tagTable = record.getTagTable();
|
||||
Map<String,MetricValue> metricUpdates = record.getMetricTable();
|
||||
|
||||
RecordMap recordMap = getRecordMap(recordName);
|
||||
synchronized (recordMap) {
|
||||
MetricMap metricMap = recordMap.get(tagTable);
|
||||
if (metricMap == null) {
|
||||
metricMap = new MetricMap();
|
||||
TagMap tagMap = new TagMap(tagTable); // clone tags
|
||||
recordMap.put(tagMap, metricMap);
|
||||
}
|
||||
|
||||
Set<Entry<String, MetricValue>> entrySet = metricUpdates.entrySet();
|
||||
for (Entry<String, MetricValue> entry : entrySet) {
|
||||
String metricName = entry.getKey ();
|
||||
MetricValue updateValue = entry.getValue ();
|
||||
Number updateNumber = updateValue.getNumber();
|
||||
Number currentNumber = metricMap.get(metricName);
|
||||
if (currentNumber == null || updateValue.isAbsolute()) {
|
||||
metricMap.put(metricName, updateNumber);
|
||||
}
|
||||
else {
|
||||
Number newNumber = sum(updateNumber, currentNumber);
|
||||
metricMap.put(metricName, newNumber);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private synchronized RecordMap getRecordMap(String recordName) {
|
||||
return bufferedData.get(recordName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds two numbers, coercing the second to the type of the first.
|
||||
*
|
||||
*/
|
||||
private Number sum(Number a, Number b) {
|
||||
if (a instanceof Integer) {
|
||||
return Integer.valueOf(a.intValue() + b.intValue());
|
||||
}
|
||||
else if (a instanceof Float) {
|
||||
return new Float(a.floatValue() + b.floatValue());
|
||||
}
|
||||
else if (a instanceof Short) {
|
||||
return Short.valueOf((short)(a.shortValue() + b.shortValue()));
|
||||
}
|
||||
else if (a instanceof Byte) {
|
||||
return Byte.valueOf((byte)(a.byteValue() + b.byteValue()));
|
||||
}
|
||||
else if (a instanceof Long) {
|
||||
return Long.valueOf((a.longValue() + b.longValue()));
|
||||
}
|
||||
else {
|
||||
// should never happen
|
||||
throw new MetricsException("Invalid number type");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Called by MetricsRecordImpl.remove(). Removes all matching rows in
|
||||
* the internal table of metric data. A row matches if it has the same
|
||||
* tag names and values as record, but it may also have additional
|
||||
* tags.
|
||||
*/
|
||||
protected void remove(MetricsRecordImpl record) {
|
||||
String recordName = record.getRecordName();
|
||||
TagMap tagTable = record.getTagTable();
|
||||
|
||||
RecordMap recordMap = getRecordMap(recordName);
|
||||
synchronized (recordMap) {
|
||||
Iterator<TagMap> it = recordMap.keySet().iterator();
|
||||
while (it.hasNext()) {
|
||||
TagMap rowTags = it.next();
|
||||
if (rowTags.containsAll(tagTable)) {
|
||||
it.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the timer period.
|
||||
*/
|
||||
@Override
|
||||
public int getPeriod() {
|
||||
return period;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the timer period
|
||||
*/
|
||||
protected void setPeriod(int period) {
|
||||
this.period = period;
|
||||
}
|
||||
|
||||
/**
|
||||
* If a period is set in the attribute passed in, override
|
||||
* the default with it.
|
||||
*/
|
||||
protected void parseAndSetPeriod(String attributeName) {
|
||||
String periodStr = getAttribute(attributeName);
|
||||
if (periodStr != null) {
|
||||
int period = 0;
|
||||
try {
|
||||
period = Integer.parseInt(periodStr);
|
||||
} catch (NumberFormatException nfe) {
|
||||
}
|
||||
if (period <= 0) {
|
||||
throw new MetricsException("Invalid period: " + periodStr);
|
||||
}
|
||||
setPeriod(period);
|
||||
}
|
||||
}
|
||||
}
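/*
 * Editor's sketch (not part of the removed file): the smallest useful
 * subclass of AbstractMetricsContext. Everything except the class name and
 * the log message is taken from the SPI described above; a real binding
 * would serialize outRec's tags and metrics instead of just logging.
 */
class LoggingMetricsContext extends AbstractMetricsContext {
  private static final org.apache.commons.logging.Log LOG =
      org.apache.commons.logging.LogFactory.getLog(LoggingMetricsContext.class);

  @Override
  public void init(String contextName, ContextFactory factory) {
    super.init(contextName, factory);
    parseAndSetPeriod("period");   // honour an optional contextName.period
  }

  @Override
  protected void emitRecord(String contextName, String recordName,
                            OutputRecord outRec) throws IOException {
    LOG.info("emit " + contextName + "." + recordName);
  }
}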
|
|
@@ -1,206 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.spi;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.lang.reflect.InvocationHandler;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Proxy;
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.metrics.ContextFactory;
|
||||
import org.apache.hadoop.metrics.MetricsContext;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
import org.apache.hadoop.metrics.MetricsUtil;
|
||||
import org.apache.hadoop.metrics.Updater;
|
||||
|
||||
/**
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public class CompositeContext extends AbstractMetricsContext {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(CompositeContext.class);
|
||||
private static final String ARITY_LABEL = "arity";
|
||||
private static final String SUB_FMT = "%s.sub%d";
|
||||
private final ArrayList<MetricsContext> subctxt =
|
||||
new ArrayList<MetricsContext>();
|
||||
|
||||
@InterfaceAudience.Private
|
||||
public CompositeContext() {
|
||||
}
|
||||
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
public void init(String contextName, ContextFactory factory) {
|
||||
super.init(contextName, factory);
|
||||
int nKids;
|
||||
try {
|
||||
String sKids = getAttribute(ARITY_LABEL);
|
||||
nKids = Integer.parseInt(sKids);
|
||||
} catch (Exception e) {
|
||||
LOG.error("Unable to initialize composite metric " + contextName +
|
||||
": could not init arity", e);
|
||||
return;
|
||||
}
|
||||
for (int i = 0; i < nKids; ++i) {
|
||||
MetricsContext ctxt = MetricsUtil.getContext(
|
||||
String.format(SUB_FMT, contextName, i), contextName);
|
||||
if (null != ctxt) {
|
||||
subctxt.add(ctxt);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@Override
|
||||
public MetricsRecord newRecord(String recordName) {
|
||||
return (MetricsRecord) Proxy.newProxyInstance(
|
||||
MetricsRecord.class.getClassLoader(),
|
||||
new Class[] { MetricsRecord.class },
|
||||
new MetricsRecordDelegator(recordName, subctxt));
|
||||
}
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@Override
|
||||
protected void emitRecord(String contextName, String recordName,
|
||||
OutputRecord outRec) throws IOException {
|
||||
for (MetricsContext ctxt : subctxt) {
|
||||
try {
|
||||
((AbstractMetricsContext)ctxt).emitRecord(
|
||||
contextName, recordName, outRec);
|
||||
if (contextName == null || recordName == null || outRec == null) {
|
||||
throw new IOException(contextName + ":" + recordName + ":" + outRec);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
LOG.warn("emitRecord failed: " + ctxt.getContextName(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@Override
|
||||
protected void flush() throws IOException {
|
||||
for (MetricsContext ctxt : subctxt) {
|
||||
try {
|
||||
((AbstractMetricsContext)ctxt).flush();
|
||||
} catch (IOException e) {
|
||||
LOG.warn("flush failed: " + ctxt.getContextName(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@Override
|
||||
public void startMonitoring() throws IOException {
|
||||
for (MetricsContext ctxt : subctxt) {
|
||||
try {
|
||||
ctxt.startMonitoring();
|
||||
} catch (IOException e) {
|
||||
LOG.warn("startMonitoring failed: " + ctxt.getContextName(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@Override
|
||||
public void stopMonitoring() {
|
||||
for (MetricsContext ctxt : subctxt) {
|
||||
ctxt.stopMonitoring();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return true if all subcontexts are monitoring.
|
||||
*/
|
||||
@InterfaceAudience.Private
|
||||
@Override
|
||||
public boolean isMonitoring() {
|
||||
boolean ret = true;
|
||||
for (MetricsContext ctxt : subctxt) {
|
||||
ret &= ctxt.isMonitoring();
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@Override
|
||||
public void close() {
|
||||
for (MetricsContext ctxt : subctxt) {
|
||||
ctxt.close();
|
||||
}
|
||||
}
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@Override
|
||||
public void registerUpdater(Updater updater) {
|
||||
for (MetricsContext ctxt : subctxt) {
|
||||
ctxt.registerUpdater(updater);
|
||||
}
|
||||
}
|
||||
|
||||
@InterfaceAudience.Private
|
||||
@Override
|
||||
public void unregisterUpdater(Updater updater) {
|
||||
for (MetricsContext ctxt : subctxt) {
|
||||
ctxt.unregisterUpdater(updater);
|
||||
}
|
||||
}
|
||||
|
||||
private static class MetricsRecordDelegator implements InvocationHandler {
|
||||
private static final Method m_getRecordName = initMethod();
|
||||
private static Method initMethod() {
|
||||
try {
|
||||
return MetricsRecord.class.getMethod("getRecordName", new Class[0]);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Internal error", e);
|
||||
}
|
||||
}
|
||||
|
||||
private final String recordName;
|
||||
private final ArrayList<MetricsRecord> subrecs;
|
||||
|
||||
MetricsRecordDelegator(String recordName, ArrayList<MetricsContext> ctxts) {
|
||||
this.recordName = recordName;
|
||||
this.subrecs = new ArrayList<MetricsRecord>(ctxts.size());
|
||||
for (MetricsContext ctxt : ctxts) {
|
||||
subrecs.add(ctxt.createRecord(recordName));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object invoke(Object p, Method m, Object[] args) throws Throwable {
|
||||
if (m_getRecordName.equals(m)) {
|
||||
return recordName;
|
||||
}
|
||||
assert Void.TYPE.equals(m.getReturnType());
|
||||
for (MetricsRecord rec : subrecs) {
|
||||
m.invoke(rec, args);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,58 +0,0 @@
|
|||
/*
|
||||
* MetricValue.java
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.metrics.spi;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
|
||||
/**
|
||||
* A Number that is either an absolute or an incremental amount.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public class MetricValue {
|
||||
|
||||
public static final boolean ABSOLUTE = false;
|
||||
public static final boolean INCREMENT = true;
|
||||
|
||||
private boolean isIncrement;
|
||||
private Number number;
|
||||
|
||||
/** Creates a new instance of MetricValue */
|
||||
public MetricValue(Number number, boolean isIncrement) {
|
||||
this.number = number;
|
||||
this.isIncrement = isIncrement;
|
||||
}
|
||||
|
||||
public boolean isIncrement() {
|
||||
return isIncrement;
|
||||
}
|
||||
|
||||
public boolean isAbsolute() {
|
||||
return !isIncrement;
|
||||
}
|
||||
|
||||
public Number getNumber() {
|
||||
return number;
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,304 +0,0 @@
|
|||
/*
|
||||
* MetricsRecordImpl.java
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.metrics.spi;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.metrics.MetricsException;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
|
||||
|
||||
/**
|
||||
* An implementation of MetricsRecord. Keeps a back-pointer to the context
|
||||
* from which it was created, and delegates back to it on <code>update</code>
|
||||
* and <code>remove()</code>.
|
||||
*
|
||||
* @deprecated Use {@link org.apache.hadoop.metrics2.impl.MetricsRecordImpl}
|
||||
* instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public class MetricsRecordImpl implements MetricsRecord {
|
||||
|
||||
private TagMap tagTable = new TagMap();
|
||||
private Map<String,MetricValue> metricTable = new LinkedHashMap<String,MetricValue>();
|
||||
|
||||
private String recordName;
|
||||
private AbstractMetricsContext context;
|
||||
|
||||
|
||||
/** Creates a new instance of MetricsRecordImpl */
|
||||
protected MetricsRecordImpl(String recordName, AbstractMetricsContext context)
|
||||
{
|
||||
this.recordName = recordName;
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the record name.
|
||||
*
|
||||
* @return the record name
|
||||
*/
|
||||
@Override
|
||||
public String getRecordName() {
|
||||
return recordName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named tag to the specified value.
|
||||
*
|
||||
* @param tagName name of the tag
|
||||
* @param tagValue new value of the tag
|
||||
* @throws MetricsException if the tagName conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void setTag(String tagName, String tagValue) {
|
||||
if (tagValue == null) {
|
||||
tagValue = "";
|
||||
}
|
||||
tagTable.put(tagName, tagValue);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named tag to the specified value.
|
||||
*
|
||||
* @param tagName name of the tag
|
||||
* @param tagValue new value of the tag
|
||||
* @throws MetricsException if the tagName conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void setTag(String tagName, int tagValue) {
|
||||
tagTable.put(tagName, Integer.valueOf(tagValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named tag to the specified value.
|
||||
*
|
||||
* @param tagName name of the tag
|
||||
* @param tagValue new value of the tag
|
||||
* @throws MetricsException if the tagName conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void setTag(String tagName, long tagValue) {
|
||||
tagTable.put(tagName, Long.valueOf(tagValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named tag to the specified value.
|
||||
*
|
||||
* @param tagName name of the tag
|
||||
* @param tagValue new value of the tag
|
||||
* @throws MetricsException if the tagName conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void setTag(String tagName, short tagValue) {
|
||||
tagTable.put(tagName, Short.valueOf(tagValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named tag to the specified value.
|
||||
*
|
||||
* @param tagName name of the tag
|
||||
* @param tagValue new value of the tag
|
||||
* @throws MetricsException if the tagName conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void setTag(String tagName, byte tagValue) {
|
||||
tagTable.put(tagName, Byte.valueOf(tagValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes any tag of the specified name.
|
||||
*/
|
||||
@Override
|
||||
public void removeTag(String tagName) {
|
||||
tagTable.remove(tagName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named metric to the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue new value of the metric
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void setMetric(String metricName, int metricValue) {
|
||||
setAbsolute(metricName, Integer.valueOf(metricValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named metric to the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue new value of the metric
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void setMetric(String metricName, long metricValue) {
|
||||
setAbsolute(metricName, Long.valueOf(metricValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named metric to the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue new value of the metric
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void setMetric(String metricName, short metricValue) {
|
||||
setAbsolute(metricName, Short.valueOf(metricValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named metric to the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue new value of the metric
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void setMetric(String metricName, byte metricValue) {
|
||||
setAbsolute(metricName, Byte.valueOf(metricValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the named metric to the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue new value of the metric
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void setMetric(String metricName, float metricValue) {
|
||||
setAbsolute(metricName, new Float(metricValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Increments the named metric by the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue incremental value
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void incrMetric(String metricName, int metricValue) {
|
||||
setIncrement(metricName, Integer.valueOf(metricValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Increments the named metric by the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue incremental value
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void incrMetric(String metricName, long metricValue) {
|
||||
setIncrement(metricName, Long.valueOf(metricValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Increments the named metric by the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue incremental value
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void incrMetric(String metricName, short metricValue) {
|
||||
setIncrement(metricName, Short.valueOf(metricValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Increments the named metric by the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue incremental value
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void incrMetric(String metricName, byte metricValue) {
|
||||
setIncrement(metricName, Byte.valueOf(metricValue));
|
||||
}
|
||||
|
||||
/**
|
||||
* Increments the named metric by the specified value.
|
||||
*
|
||||
* @param metricName name of the metric
|
||||
* @param metricValue incremental value
|
||||
* @throws MetricsException if the metricName or the type of the metricValue
|
||||
* conflicts with the configuration
|
||||
*/
|
||||
@Override
|
||||
public void incrMetric(String metricName, float metricValue) {
|
||||
setIncrement(metricName, Float.valueOf(metricValue));
|
||||
}
|
||||
|
||||
private void setAbsolute(String metricName, Number metricValue) {
|
||||
metricTable.put(metricName, new MetricValue(metricValue, MetricValue.ABSOLUTE));
|
||||
}
|
||||
|
||||
private void setIncrement(String metricName, Number metricValue) {
|
||||
metricTable.put(metricName, new MetricValue(metricValue, MetricValue.INCREMENT));
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the table of buffered data which is to be sent periodically.
|
||||
* If the tag values match an existing row, that row is updated;
|
||||
* otherwise, a new row is added.
|
||||
*/
|
||||
@Override
|
||||
public void update() {
|
||||
context.update(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the row, if it exists, in the buffered data table having tags
|
||||
* that equal the tags that have been set on this record.
|
||||
*/
|
||||
@Override
|
||||
public void remove() {
|
||||
context.remove(this);
|
||||
}
|
||||
|
||||
TagMap getTagTable() {
|
||||
return tagTable;
|
||||
}
|
||||
|
||||
Map<String, MetricValue> getMetricTable() {
|
||||
return metricTable;
|
||||
}
|
||||
}
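The record above is normally obtained from a context rather than constructed directly. A minimal usage sketch, assuming the v1 MetricsUtil.getContext/createRecord helpers and an illustrative "dfs"/"example" record name:

import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;

public class RecordSketch {
  public static void main(String[] args) {
    // Look up the "dfs" context and create a record named "example" in it.
    MetricsContext context = MetricsUtil.getContext("dfs");
    MetricsRecord record = MetricsUtil.createRecord(context, "example");

    record.setTag("hostName", "node-1");     // tags identify the row
    record.setMetric("bytesWritten", 1024L); // absolute value
    record.incrMetric("filesCreated", 1);    // incremental value

    // Copy the buffered values into the context's table; they are emitted on
    // the context's next period (or never, for the null contexts).
    record.update();
  }
}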
|
|
@ -1,61 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.spi;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.metrics.ContextFactory;
|
||||
import org.apache.hadoop.metrics.MetricsServlet;
|
||||
|
||||
/**
|
||||
* A MetricsContext that does not emit data, but, unlike NullContextWithUpdateThread,
|
||||
* does save it for retrieval with getAllRecords().
|
||||
*
|
||||
* This is useful if you want to support {@link MetricsServlet}, but
|
||||
* not emit metrics in any other way.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public class NoEmitMetricsContext extends AbstractMetricsContext {
|
||||
|
||||
private static final String PERIOD_PROPERTY = "period";
|
||||
|
||||
/** Creates a new instance of NoEmitMetricsContext */
|
||||
@InterfaceAudience.Private
|
||||
public NoEmitMetricsContext() {
|
||||
}
|
||||
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
public void init(String contextName, ContextFactory factory) {
|
||||
super.init(contextName, factory);
|
||||
parseAndSetPeriod(PERIOD_PROPERTY);
|
||||
}
|
||||
|
||||
/**
|
||||
* Do-nothing version of emitRecord
|
||||
*/
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
protected void emitRecord(String contextName, String recordName,
|
||||
OutputRecord outRec) {
|
||||
}
|
||||
}
|
|
@ -1,74 +0,0 @@
|
|||
/*
|
||||
* NullContext.java
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.metrics.spi;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
|
||||
/**
|
||||
* Null metrics context: a metrics context which does nothing. Used as the
|
||||
* default context, so that no performance data is emitted if no configuration
|
||||
* data is found.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public class NullContext extends AbstractMetricsContext {
|
||||
|
||||
/** Creates a new instance of NullContext */
|
||||
@InterfaceAudience.Private
|
||||
public NullContext() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Do-nothing version of startMonitoring
|
||||
*/
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
public void startMonitoring() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Do-nothing version of emitRecord
|
||||
*/
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
protected void emitRecord(String contextName, String recordName,
|
||||
OutputRecord outRec)
|
||||
{}
|
||||
|
||||
/**
|
||||
* Do-nothing version of update
|
||||
*/
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
protected void update(MetricsRecordImpl record) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Do-nothing version of remove
|
||||
*/
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
protected void remove(MetricsRecordImpl record) {
|
||||
}
|
||||
}
|
|
@ -1,82 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.metrics.spi;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.metrics.ContextFactory;
|
||||
|
||||
/**
|
||||
* A null context which runs a periodic update thread once monitoring is
|
||||
* started, so that registered updaters keep being called and the sampled
|
||||
* data stays current.
|
||||
* In all other respects it behaves like the null context: no data is emitted.
|
||||
* This is suitable for monitoring systems such as JMX, which read the metrics
|
||||
* only when a client requests them.
|
||||
*
|
||||
* The default implementations of startMonitoring and stopMonitoring in
|
||||
* AbstractMetricsContext are sufficient.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public class NullContextWithUpdateThread extends AbstractMetricsContext {
|
||||
|
||||
private static final String PERIOD_PROPERTY = "period";
|
||||
|
||||
/** Creates a new instance of NullContextWithUpdateThread */
|
||||
@InterfaceAudience.Private
|
||||
public NullContextWithUpdateThread() {
|
||||
}
|
||||
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
public void init(String contextName, ContextFactory factory) {
|
||||
super.init(contextName, factory);
|
||||
parseAndSetPeriod(PERIOD_PROPERTY);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Do-nothing version of emitRecord
|
||||
*/
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
protected void emitRecord(String contextName, String recordName,
|
||||
OutputRecord outRec)
|
||||
{}
|
||||
|
||||
/**
|
||||
* Do-nothing version of update
|
||||
*/
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
protected void update(MetricsRecordImpl record) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Do-nothing version of remove
|
||||
*/
|
||||
@Override
|
||||
@InterfaceAudience.Private
|
||||
protected void remove(MetricsRecordImpl record) {
|
||||
}
|
||||
}
|
|
@ -1,93 +0,0 @@
|
|||
/*
|
||||
* OutputRecord.java
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.metrics.spi;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
|
||||
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
|
||||
|
||||
/**
|
||||
* Represents a record of metric data to be sent to a metrics system.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public class OutputRecord {
|
||||
|
||||
private TagMap tagMap;
|
||||
private MetricMap metricMap;
|
||||
|
||||
/** Creates a new instance of OutputRecord */
|
||||
OutputRecord(TagMap tagMap, MetricMap metricMap) {
|
||||
this.tagMap = tagMap;
|
||||
this.metricMap = metricMap;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the set of tag names
|
||||
*/
|
||||
public Set<String> getTagNames() {
|
||||
return Collections.unmodifiableSet(tagMap.keySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the tag object, which can be a String, Integer, Short or Byte.
|
||||
*
|
||||
* @return the tag value, or null if there is no such tag
|
||||
*/
|
||||
public Object getTag(String name) {
|
||||
return tagMap.get(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the set of metric names.
|
||||
*/
|
||||
public Set<String> getMetricNames() {
|
||||
return Collections.unmodifiableSet(metricMap.keySet());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the metric object which can be a Float, Integer, Short or Byte.
|
||||
*/
|
||||
public Number getMetric(String name) {
|
||||
return metricMap.get(name);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns a copy of this record's tags.
|
||||
*/
|
||||
public TagMap getTagsCopy() {
|
||||
return new TagMap(tagMap);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a copy of this record's metrics.
|
||||
*/
|
||||
public MetricMap getMetricsCopy() {
|
||||
return new MetricMap(metricMap);
|
||||
}
|
||||
}
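A small sketch of reading these records back, e.g. from a NoEmitMetricsContext; it assumes the getAllRecords() accessor of AbstractMetricsContext, which returns the buffered records keyed by record name:

import java.util.Collection;
import java.util.Map;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
import org.apache.hadoop.metrics.spi.OutputRecord;

class OutputRecordDump {
  // Prints every buffered tag and metric held by the given context.
  static void dump(AbstractMetricsContext context) {
    Map<String, Collection<OutputRecord>> all = context.getAllRecords();
    for (Map.Entry<String, Collection<OutputRecord>> entry : all.entrySet()) {
      for (OutputRecord rec : entry.getValue()) {
        for (String tag : rec.getTagNames()) {
          System.out.println(entry.getKey() + " tag " + tag + "=" + rec.getTag(tag));
        }
        for (String name : rec.getMetricNames()) {
          System.out.println(entry.getKey() + " " + name + "=" + rec.getMetric(name));
        }
      }
    }
  }
}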
|
|
@ -1,68 +0,0 @@
|
|||
/*
|
||||
* Util.java
|
||||
*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
package org.apache.hadoop.metrics.spi;
|
||||
|
||||
import java.net.InetSocketAddress;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.net.NetUtils;
|
||||
|
||||
/**
|
||||
* Static utility methods
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Evolving
|
||||
public class Util {
|
||||
|
||||
/**
|
||||
* This class is not intended to be instantiated
|
||||
*/
|
||||
private Util() {}
|
||||
|
||||
/**
|
||||
* Parses a space and/or comma separated sequence of server specifications
|
||||
* of the form <i>hostname</i> or <i>hostname:port</i>. If
|
||||
* the specs string is null, defaults to localhost:defaultPort.
|
||||
*
|
||||
* @return a list of InetSocketAddress objects.
|
||||
*/
|
||||
public static List<InetSocketAddress> parse(String specs, int defaultPort) {
|
||||
List<InetSocketAddress> result = new ArrayList<InetSocketAddress>(1);
|
||||
if (specs == null) {
|
||||
result.add(new InetSocketAddress("localhost", defaultPort));
|
||||
}
|
||||
else {
|
||||
String[] specStrings = specs.split("[ ,]+");
|
||||
for (String specString : specStrings) {
|
||||
result.add(NetUtils.createSocketAddr(specString, defaultPort));
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
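A quick illustration of the parse contract (the host names and port 8649 are arbitrary examples):

import java.net.InetSocketAddress;
import java.util.List;
import org.apache.hadoop.metrics.spi.Util;

class UtilParseExample {
  public static void main(String[] args) {
    // Mixed space/comma separators are fine; entries without a port get the default.
    List<InetSocketAddress> servers =
        Util.parse("gmond1:8649, gmond2 gmond3:8650", 8649);
    for (InetSocketAddress addr : servers) {
      System.out.println(addr);  // gmond1:8649, gmond2:8649, gmond3:8650
    }
    // A null spec falls back to a single localhost:8649 entry.
    System.out.println(Util.parse(null, 8649));
  }
}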
|
|
@ -1,36 +0,0 @@
|
|||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
|
||||
<html>
|
||||
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<head>
|
||||
<title>org.apache.hadoop.metrics.spi</title>
|
||||
</head>
|
||||
<body>
|
||||
The Service Provider Interface for the Metrics API. This package provides
|
||||
an interface allowing a variety of metrics reporting implementations to be
|
||||
plugged in to the Metrics API. Examples of such implementations can be found
|
||||
in the packages <code>org.apache.hadoop.metrics.file</code> and
|
||||
<code>org.apache.hadoop.metrics.ganglia</code>.<p/>
|
||||
|
||||
Plugging in an implementation involves writing a concrete subclass of
|
||||
<code>AbstractMetricsContext</code>. The subclass should get its
|
||||
configuration information using the <code>getAttribute(<i>attributeName</i>)</code>
|
||||
method.
|
||||
</body>
|
||||
</html>
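A minimal sketch of such a plug-in, assuming the protected init/parseAndSetPeriod/getAttribute hooks of AbstractMetricsContext; the "prefix" attribute is invented for illustration:

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
import org.apache.hadoop.metrics.spi.OutputRecord;

// A toy context that prints each record to stdout. Real attribute values
// come from the metrics configuration file.
public class StdoutContext extends AbstractMetricsContext {
  private static final String PERIOD_PROPERTY = "period";
  private String prefix;

  public StdoutContext() {
  }

  @Override
  public void init(String contextName, ContextFactory factory) {
    super.init(contextName, factory);
    parseAndSetPeriod(PERIOD_PROPERTY);
    prefix = getAttribute("prefix");   // null when not configured
  }

  @Override
  protected void emitRecord(String contextName, String recordName,
                            OutputRecord outRec) {
    StringBuilder sb = new StringBuilder();
    if (prefix != null) {
      sb.append(prefix).append(' ');
    }
    sb.append(contextName).append('.').append(recordName);
    for (String metricName : outRec.getMetricNames()) {
      sb.append(' ').append(metricName).append('=')
        .append(outRec.getMetric(metricName));
    }
    System.out.println(sb);
  }
}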
|
|
@ -1,92 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.util;
|
||||
|
||||
import java.lang.management.ManagementFactory;
|
||||
|
||||
import javax.management.InstanceNotFoundException;
|
||||
import javax.management.MBeanServer;
|
||||
import javax.management.MalformedObjectNameException;
|
||||
import javax.management.ObjectName;
|
||||
import javax.management.InstanceAlreadyExistsException;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
|
||||
|
||||
/**
|
||||
* This util class provides a method to register an MBean using
|
||||
* our standard naming convention as described in the doc
|
||||
* for {@link #registerMBean(String, String, Object)}.
|
||||
*
|
||||
* @deprecated Use {@link org.apache.hadoop.metrics2.util.MBeans} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
||||
public class MBeanUtil {
|
||||
|
||||
/**
|
||||
* Register the MBean using our standard MBeanName format
|
||||
* "hadoop:service=<serviceName>,name=<nameName>"
|
||||
* where <serviceName> and <nameName> are the supplied parameters.
|
||||
*
|
||||
* @param serviceName
|
||||
* @param nameName
|
||||
* @param theMbean - the MBean to register
|
||||
* @return the name used to register the MBean
|
||||
*/
|
||||
static public ObjectName registerMBean(final String serviceName,
|
||||
final String nameName,
|
||||
final Object theMbean) {
|
||||
final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
|
||||
ObjectName name = getMBeanName(serviceName, nameName);
|
||||
try {
|
||||
mbs.registerMBean(theMbean, name);
|
||||
return name;
|
||||
} catch (InstanceAlreadyExistsException ie) {
|
||||
// Ignore if instance already exists
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
static public void unregisterMBean(ObjectName mbeanName) {
|
||||
final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
|
||||
if (mbeanName == null)
|
||||
return;
|
||||
try {
|
||||
mbs.unregisterMBean(mbeanName);
|
||||
} catch (InstanceNotFoundException e ) {
|
||||
// ignore
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
static private ObjectName getMBeanName(final String serviceName,
|
||||
final String nameName) {
|
||||
ObjectName name = null;
|
||||
try {
|
||||
name = new ObjectName("hadoop:" +
|
||||
"service=" + serviceName + ",name=" + nameName);
|
||||
} catch (MalformedObjectNameException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return name;
|
||||
}
|
||||
}
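A hedged usage sketch; the FooBar MBean below is purely illustrative:

import javax.management.ObjectName;
import org.apache.hadoop.metrics.util.MBeanUtil;

// Standard JMX pattern: an interface named <Impl>MBean plus the implementation.
interface FooBarMBean {
  int getRequestCount();
}

public class FooBar implements FooBarMBean {
  private volatile int requestCount;

  @Override
  public int getRequestCount() {
    return requestCount;
  }

  public static void main(String[] args) {
    // Registers under "hadoop:service=ServiceFoo,name=FooBar".
    ObjectName name = MBeanUtil.registerMBean("ServiceFoo", "FooBar", new FooBar());
    // ... later, on shutdown:
    MBeanUtil.unregisterMBean(name);
  }
}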
|
|
@ -1,51 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.util;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
|
||||
/**
|
||||
*
|
||||
* This is the base class for all metrics.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.Private
|
||||
public abstract class MetricsBase {
|
||||
public static final String NO_DESCRIPTION = "NoDescription";
|
||||
final private String name;
|
||||
final private String description;
|
||||
|
||||
protected MetricsBase(final String nam) {
|
||||
name = nam;
|
||||
description = NO_DESCRIPTION;
|
||||
}
|
||||
|
||||
protected MetricsBase(final String nam, final String desc) {
|
||||
name = nam;
|
||||
description = desc;
|
||||
}
|
||||
|
||||
public abstract void pushMetric(final MetricsRecord mr);
|
||||
|
||||
public String getName() { return name; }
|
||||
public String getDescription() { return description; }
|
||||
|
||||
}
|
|
@ -1,229 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.util;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
import javax.management.Attribute;
|
||||
import javax.management.AttributeList;
|
||||
import javax.management.AttributeNotFoundException;
|
||||
import javax.management.DynamicMBean;
|
||||
import javax.management.InvalidAttributeValueException;
|
||||
import javax.management.MBeanAttributeInfo;
|
||||
import javax.management.MBeanException;
|
||||
import javax.management.MBeanInfo;
|
||||
import javax.management.MBeanOperationInfo;
|
||||
import javax.management.ReflectionException;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.metrics.MetricsUtil;
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* This abstract base class facilitates creating dynamic mbeans automatically from
|
||||
* metrics.
|
||||
* The metrics constructors register the metrics in a registry.
|
||||
* Different categories of metrics should be in different classes with their own
|
||||
* registry (as in NameNodeMetrics and DataNodeMetrics).
|
||||
* Then the MBean can be created passing the registry to the constructor.
|
||||
* The MBean should then be registered under an MBean name (example):
|
||||
* MetricsHolder myMetrics = new MetricsHolder(); // has metrics and registry
|
||||
* MetricsTestMBean theMBean = new MetricsTestMBean(myMetrics.mregistry);
|
||||
* ObjectName mbeanName = MBeanUtil.registerMBean("ServiceFoo",
|
||||
* "TestStatistics", theMBean);
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
||||
public abstract class MetricsDynamicMBeanBase implements DynamicMBean {
|
||||
private final static String AVG_TIME = "AvgTime";
|
||||
private final static String MIN_TIME = "MinTime";
|
||||
private final static String MAX_TIME = "MaxTime";
|
||||
private final static String NUM_OPS = "NumOps";
|
||||
private final static String RESET_ALL_MIN_MAX_OP = "resetAllMinMax";
|
||||
private MetricsRegistry metricsRegistry;
|
||||
private MBeanInfo mbeanInfo;
|
||||
private Map<String, MetricsBase> metricsRateAttributeMod;
|
||||
private int numEntriesInRegistry = 0;
|
||||
private String mbeanDescription;
|
||||
|
||||
protected MetricsDynamicMBeanBase(final MetricsRegistry mr, final String aMBeanDescription) {
|
||||
metricsRegistry = mr;
|
||||
mbeanDescription = aMBeanDescription;
|
||||
metricsRateAttributeMod = new ConcurrentHashMap<String, MetricsBase>();
|
||||
createMBeanInfo();
|
||||
}
|
||||
|
||||
private void updateMbeanInfoIfMetricsListChanged() {
|
||||
if (numEntriesInRegistry != metricsRegistry.size())
|
||||
createMBeanInfo();
|
||||
}
|
||||
|
||||
private void createMBeanInfo() {
|
||||
boolean needsMinMaxResetOperation = false;
|
||||
List<MBeanAttributeInfo> attributesInfo = new ArrayList<MBeanAttributeInfo>();
|
||||
MBeanOperationInfo[] operationsInfo = null;
|
||||
numEntriesInRegistry = metricsRegistry.size();
|
||||
|
||||
for (MetricsBase o : metricsRegistry.getMetricsList()) {
|
||||
|
||||
if (MetricsTimeVaryingRate.class.isInstance(o)) {
|
||||
// For each rate metric there are 4 derived attributes
|
||||
attributesInfo.add(new MBeanAttributeInfo(o.getName() + NUM_OPS, "java.lang.Integer",
|
||||
o.getDescription(), true, false, false));
|
||||
attributesInfo.add(new MBeanAttributeInfo(o.getName() + AVG_TIME, "java.lang.Long",
|
||||
o.getDescription(), true, false, false));
|
||||
attributesInfo.add(new MBeanAttributeInfo(o.getName() + MIN_TIME, "java.lang.Long",
|
||||
o.getDescription(), true, false, false));
|
||||
attributesInfo.add(new MBeanAttributeInfo(o.getName() + MAX_TIME, "java.lang.Long",
|
||||
o.getDescription(), true, false, false));
|
||||
needsMinMaxResetOperation = true; // the min and max can be reset.
|
||||
|
||||
// Note the special attributes (AVG_TIME, MIN_TIME, ..) are derived from metrics
|
||||
// Rather than check for the suffix we store them in a map.
|
||||
metricsRateAttributeMod.put(o.getName() + NUM_OPS, o);
|
||||
metricsRateAttributeMod.put(o.getName() + AVG_TIME, o);
|
||||
metricsRateAttributeMod.put(o.getName() + MIN_TIME, o);
|
||||
metricsRateAttributeMod.put(o.getName() + MAX_TIME, o);
|
||||
|
||||
} else if ( MetricsIntValue.class.isInstance(o) || MetricsTimeVaryingInt.class.isInstance(o) ) {
|
||||
attributesInfo.add(new MBeanAttributeInfo(o.getName(), "java.lang.Integer",
|
||||
o.getDescription(), true, false, false));
|
||||
} else if ( MetricsLongValue.class.isInstance(o) || MetricsTimeVaryingLong.class.isInstance(o) ) {
|
||||
attributesInfo.add(new MBeanAttributeInfo(o.getName(), "java.lang.Long",
|
||||
o.getDescription(), true, false, false));
|
||||
} else {
|
||||
MetricsUtil.LOG.error("unknown metrics type: " + o.getClass().getName());
|
||||
}
|
||||
|
||||
if (needsMinMaxResetOperation) {
|
||||
operationsInfo = new MBeanOperationInfo[] {
|
||||
new MBeanOperationInfo(RESET_ALL_MIN_MAX_OP, "Reset (zero) All Min Max",
|
||||
null, "void", MBeanOperationInfo.ACTION) };
|
||||
}
|
||||
}
|
||||
MBeanAttributeInfo[] attrArray = new MBeanAttributeInfo[attributesInfo.size()];
|
||||
mbeanInfo = new MBeanInfo(this.getClass().getName(), mbeanDescription,
|
||||
attributesInfo.toArray(attrArray), null, operationsInfo, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getAttribute(String attributeName) throws AttributeNotFoundException,
|
||||
MBeanException, ReflectionException {
|
||||
if (attributeName == null || attributeName.isEmpty())
|
||||
throw new IllegalArgumentException();
|
||||
|
||||
updateMbeanInfoIfMetricsListChanged();
|
||||
|
||||
Object o = metricsRateAttributeMod.get(attributeName);
|
||||
if (o == null) {
|
||||
o = metricsRegistry.get(attributeName);
|
||||
}
|
||||
if (o == null)
|
||||
throw new AttributeNotFoundException();
|
||||
|
||||
if (o instanceof MetricsIntValue)
|
||||
return ((MetricsIntValue) o).get();
|
||||
else if (o instanceof MetricsLongValue)
|
||||
return ((MetricsLongValue) o).get();
|
||||
else if (o instanceof MetricsTimeVaryingInt)
|
||||
return ((MetricsTimeVaryingInt) o).getPreviousIntervalValue();
|
||||
else if (o instanceof MetricsTimeVaryingLong)
|
||||
return ((MetricsTimeVaryingLong) o).getPreviousIntervalValue();
|
||||
else if (o instanceof MetricsTimeVaryingRate) {
|
||||
MetricsTimeVaryingRate or = (MetricsTimeVaryingRate) o;
|
||||
if (attributeName.endsWith(NUM_OPS))
|
||||
return or.getPreviousIntervalNumOps();
|
||||
else if (attributeName.endsWith(AVG_TIME))
|
||||
return or.getPreviousIntervalAverageTime();
|
||||
else if (attributeName.endsWith(MIN_TIME))
|
||||
return or.getMinTime();
|
||||
else if (attributeName.endsWith(MAX_TIME))
|
||||
return or.getMaxTime();
|
||||
else {
|
||||
MetricsUtil.LOG.error("Unexpected attribute suffix");
|
||||
throw new AttributeNotFoundException();
|
||||
}
|
||||
} else {
|
||||
MetricsUtil.LOG.error("unknown metrics type: " + o.getClass().getName());
|
||||
throw new AttributeNotFoundException();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public AttributeList getAttributes(String[] attributeNames) {
|
||||
if (attributeNames == null || attributeNames.length == 0)
|
||||
throw new IllegalArgumentException();
|
||||
|
||||
updateMbeanInfoIfMetricsListChanged();
|
||||
|
||||
AttributeList result = new AttributeList(attributeNames.length);
|
||||
for (String iAttributeName : attributeNames) {
|
||||
try {
|
||||
Object value = getAttribute(iAttributeName);
|
||||
result.add(new Attribute(iAttributeName, value));
|
||||
} catch (Exception e) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MBeanInfo getMBeanInfo() {
|
||||
return mbeanInfo;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object invoke(String actionName, Object[] parms, String[] signature)
|
||||
throws MBeanException, ReflectionException {
|
||||
|
||||
if (actionName == null || actionName.isEmpty())
|
||||
throw new IllegalArgumentException();
|
||||
|
||||
|
||||
// Right now we support only one fixed operation (if it applies)
|
||||
if (!(actionName.equals(RESET_ALL_MIN_MAX_OP)) ||
|
||||
mbeanInfo.getOperations().length != 1) {
|
||||
throw new ReflectionException(new NoSuchMethodException(actionName));
|
||||
}
|
||||
for (MetricsBase m : metricsRegistry.getMetricsList()) {
|
||||
if ( MetricsTimeVaryingRate.class.isInstance(m) ) {
|
||||
MetricsTimeVaryingRate.class.cast(m).resetMinMax();
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setAttribute(Attribute attribute)
|
||||
throws AttributeNotFoundException, InvalidAttributeValueException,
|
||||
MBeanException, ReflectionException {
|
||||
throw new ReflectionException(new NoSuchMethodException("set" + attribute));
|
||||
}
|
||||
|
||||
@Override
|
||||
public AttributeList setAttributes(AttributeList attributes) {
|
||||
return null;
|
||||
}
|
||||
}
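A sketch of the registration pattern described in the class comment above; the MyStats class and the RpcTime metric are invented names:

import javax.management.ObjectName;
import org.apache.hadoop.metrics.util.MBeanUtil;
import org.apache.hadoop.metrics.util.MetricsDynamicMBeanBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;

public class MyStats extends MetricsDynamicMBeanBase {
  public MyStats(MetricsRegistry registry) {
    super(registry, "Example statistics");
  }

  public static void main(String[] args) throws Exception {
    MetricsRegistry registry = new MetricsRegistry();
    // A rate metric shows up as four derived attributes: RpcTimeNumOps,
    // RpcTimeAvgTime, RpcTimeMinTime and RpcTimeMaxTime.
    MetricsTimeVaryingRate rpcTime = new MetricsTimeVaryingRate("RpcTime", registry);
    rpcTime.inc(10);  // one operation that took 10 ms

    MyStats mbean = new MyStats(registry);
    ObjectName name = MBeanUtil.registerMBean("ServiceFoo", "MyStats", mbean);

    // JMX clients (or the bean itself) read the derived attributes by name.
    System.out.println(mbean.getAttribute("RpcTimeMinTime"));  // 10
    MBeanUtil.unregisterMBean(name);
  }
}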
|
|
@ -1,106 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.util;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
/**
|
||||
* The MetricsIntValue class is for a metric that is not time-varying
|
||||
* but changes only when it is set.
|
||||
* Each time its value is set, it is published only *once* at the next update
|
||||
* call.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
||||
public class MetricsIntValue extends MetricsBase {
|
||||
|
||||
private static final Log LOG =
|
||||
LogFactory.getLog("org.apache.hadoop.metrics.util");
|
||||
|
||||
private int value;
|
||||
private boolean changed;
|
||||
|
||||
|
||||
/**
|
||||
* Constructor - create a new metric
|
||||
* @param nam the name of the metrics to be used to publish the metric
|
||||
* @param registry - where the metrics object will be registered
|
||||
*/
|
||||
public MetricsIntValue(final String nam, final MetricsRegistry registry, final String description) {
|
||||
super(nam, description);
|
||||
value = 0;
|
||||
changed = false;
|
||||
registry.add(nam, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor - create a new metric
|
||||
* @param nam the name of the metrics to be used to publish the metric
|
||||
* @param registry - where the metrics object will be registered
|
||||
* A description of {@link #NO_DESCRIPTION} is used
|
||||
*/
|
||||
public MetricsIntValue(final String nam, MetricsRegistry registry) {
|
||||
this(nam, registry, NO_DESCRIPTION);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Set the value
|
||||
* @param newValue
|
||||
*/
|
||||
public synchronized void set(final int newValue) {
|
||||
value = newValue;
|
||||
changed = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get value
|
||||
* @return the value last set
|
||||
*/
|
||||
public synchronized int get() {
|
||||
return value;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Push the metric to the mr.
|
||||
* The metric is pushed only if it was updated since last push
|
||||
*
|
||||
* Note this does NOT push to JMX
|
||||
* (JMX gets the info via {@link #get()})
|
||||
*
|
||||
* @param mr
|
||||
*/
|
||||
@Override
|
||||
public synchronized void pushMetric(final MetricsRecord mr) {
|
||||
if (changed) {
|
||||
try {
|
||||
mr.setMetric(getName(), value);
|
||||
} catch (Exception e) {
|
||||
LOG.info("pushMetric failed for " + getName() + "\n", e);
|
||||
}
|
||||
}
|
||||
changed = false;
|
||||
}
|
||||
}
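A short usage sketch, again assuming the v1 MetricsUtil helpers for obtaining a record:

import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.util.MetricsIntValue;
import org.apache.hadoop.metrics.util.MetricsRegistry;

class GaugeSketch {
  public static void main(String[] args) {
    MetricsRegistry registry = new MetricsRegistry();
    MetricsIntValue pendingBlocks =
        new MetricsIntValue("PendingBlocks", registry, "blocks waiting");

    pendingBlocks.set(17);  // the value changes only when set() is called

    MetricsContext context = MetricsUtil.getContext("dfs");
    MetricsRecord record = MetricsUtil.createRecord(context, "namenode");
    pendingBlocks.pushMetric(record);  // published once, since it changed
    pendingBlocks.pushMetric(record);  // no-op until set() is called again
    record.update();
  }
}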
|
|
@ -1,93 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.util;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
|
||||
|
||||
/**
|
||||
* The MetricsLongValue class is for a metric that is not time-varying
|
||||
* but changes only when it is set.
|
||||
* Each time its value is set, it is published only *once* at the next update
|
||||
* call.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
||||
public class MetricsLongValue extends MetricsBase{
|
||||
private long value;
|
||||
private boolean changed;
|
||||
|
||||
/**
|
||||
* Constructor - create a new metric
|
||||
* @param nam the name of the metrics to be used to publish the metric
|
||||
* @param registry - where the metrics object will be registered
|
||||
*/
|
||||
public MetricsLongValue(final String nam, final MetricsRegistry registry, final String description) {
|
||||
super(nam, description);
|
||||
value = 0;
|
||||
changed = false;
|
||||
registry.add(nam, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor - create a new metric
|
||||
* @param nam the name of the metrics to be used to publish the metric
|
||||
* @param registry - where the metrics object will be registered
|
||||
* A description of {@link #NO_DESCRIPTION} is used
|
||||
*/
|
||||
public MetricsLongValue(final String nam, MetricsRegistry registry) {
|
||||
this(nam, registry, NO_DESCRIPTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the value
|
||||
* @param newValue
|
||||
*/
|
||||
public synchronized void set(final long newValue) {
|
||||
value = newValue;
|
||||
changed = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get value
|
||||
* @return the value last set
|
||||
*/
|
||||
public synchronized long get() {
|
||||
return value;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Push the metric to the mr.
|
||||
* The metric is pushed only if it was updated since last push
|
||||
*
|
||||
* Note this does NOT push to JMX
|
||||
* (JMX gets the info via {@link #get()})
|
||||
*
|
||||
* @param mr
|
||||
*/
|
||||
@Override
|
||||
public synchronized void pushMetric(final MetricsRecord mr) {
|
||||
if (changed)
|
||||
mr.setMetric(getName(), value);
|
||||
changed = false;
|
||||
}
|
||||
}
|
|
@ -1,90 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.util;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
|
||||
/**
|
||||
*
|
||||
* This is the registry for metrics.
|
||||
* Related set of metrics should be declared in a holding class and registered
|
||||
* in a registry for those metrics, which is also stored in the holding class.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
||||
public class MetricsRegistry {
|
||||
private ConcurrentHashMap<String, MetricsBase> metricsList =
|
||||
new ConcurrentHashMap<String, MetricsBase>();
|
||||
|
||||
public MetricsRegistry() {
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return number of metrics in the registry
|
||||
*/
|
||||
public int size() {
|
||||
return metricsList.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a new metric to the registry
|
||||
* @param metricsName - the name
|
||||
* @param theMetricsObj - the metrics
|
||||
* @throws IllegalArgumentException if a name is already registered
|
||||
*/
|
||||
public void add(final String metricsName, final MetricsBase theMetricsObj) {
|
||||
if (metricsList.putIfAbsent(metricsName, theMetricsObj) != null) {
|
||||
throw new IllegalArgumentException("Duplicate metricsName:" +
|
||||
metricsName);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @param metricsName
|
||||
* @return the metrics if there is one registered by the supplied name.
|
||||
* Returns null if none is registered
|
||||
*/
|
||||
public MetricsBase get(final String metricsName) {
|
||||
return metricsList.get(metricsName);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
*
|
||||
* @return the list of metrics names
|
||||
*/
|
||||
public Collection<String> getKeyList() {
|
||||
return metricsList.keySet();
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return the list of metrics
|
||||
*/
|
||||
public Collection<MetricsBase> getMetricsList() {
|
||||
return metricsList.values();
|
||||
}
|
||||
}
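The registry enables the usual doUpdates-style loop; a sketch, assuming a record obtained as in the earlier examples:

import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;

class RegistryPushSketch {
  // Pushes every metric registered so far into the given record.
  static void pushAll(MetricsRegistry registry, MetricsRecord record) {
    for (MetricsBase metric : registry.getMetricsList()) {
      metric.pushMetric(record);
    }
    record.update();
  }
}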
|
|
@ -1,129 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.util;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
/**
|
||||
* The MetricsTimeVaryingInt class is for a metric that naturally
|
||||
* varies over time (e.g. number of files created). The metric is accumulated
|
||||
* over an interval (set in the metrics config file); the metric is
|
||||
* published at the end of each interval and then
|
||||
* reset to zero. Hence the counter has the value in the current interval.
|
||||
*
|
||||
* Note if one wants a time associated with the metric then use
|
||||
* @see org.apache.hadoop.metrics.util.MetricsTimeVaryingRate
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
||||
public class MetricsTimeVaryingInt extends MetricsBase {
|
||||
|
||||
private static final Log LOG =
|
||||
LogFactory.getLog("org.apache.hadoop.metrics.util");
|
||||
|
||||
private int currentValue;
|
||||
private int previousIntervalValue;
|
||||
|
||||
|
||||
/**
|
||||
* Constructor - create a new metric
|
||||
* @param nam the name of the metrics to be used to publish the metric
|
||||
* @param registry - where the metrics object will be registered
|
||||
* @param description - the description
|
||||
*/
|
||||
public MetricsTimeVaryingInt(final String nam,
|
||||
final MetricsRegistry registry,
|
||||
final String description) {
|
||||
super(nam, description);
|
||||
currentValue = 0;
|
||||
previousIntervalValue = 0;
|
||||
registry.add(nam, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor - create a new metric
|
||||
* @param nam the name of the metrics to be used to publish the metric
|
||||
* @param registry - where the metrics object will be registered
|
||||
* A description of {@link #NO_DESCRIPTION} is used
|
||||
*/
|
||||
public MetricsTimeVaryingInt(final String nam, final MetricsRegistry registry) {
|
||||
this(nam, registry, NO_DESCRIPTION);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Increment the metric by the given value
|
||||
* @param incr - number of operations
|
||||
*/
|
||||
public synchronized void inc(final int incr) {
|
||||
currentValue += incr;
|
||||
}
|
||||
|
||||
/**
|
||||
* Inc metrics by one
|
||||
*/
|
||||
public synchronized void inc() {
|
||||
currentValue++;
|
||||
}
|
||||
|
||||
private synchronized void intervalHeartBeat() {
|
||||
previousIntervalValue = currentValue;
|
||||
currentValue = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Push the delta metrics to the mr.
|
||||
* The delta is since the last push/interval.
|
||||
*
|
||||
* Note this does NOT push to JMX
|
||||
* (JMX gets the info via {@link #previousIntervalValue})
|
||||
*
|
||||
* @param mr
|
||||
*/
|
||||
@Override
|
||||
public synchronized void pushMetric(final MetricsRecord mr) {
|
||||
intervalHeartBeat();
|
||||
try {
|
||||
mr.incrMetric(getName(), getPreviousIntervalValue());
|
||||
} catch (Exception e) {
|
||||
LOG.info("pushMetric failed for " + getName() + "\n" , e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The Value at the Previous interval
|
||||
* @return prev interval value
|
||||
*/
|
||||
public synchronized int getPreviousIntervalValue() {
|
||||
return previousIntervalValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* The Value at the current interval
|
||||
* @return current interval value
|
||||
*/
|
||||
public synchronized int getCurrentIntervalValue() {
|
||||
return currentValue;
|
||||
}
|
||||
}
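The interval semantics in miniature (sketch; the record is assumed to come from MetricsUtil):

import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;

class CounterSketch {
  public static void main(String[] args) {
    MetricsRegistry registry = new MetricsRegistry();
    MetricsTimeVaryingInt filesCreated =
        new MetricsTimeVaryingInt("FilesCreated", registry);

    filesCreated.inc();    // accumulate within the current interval
    filesCreated.inc(4);

    MetricsRecord record =
        MetricsUtil.createRecord(MetricsUtil.getContext("dfs"), "namenode");
    filesCreated.pushMetric(record);  // rolls the interval and emits the delta (5)

    // After the push the counter starts over and the last interval is readable.
    System.out.println(filesCreated.getPreviousIntervalValue()); // 5
    System.out.println(filesCreated.getCurrentIntervalValue());  // 0
  }
}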
|
|
@ -1,125 +0,0 @@
|
|||
package org.apache.hadoop.metrics.util;
|
||||
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
/**
|
||||
* The MetricsTimeVaryingLong class is for a metric that naturally
|
||||
* varies over time (e.g. number of files created). The metric is accumulated
|
||||
* over an interval (set in the metrics config file); the metric is
|
||||
* published at the end of each interval and then
|
||||
* reset to zero. Hence the counter has the value in the current interval.
|
||||
*
|
||||
* Note if one wants a time associated with the metric then use
|
||||
* @see org.apache.hadoop.metrics.util.MetricsTimeVaryingRate
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
||||
public class MetricsTimeVaryingLong extends MetricsBase{
|
||||
|
||||
private static final Log LOG =
|
||||
LogFactory.getLog("org.apache.hadoop.metrics.util");
|
||||
|
||||
private long currentValue;
|
||||
private long previousIntervalValue;
|
||||
|
||||
/**
|
||||
* Constructor - create a new metric
|
||||
* @param nam the name of the metrics to be used to publish the metric
|
||||
* @param registry - where the metrics object will be registered
|
||||
*/
|
||||
public MetricsTimeVaryingLong(final String nam, MetricsRegistry registry, final String description) {
|
||||
super(nam, description);
|
||||
currentValue = 0;
|
||||
previousIntervalValue = 0;
|
||||
registry.add(nam, this);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Constructor - create a new metric
|
||||
* @param nam the name of the metrics to be used to publish the metric
|
||||
* @param registry - where the metrics object will be registered
|
||||
* A description of {@link #NO_DESCRIPTION} is used
|
||||
*/
|
||||
public MetricsTimeVaryingLong(final String nam, MetricsRegistry registry) {
|
||||
this(nam, registry, NO_DESCRIPTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment the metric by the given value
|
||||
* @param incr - number of operations
|
||||
*/
|
||||
public synchronized void inc(final long incr) {
|
||||
currentValue += incr;
|
||||
}
|
||||
|
||||
/**
|
||||
* Inc metrics by one
|
||||
*/
|
||||
public synchronized void inc() {
|
||||
currentValue++;
|
||||
}
|
||||
|
||||
private synchronized void intervalHeartBeat() {
|
||||
previousIntervalValue = currentValue;
|
||||
currentValue = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Push the delta metrics to the mr.
|
||||
* The delta is since the last push/interval.
|
||||
*
|
||||
* Note this does NOT push to JMX
|
||||
* (JMX gets the info via {@link #previousIntervalValue})
|
||||
*
|
||||
* @param mr
|
||||
*/
|
||||
@Override
|
||||
public synchronized void pushMetric(final MetricsRecord mr) {
|
||||
intervalHeartBeat();
|
||||
try {
|
||||
mr.incrMetric(getName(), getPreviousIntervalValue());
|
||||
} catch (Exception e) {
|
||||
LOG.info("pushMetric failed for " + getName() + "\n" , e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The Value at the Previous interval
|
||||
* @return prev interval value
|
||||
*/
|
||||
public synchronized long getPreviousIntervalValue() {
|
||||
return previousIntervalValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* The Value at the current interval
|
||||
* @return current interval value
|
||||
*/
|
||||
public synchronized long getCurrentIntervalValue() {
|
||||
return currentValue;
|
||||
}
|
||||
}
|
|
@ -1,198 +0,0 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.metrics.util;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.metrics.MetricsRecord;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
/**
|
||||
* The MetricsTimeVaryingRate class is for a rate based metric that
|
||||
* naturally varies over time (e.g. time taken to create a file).
|
||||
* The rate is averaged at each interval heart beat (the interval
|
||||
* is set in the metrics config file).
|
||||
* This class also keeps track of the min and max rates along with
|
||||
* a method to reset the min-max.
|
||||
*
|
||||
* @deprecated Use org.apache.hadoop.metrics2 package instead.
|
||||
*/
|
||||
@Deprecated
|
||||
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
|
||||
public class MetricsTimeVaryingRate extends MetricsBase {
|
||||
|
||||
private static final Log LOG =
|
||||
LogFactory.getLog("org.apache.hadoop.metrics.util");
|
||||
|
||||
static class Metrics {
|
||||
int numOperations = 0;
|
||||
long time = 0; // total time or average time
|
||||
|
||||
void set(final Metrics resetTo) {
|
||||
numOperations = resetTo.numOperations;
|
||||
time = resetTo.time;
|
||||
}
|
||||
|
||||
void reset() {
|
||||
numOperations = 0;
|
||||
time = 0;
|
||||
}
|
||||
}
|
||||
|
||||
static class MinMax {
|
||||
long minTime = -1;
|
||||
long maxTime = 0;
|
||||
|
||||
void set(final MinMax newVal) {
|
||||
minTime = newVal.minTime;
|
||||
maxTime = newVal.maxTime;
|
||||
}
|
||||
|
||||
void reset() {
|
||||
minTime = -1;
|
||||
maxTime = 0;
|
||||
}
|
||||
void update(final long time) { // update min max
|
||||
minTime = (minTime == -1) ? time : Math.min(minTime, time);
|
||||
maxTime = Math.max(maxTime, time);
|
||||
}
|
||||
}
|
||||
private Metrics currentData;
|
||||
private Metrics previousIntervalData;
|
||||
private MinMax minMax;
|
||||
|
||||
|
||||
/**
|
||||
* Constructor - create a new metric
|
||||
* @param nam the name of the metrics to be used to publish the metric
|
||||
* @param registry - where the metrics object will be registered
|
||||
*/
|
||||
public MetricsTimeVaryingRate(final String nam, final MetricsRegistry registry, final String description) {
|
||||
super(nam, description);
|
||||
currentData = new Metrics();
|
||||
previousIntervalData = new Metrics();
|
||||
minMax = new MinMax();
|
||||
registry.add(nam, this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor - create a new metric
|
||||
* @param nam the name of the metrics to be used to publish the metric
|
||||
* @param registry - where the metrics object will be registered
|
||||
* A description of {@link #NO_DESCRIPTION} is used
|
||||
*/
|
||||
public MetricsTimeVaryingRate(final String nam, MetricsRegistry registry) {
|
||||
this(nam, registry, NO_DESCRIPTION);
|
||||
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Increment the metrics for numOps operations
|
||||
* @param numOps - number of operations
|
||||
* @param time - time for numOps operations
|
||||
*/
|
||||
public synchronized void inc(final int numOps, final long time) {
|
||||
currentData.numOperations += numOps;
|
||||
currentData.time += time;
|
||||
long timePerOps = time/numOps;
|
||||
minMax.update(timePerOps);
|
||||
}
|
||||
|
||||
/**
|
||||
* Increment the metrics for one operation
|
||||
* @param time for one operation
|
||||
*/
|
||||
public synchronized void inc(final long time) {
|
||||
currentData.numOperations++;
|
||||
currentData.time += time;
|
||||
minMax.update(time);
|
||||
}
|
||||
|
||||
|
||||
|
||||
private synchronized void intervalHeartBeat() {
|
||||
previousIntervalData.numOperations = currentData.numOperations;
|
||||
previousIntervalData.time = (currentData.numOperations == 0) ?
|
||||
0 : currentData.time / currentData.numOperations;
|
||||
currentData.reset();
|
||||
}
|
||||
|
||||
/**
|
||||
* Push the delta metrics to the mr.
|
||||
* The delta is since the last push/interval.
|
||||
*
|
||||
* Note this does NOT push to JMX
|
||||
* (JMX gets the info via {@link #getPreviousIntervalAverageTime()} and
|
||||
* {@link #getPreviousIntervalNumOps()})
|
||||
*
|
||||
* @param mr
|
||||
*/
|
||||
@Override
|
||||
public synchronized void pushMetric(final MetricsRecord mr) {
|
||||
intervalHeartBeat();
|
||||
try {
|
||||
mr.incrMetric(getName() + "_num_ops", getPreviousIntervalNumOps());
|
||||
mr.setMetric(getName() + "_avg_time", getPreviousIntervalAverageTime());
|
||||
} catch (Exception e) {
|
||||
LOG.info("pushMetric failed for " + getName() + "\n" , e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The number of operations in the previous interval
|
||||
* @return - ops in prev interval
|
||||
*/
|
||||
public synchronized int getPreviousIntervalNumOps() {
|
||||
return previousIntervalData.numOperations;
|
||||
}
|
||||
|
||||
/**
|
||||
* The average rate of an operation in the previous interval
|
||||
* @return - the average rate.
|
||||
*/
|
||||
public synchronized long getPreviousIntervalAverageTime() {
|
||||
return previousIntervalData.time;
|
||||
}
|
||||
|
||||
/**
|
||||
* The min time for a single operation since the last reset
|
||||
* {@link #resetMinMax()}
|
||||
* @return min time for an operation
|
||||
*/
|
||||
public synchronized long getMinTime() {
|
||||
return minMax.minTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* The max time for a single operation since the last reset
|
||||
* {@link #resetMinMax()}
|
||||
* @return max time for an operation
|
||||
*/
|
||||
public synchronized long getMaxTime() {
|
||||
return minMax.maxTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the min max values
|
||||
*/
|
||||
public synchronized void resetMinMax() {
|
||||
minMax.reset();
|
||||
}
|
||||
}
|
|
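For context, a minimal usage sketch of the class removed above (not part of this commit). It relies only on metrics v1 types that appear elsewhere in this change set (MetricsRegistry, NoEmitMetricsContext, ContextFactory, MetricsRecord) plus a no-argument MetricsRegistry constructor; the class name, the "rpcProcessingTime" metric, and the inline timing are illustrative assumptions, and in a real daemon pushMetric() would be driven from a periodic updater callback rather than called inline.

import java.io.IOException;

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.spi.NoEmitMetricsContext;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;

public class MetricsTimeVaryingRateSketch { // illustrative name, not part of Hadoop
  public static void main(String[] args) throws IOException {
    MetricsRegistry registry = new MetricsRegistry();
    MetricsTimeVaryingRate rpcTime =
        new MetricsTimeVaryingRate("rpcProcessingTime", registry, "time per RPC");

    // Record operations and their elapsed times (milliseconds here).
    long start = System.currentTimeMillis();
    // ... the work being measured would run here ...
    rpcTime.inc(System.currentTimeMillis() - start); // one operation
    rpcTime.inc(3, 42L);                             // three operations, 42 ms total

    // At the end of an interval, push <name>_num_ops and <name>_avg_time into a
    // MetricsRecord; a NoEmitMetricsContext keeps records in memory instead of
    // emitting them anywhere.
    MetricsContext ctx = new NoEmitMetricsContext();
    ctx.init("sketch", ContextFactory.getFactory());
    MetricsRecord record = ctx.createRecord("rpc");
    rpcTime.pushMetric(record);
    record.update();

    System.out.println("ops: " + rpcTime.getPreviousIntervalNumOps()
        + ", avg time (ms): " + rpcTime.getPreviousIntervalAverageTime()
        + ", max (ms): " + rpcTime.getMaxTime());
  }
}

The pushed record carries only the _num_ops and _avg_time values for the interval just closed, which is exactly what pushMetric() above emits to the metrics v1 sinks.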
@@ -1,22 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
@InterfaceStability.Evolving
package org.apache.hadoop.metrics.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -323,7 +323,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
  }

  /**
-  * Verify the access for /logs, /stacks, /conf, /logLevel and /metrics
+  * Verify the access for /logs, /stacks, /conf, and /logLevel
   * servlets, when authentication filters are set, but authorization is not
   * enabled.
   * @throws Exception
@@ -349,7 +349,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
    myServer.start();
    String serverURL = "http://" + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
    for (String servlet : new String[] { "conf", "logs", "stacks",
-      "logLevel", "metrics" }) {
+      "logLevel" }) {
      for (String user : new String[] { "userA", "userB" }) {
        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
            + servlet, user));
@@ -359,8 +359,8 @@ public class TestHttpServer extends HttpServerFunctionalTest {
  }

  /**
-  * Verify the administrator access for /logs, /stacks, /conf, /logLevel and
-  * /metrics servlets.
+  * Verify the administrator access for /logs, /stacks, /conf, and /logLevel
+  * servlets.
   *
   * @throws Exception
   */
@@ -393,7 +393,7 @@ public class TestHttpServer extends HttpServerFunctionalTest {
    String serverURL = "http://"
        + NetUtils.getHostPortString(myServer.getConnectorAddress(0)) + "/";
    for (String servlet : new String[] { "conf", "logs", "stacks",
-      "logLevel", "metrics" }) {
+      "logLevel" }) {
      for (String user : new String[] { "userA", "userB", "userC", "userD" }) {
        assertEquals(HttpURLConnection.HTTP_OK, getHttpStatusCode(serverURL
            + servlet, user));
@@ -1,112 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metrics;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import junit.framework.TestCase;

import org.apache.hadoop.metrics.MetricsServlet.TagsMetricsPair;
import org.apache.hadoop.metrics.spi.NoEmitMetricsContext;
import org.apache.hadoop.metrics.spi.OutputRecord;
import org.mortbay.util.ajax.JSON;

@Deprecated
public class TestMetricsServlet extends TestCase {
  MetricsContext nc1;
  MetricsContext nc2;
  // List containing nc1 and nc2.
  List<MetricsContext> contexts;
  OutputRecord outputRecord;

  /**
   * Initializes, for testing, two NoEmitMetricsContexts, and adds one value
   * to the first of them.
   */
  @Override
  public void setUp() throws IOException {
    nc1 = new NoEmitMetricsContext();
    nc1.init("test1", ContextFactory.getFactory());
    nc2 = new NoEmitMetricsContext();
    nc2.init("test2", ContextFactory.getFactory());
    contexts = new ArrayList<MetricsContext>();
    contexts.add(nc1);
    contexts.add(nc2);

    MetricsRecord r = nc1.createRecord("testRecord");

    r.setTag("testTag1", "testTagValue1");
    r.setTag("testTag2", "testTagValue2");
    r.setMetric("testMetric1", 1);
    r.setMetric("testMetric2", 33);
    r.update();

    Map<String, Collection<OutputRecord>> m = nc1.getAllRecords();
    assertEquals(1, m.size());
    assertEquals(1, m.values().size());
    Collection<OutputRecord> outputRecords = m.values().iterator().next();
    assertEquals(1, outputRecords.size());
    outputRecord = outputRecords.iterator().next();
  }

  public void testTagsMetricsPair() throws IOException {
    TagsMetricsPair pair = new TagsMetricsPair(outputRecord.getTagsCopy(),
        outputRecord.getMetricsCopy());
    String s = JSON.toString(pair);
    assertEquals(
        "[{\"testTag1\":\"testTagValue1\",\"testTag2\":\"testTagValue2\"}," +
        "{\"testMetric1\":1,\"testMetric2\":33}]", s);
  }

  public void testGetMap() throws IOException {
    MetricsServlet servlet = new MetricsServlet();
    Map<String, Map<String, List<TagsMetricsPair>>> m = servlet.makeMap(contexts);
    assertEquals("Map missing contexts", 2, m.size());
    assertTrue(m.containsKey("test1"));

    Map<String, List<TagsMetricsPair>> m2 = m.get("test1");

    assertEquals("Missing records", 1, m2.size());
    assertTrue(m2.containsKey("testRecord"));
    assertEquals("Wrong number of tags-values pairs.", 1, m2.get("testRecord").size());
  }

  public void testPrintMap() throws IOException {
    StringWriter sw = new StringWriter();
    PrintWriter out = new PrintWriter(sw);
    MetricsServlet servlet = new MetricsServlet();
    servlet.printMap(out, servlet.makeMap(contexts));

    String EXPECTED = "" +
        "test1\n" +
        "  testRecord\n" +
        "    {testTag1=testTagValue1,testTag2=testTagValue2}:\n" +
        "      testMetric1=1\n" +
        "      testMetric2=33\n" +
        "test2\n";
    assertEquals(EXPECTED, sw.toString());
  }
}
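As a companion to testTagsMetricsPair() above (not part of this commit), the sketch below parses the same JSON back with the Jetty 6 org.mortbay.util.ajax.JSON class the test already uses, to show the shape of the removed servlet's JSON output: per record, a two-element array holding a tags map and a metrics map. The class name is illustrative, and the sketch assumes Jetty's default type mapping (JSON arrays to Object[], JSON objects to java.util.Map, integer numbers to Long).

import java.util.Map;

import org.mortbay.util.ajax.JSON;

public class MetricsJsonSketch { // illustrative name, not part of Hadoop
  public static void main(String[] args) {
    // The pair serialized in testTagsMetricsPair(): [tagsMap, metricsMap].
    String s = "[{\"testTag1\":\"testTagValue1\",\"testTag2\":\"testTagValue2\"},"
        + "{\"testMetric1\":1,\"testMetric2\":33}]";
    Object[] pair = (Object[]) JSON.parse(s);   // assumes default array mapping
    Map<?, ?> tags = (Map<?, ?>) pair[0];       // {testTag1=testTagValue1, ...}
    Map<?, ?> metrics = (Map<?, ?>) pair[1];    // {testMetric1=1, testMetric2=33}
    System.out.println(tags.get("testTag1"));       // testTagValue1
    System.out.println(metrics.get("testMetric2")); // 33
  }
}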
@@ -1,84 +0,0 @@
/*
 * TestGangliaContext.java
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.metrics.ganglia;

import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext;

import java.net.MulticastSocket;

@Deprecated
public class TestGangliaContext {
  @Test
  public void testShouldCreateDatagramSocketByDefault() throws Exception {
    GangliaContext context = new GangliaContext();
    context.init("gangliaContext", ContextFactory.getFactory());
    assertFalse("Created MulticastSocket", context.datagramSocket instanceof MulticastSocket);
  }

  @Test
  public void testShouldCreateDatagramSocketIfMulticastIsDisabled() throws Exception {
    GangliaContext context = new GangliaContext();
    ContextFactory factory = ContextFactory.getFactory();
    factory.setAttribute("gangliaContext.multicast", "false");
    context.init("gangliaContext", factory);
    assertFalse("Created MulticastSocket", context.datagramSocket instanceof MulticastSocket);
  }

  @Test
  public void testShouldCreateMulticastSocket() throws Exception {
    GangliaContext context = new GangliaContext();
    ContextFactory factory = ContextFactory.getFactory();
    factory.setAttribute("gangliaContext.multicast", "true");
    context.init("gangliaContext", factory);
    assertTrue("Did not create MulticastSocket", context.datagramSocket instanceof MulticastSocket);
    MulticastSocket multicastSocket = (MulticastSocket) context.datagramSocket;
    assertEquals("Did not set default TTL", multicastSocket.getTimeToLive(), 1);
  }

  @Test
  public void testShouldSetMulticastSocketTtl() throws Exception {
    GangliaContext context = new GangliaContext();
    ContextFactory factory = ContextFactory.getFactory();
    factory.setAttribute("gangliaContext.multicast", "true");
    factory.setAttribute("gangliaContext.multicast.ttl", "10");
    context.init("gangliaContext", factory);
    MulticastSocket multicastSocket = (MulticastSocket) context.datagramSocket;
    assertEquals("Did not set TTL", multicastSocket.getTimeToLive(), 10);
  }

  @Test
  public void testCloseShouldCloseTheSocketWhichIsCreatedByInit() throws Exception {
    AbstractMetricsContext context = new GangliaContext();
    context.init("gangliaContext", ContextFactory.getFactory());
    GangliaContext gangliaContext = (GangliaContext) context;
    assertFalse("Socket already closed", gangliaContext.datagramSocket.isClosed());
    context.close();
    assertTrue("Socket not closed", gangliaContext.datagramSocket.isClosed());
  }
}
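The tests above configure GangliaContext through ContextFactory attributes; a hedged sketch (not part of this commit) of how the remaining knobs would be combined the same way. The "servers" and "period" attribute names and their example values are assumptions based on the standard metrics v1 Ganglia configuration keys, while the multicast attributes are exactly the ones exercised by the tests.

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.ganglia.GangliaContext;

public class GangliaContextConfigSketch { // illustrative name, not part of Hadoop
  public static void main(String[] args) throws Exception {
    ContextFactory factory = ContextFactory.getFactory();
    // Assumed standard metrics v1 keys: where to send, and how often (seconds).
    factory.setAttribute("gangliaContext.servers", "gmond.example.com:8649");
    factory.setAttribute("gangliaContext.period", "60");
    // The multicast attributes verified by the tests above.
    factory.setAttribute("gangliaContext.multicast", "true");
    factory.setAttribute("gangliaContext.multicast.ttl", "10");

    GangliaContext context = new GangliaContext();
    context.init("gangliaContext", factory);
    // ... records would be created via createRecord() and emitted each period ...
    context.close(); // closes the socket opened by init(), as the last test checks
  }
}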
@@ -1,39 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.metrics.spi;

import org.apache.hadoop.metrics.spi.AbstractMetricsContext.MetricMap;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;

import junit.framework.TestCase;

@Deprecated
public class TestOutputRecord extends TestCase {
  public void testCopy() {
    TagMap tags = new TagMap();
    tags.put("tagkey", "tagval");
    MetricMap metrics = new MetricMap();
    metrics.put("metrickey", 123.4);
    OutputRecord r = new OutputRecord(tags, metrics);

    assertEquals(tags, r.getTagsCopy());
    assertNotSame(tags, r.getTagsCopy());
    assertEquals(metrics, r.getMetricsCopy());
    assertNotSame(metrics, r.getMetricsCopy());
  }
}